From: Vasudev Kamath
Date: Sat, 2 Jun 2018 09:40:38 +0000 (+0530)
Subject: Import cargo_0.27.0.orig.tar.gz
X-Git-Tag: archive/raspbian/0.35.0-2+rpi1~3^2^2^2^2^2^2^2^2~1^2^2~6
X-Git-Url: https://dgit.raspbian.org/?a=commitdiff_plain;h=0ac5389f0ea11332c46d1ebe7d09868edcbec9e4;p=cargo.git

Import cargo_0.27.0.orig.tar.gz

[dgit import orig cargo_0.27.0.orig.tar.gz]
---

0ac5389f0ea11332c46d1ebe7d09868edcbec9e4
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 000000000..85e363a37
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,14 @@
+/target
+Cargo.lock
+.cargo
+/config.stamp
+/Makefile
+/config.mk
+src/doc/build
+src/etc/*.pyc
+src/registry/target
+rustc
+__pycache__
+.idea/
+*.iml
+*.swp
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 000000000..a9b85bc90
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,54 @@
+language: rust
+rust: stable
+sudo: required
+dist: trusty
+
+git:
+  depth: 1
+
+# Using 'cache: cargo' to cache target/ and all of $HOME/.cargo/
+# doesn't work well: the cache is large and it takes several minutes
+# to move it to and from S3. So instead we only cache the mdbook
+# binary.
+cache:
+  directories:
+    - $HOME/.cargo/bin/
+
+matrix:
+  include:
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+    - env: TARGET=x86_64-apple-darwin
+           ALT=i686-apple-darwin
+      os: osx
+
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+      rust: beta
+
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+      rust: nightly-2018-03-07
+      install:
+        - mdbook --help || cargo install mdbook --force
+      script:
+        - cargo test
+        - cargo doc --no-deps
+        - (cd src/doc && mdbook build --dest-dir ../../target/doc)
+
+  exclude:
+    - rust: stable
+
+before_script:
+  - rustup target add $ALT
+script:
+  - cargo test
+
+notifications:
+  email:
+    on_success: never
+
+addons:
+  apt:
+    packages:
+      - gcc-multilib
diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md
new file mode 100644
index 000000000..68ce7e6cf
--- /dev/null
+++ b/ARCHITECTURE.md
@@ -0,0 +1,105 @@
+# Cargo Architecture
+
+This document gives a high-level overview of Cargo internals. You may
+find it useful if you want to contribute to Cargo or if you are
+interested in the inner workings of Cargo.
+
+
+## Subcommands
+
+Cargo is organized as a set of `clap` subcommands. All subcommands live in
+the `src/bin/commands` directory. `src/bin/cargo.rs` is the entry point.
+
+A typical subcommand, such as `src/bin/commands/build.rs`, parses command-line
+options, reads the configuration files, discovers the Cargo project in
+the current directory and delegates the actual implementation to one
+of the functions in `src/cargo/ops/mod.rs`. This short file is a good
+place to find out about most of the things that Cargo can do.
+
+
+## Important Data Structures
+
+There are some important data structures which are used throughout
+Cargo.
+
+`Config` is available almost everywhere and holds "global"
+information, such as `CARGO_HOME` or configuration from
+`.cargo/config` files. The `shell` method of `Config` is the entry
+point for printing status messages and other info to the console.
+
+`Workspace` is the description of the workspace for the current
+working directory. Each workspace contains at least one
+`Package`. Each package corresponds to a single `Cargo.toml` and may
+define several `Target`s, such as a library, binaries, integration
+tests or examples. Targets are crates (each target defines a crate
+root, like `src/lib.rs` or `examples/foo.rs`) and are what is actually
+compiled by `rustc`.
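+
+To make these relationships concrete, here is a rough sketch of how one
+might walk from a workspace down to its targets. It is illustrative
+only: the accessor names (`members`, `targets`, `name`) are assumptions
+about this era of the code base, not a reference.
+
+```rust
+extern crate cargo;
+
+use cargo::core::Workspace;
+use cargo::util::{CargoResult, Config};
+
+fn list_targets() -> CargoResult<()> {
+    let config = Config::default()?;
+    // A workspace is discovered starting from a Cargo.toml.
+    let ws = Workspace::new(&config.cwd().join("Cargo.toml"), &config)?;
+    for package in ws.members() {          // each member is a `Package`
+        for target in package.targets() {  // each package has `Target`s
+            println!("{} -> {}", package.name(), target.name());
+        }
+    }
+    Ok(())
+}
+```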
+
+A typical package defines a single library target and several
+auxiliary ones. Packages are the unit of dependency in Cargo: when
+package `foo` depends on package `bar`, each target
+from `foo` needs the library target from `bar`.
+
+`PackageId` is the unique identifier of a (possibly remote)
+package. It consists of three components: name, version and source
+id. The source is the place where the source code for a package comes
+from. Typical sources are crates.io, a git repository or a folder on
+the local hard drive.
+
+`Resolve` is the representation of a directed acyclic graph of package
+dependencies, which uses `PackageId`s for nodes. This is the data
+structure that is saved to the lock file. If there is no lock file,
+Cargo constructs a resolve by finding a graph of packages which
+matches the declared dependency specifications according to semver.
+
+
+## Persistence
+
+Cargo is a non-daemon command-line application, which means that all
+the information used by Cargo must be persisted on the hard drive. The
+main sources of information are the `Cargo.toml` and `Cargo.lock` files,
+`.cargo/config` configuration files and the globally shared registry
+of packages downloaded from crates.io, usually located at
+`~/.cargo/registry`. See `src/sources/registry` for the specifics of
+the registry storage format.
+
+
+## Concurrency
+
+Cargo is mostly single-threaded. The only concurrency inside a single
+instance of Cargo happens during compilation, when several instances
+of `rustc` are invoked in parallel to build independent
+targets. However, several different instances of the Cargo process may
+run concurrently on the system. Cargo guarantees that this
+is always safe by using file locks when accessing potentially shared
+data like the registry or the target directory.
+
+
+## Tests
+
+Cargo has an extensive test suite located in the `tests` folder. Most
+of the tests are integration tests: a project structure with a `Cargo.toml` and
+Rust source code is created in a temporary directory, the `cargo` binary
+is invoked via `std::process::Command`, and then stdout and stderr are
+verified against the expected output. To simplify testing, several
+macros of the form `[MACRO]` are used in the expected output. For
+example, `[..]` matches any string and `[/]` matches `/` on Unix and
+`\` on Windows.
+
+To see the stdout and stderr streams of the subordinate process, add a `.stream()`
+call to `execs()`:
+
+```rust
+// Before
+assert_that(
+    p.cargo("run"),
+    execs().with_status(0)
+);
+
+// After
+assert_that(
+    p.cargo("run"),
+    execs().stream().with_status(0)
+);
+```
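+
+For instance, a hypothetical assertion on Cargo's status output might
+combine these macros with the `with_stderr` helper (the exact expected
+strings vary from test to test; treat this as a sketch):
+
+```rust
+// `[..]` absorbs the platform-specific project path and the timing,
+// so the test does not depend on where or how fast it runs.
+assert_that(
+    p.cargo("build"),
+    execs().with_status(0).with_stderr(
+        "\
+[COMPILING] foo v0.0.1 (file://[..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+    ),
+);
+```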
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 000000000..55b29fe15
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,180 @@
+# Contributing to Cargo
+
+Thank you for your interest in contributing to Cargo! Good places to
+start are this document, [ARCHITECTURE.md](ARCHITECTURE.md), which
+describes the high-level structure of Cargo, and [E-easy] bugs on the
+issue tracker.
+
+If you have a general question about Cargo or its internals, feel free to ask
+on [IRC].
+
+## Code of Conduct
+
+All contributors are expected to follow our [Code of Conduct].
+
+## Bug reports
+
+We can't fix what we don't know about, so please report problems liberally. This
+includes problems with understanding the documentation, unhelpful error messages,
+and unexpected behavior.
+
+**If you think that you have identified an issue with Cargo that might compromise
+its users' security, please do not open a public issue on GitHub. Instead,
+we ask you to refer to Rust's [security policy].**
+
+Opening an issue is as easy as following [this
+link][new-issues] and filling out the fields.
+Here's a template that you can use to file an issue, though it's not necessary to
+use it exactly:
+
+    I tried this:
+
+    I expected to see this happen:
+
+    Instead, this happened:
+
+    I'm using <output of `cargo --version`>
+
+All three components are important: what you did, what you expected, what
+happened instead. Please use https://gist.github.com/ if your examples run long.
+
+## Working on issues
+
+If you're looking for somewhere to start, check out the [E-easy] tag.
+
+Feel free to ask for guidelines on how to tackle a problem on [IRC] or open a
+[new issue][new-issues]. This is especially important if you want to add new
+features to Cargo or make large changes to the already existing code base.
+Cargo's core developers will do their best to provide help.
+
+If you start working on an already-filed issue, post a comment on that issue to
+let people know that somebody is working on it. Feel free to ask for comments if
+you are unsure about the solution you would like to submit.
+
+While Cargo does make use of some Rust features available only through the
+`nightly` toolchain, it must compile on stable Rust. Code added to Cargo
+is encouraged to make use of the latest stable features of the language and
+`stdlib`.
+
+We use the "fork and pull" model [described here][development-models], where
+contributors push changes to their personal fork and create pull requests to
+bring those changes into the source repository. This process is partly
+automated: pull requests are made against Cargo's `master` branch, tested and
+reviewed. Once a change is approved to be merged, a friendly bot merges the
+changes into an internal branch, runs the full test suite on that branch
+and only then merges into `master`. This ensures that Cargo's `master` branch
+passes the test suite at all times.
+
+Your basic steps to get going:
+
+* Fork Cargo and create a branch from `master` for the issue you are working on.
+* Please adhere to the code style that you see around the location you are
+working on.
+* [Commit as you go][githelp].
+* Include tests that cover all non-trivial code. The existing tests
+in `tests/` provide templates on how to test Cargo's behavior in a
+sandbox environment. The internal crate `cargotest` provides many
+helpers to minimize boilerplate (see the sketch after this list).
+* Make sure `cargo test` passes. If you do not have the cross-compilers
+installed locally, ignore the cross-compile test failures or disable them by
+using `CFG_DISABLE_CROSS_TESTS=1 cargo test`. Note that some tests are enabled
+only on the `nightly` toolchain. If you can, test both toolchains.
+* All code is expected to comply with the formatting suggested by `rustfmt`;
+any discrepancy is considered an error by CI, so a pull request with
+nonconforming formatting will not be accepted. You can use `rustup component
+add rustfmt-preview` to install `rustfmt` and use `cargo fmt` to automatically
+format your code.
+* Push your commits to GitHub and create a pull request against Cargo's
+`master` branch.
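+
+For orientation, a minimal integration test in this style looks roughly
+like the sketch below. It assumes the `cargotest` helpers of this era
+(`project`, `execs`) and `hamcrest`'s `assert_that`; treat the exact
+names and signatures as assumptions rather than a reference.
+
+```rust
+extern crate cargotest;
+extern crate hamcrest;
+
+use cargotest::support::{execs, project};
+use hamcrest::assert_that;
+
+#[test]
+fn builds_a_trivial_package() {
+    // Lay out a throwaway project in a temporary directory.
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    // Run `cargo build` in the sandbox and check the exit status.
+    assert_that(p.cargo("build"), execs().with_status(0));
+}
+```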
+
+## Pull requests
+
+After the pull request is made, a friendly bot will automatically assign a
+reviewer; the review process will make sure that the proposed changes are
+sound. Please give the assigned reviewer sufficient time, especially during
+weekends. If you don't get a reply, you may poke the core developers on [IRC].
+
+A merge of Cargo's `master` branch and your changes is immediately queued
+to be tested after the pull request is made. In case unforeseen
+problems are discovered during this step (e.g., a failure on a platform you
+originally did not develop on), you may ask for guidance. Push additional
+commits to your branch to tackle these problems.
+
+The reviewer might point out changes deemed necessary. Please add them as
+extra commits; this ensures that the reviewer can see what has changed since
+the code was previously reviewed. Large or tricky changes may require several
+passes of review and changes.
+
+Once the reviewer approves your pull request, a friendly bot picks it up
+and [merges][mergequeue] it into Cargo's `master` branch.
+
+## Contributing to the documentation
+
+To contribute to the documentation, all you need to do is change the markdown
+files in the `src/doc` directory. To view the rendered version of changes you
+have made locally, make sure you have `mdbook` installed and run:
+
+```sh
+cd src/doc
+mdbook build
+open book/index.html
+```
+
+To install `mdbook`, run `cargo install mdbook`.
+
+
+## Issue Triage
+
+Sometimes an issue will stay open, even though the bug has been fixed. And
+sometimes, the original bug may go stale because something has changed in the
+meantime.
+
+It can be helpful to go through older bug reports and make sure that they are
+still valid. Load up an older issue, double-check that it's still true, and
+leave a comment letting us know if it is or is not. The [least recently
+updated sort][lru] is good for finding issues like this.
+
+Contributors with sufficient permissions on the Rust repository can help by
+adding labels to triage issues:
+
+* Yellow, **A**-prefixed labels state which **area** of the project an issue
+  relates to.
+
+* Magenta, **B**-prefixed labels identify bugs which are **blockers**.
+
+* Light purple, **C**-prefixed labels represent the **category** of an issue.
+
+* Dark purple, **Command**-prefixed labels mean the issue has to do with a
+  specific cargo command.
+
+* Green, **E**-prefixed labels explain the level of **experience** or
+  **effort** necessary to fix the issue. [**E-mentor**][E-mentor] issues also
+  have some instructions on how to get started.
+
+* Red, **I**-prefixed labels indicate the **importance** of the issue. The
+  [I-nominated][inom] label indicates that an issue has been nominated for
+  prioritization at the next triage meeting.
+
+* Purple gray, **O**-prefixed labels are the **operating system** or platform
+  that this issue is specific to.
+
+* Orange, **P**-prefixed labels indicate a bug's **priority**. These labels
+  are only assigned during triage meetings and replace the [I-nominated][inom]
+  label.
+
+* The light orange **relnotes** label marks issues that should be documented in
+  the release notes of the next release.
+ + +[githelp]: https://dont-be-afraid-to-commit.readthedocs.io/en/latest/git/commandlinegit.html +[development-models]: https://help.github.com/articles/about-collaborative-development-models/ +[gist]: https://gist.github.com/ +[new-issues]: https://github.com/rust-lang/cargo/issues/new +[mergequeue]: https://buildbot2.rust-lang.org/homu/queue/cargo +[security policy]: https://www.rust-lang.org/security.html +[lru]: https://github.com/rust-lang/cargo/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-asc +[E-easy]: https://github.com/rust-lang/cargo/labels/E-easy +[E-mentor]: https://github.com/rust-lang/cargo/labels/E-mentor +[Code of Conduct]: https://www.rust-lang.org/conduct.html +[IRC]: https://kiwiirc.com/client/irc.mozilla.org/cargo diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 000000000..6a37ad3e7 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,92 @@ +[package] +name = "cargo" +version = "0.27.0" +authors = ["Yehuda Katz ", + "Carl Lerche ", + "Alex Crichton "] +license = "MIT OR Apache-2.0" +homepage = "https://crates.io" +repository = "https://github.com/rust-lang/cargo" +documentation = "https://docs.rs/cargo" +description = """ +Cargo, a package manager for Rust. +""" + +[lib] +name = "cargo" +path = "src/cargo/lib.rs" + +[dependencies] +atty = "0.2" +crates-io = { path = "src/crates-io", version = "0.16" } +crossbeam = "0.3" +crypto-hash = "0.3" +curl = "0.4.6" +env_logger = "0.5" +failure = "0.1.1" +filetime = "0.1" +flate2 = "1.0" +fs2 = "0.4" +git2 = "0.7.0" +git2-curl = "0.8" +glob = "0.2" +hex = "0.3" +home = "0.3" +ignore = "0.4" +lazy_static = "1.0.0" +jobserver = "0.1.9" +lazycell = "0.6" +libc = "0.2" +libgit2-sys = "0.7" +log = "0.4" +num_cpus = "1.0" +same-file = "1" +semver = { version = "0.9.0", features = ["serde"] } +serde = "1.0" +serde_derive = "1.0" +serde_ignored = "0.0.4" +serde_json = "1.0" +shell-escape = "0.1" +tar = { version = "0.4", default-features = false } +tempfile = "3.0" +termcolor = "0.3" +toml = "0.4" +url = "1.1" +clap = "2.31.2" + +# Not actually needed right now but required to make sure that rls/cargo build +# with the same set of features in rust-lang/rust +num-traits = "0.2" # enable the default feature + +[target.'cfg(target_os = "macos")'.dependencies] +core-foundation = { version = "0.5.1", features = ["mac_os_10_7_support"] } + +[target.'cfg(windows)'.dependencies] +miow = "0.3" + +[target.'cfg(windows)'.dependencies.winapi] +version = "0.3" +features = [ + "handleapi", + "jobapi", + "jobapi2", + "minwindef", + "ntdef", + "ntstatus", + "processenv", + "processthreadsapi", + "psapi", + "synchapi", + "winerror", + "winbase", + "wincon", + "winnt", +] + +[dev-dependencies] +bufstream = "0.1" + +[[bin]] +name = "cargo" +test = false +doc = false diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 000000000..31aa79387 --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/LICENSE-THIRD-PARTY b/LICENSE-THIRD-PARTY new file mode 100644 index 000000000..c9897b96f --- /dev/null +++ b/LICENSE-THIRD-PARTY @@ -0,0 +1,1272 @@ +The Cargo source code itself does not bundle any third party libraries, but it +depends on a number of libraries which carry their own copyright notices and +license terms. These libraries are normally all linked static into the binary +distributions of Cargo: + +* OpenSSL - http://www.openssl.org/source/license.html + + Copyright (c) 1998-2011 The OpenSSL Project. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + 3. All advertising materials mentioning features or use of this + software must display the following acknowledgment: + "This product includes software developed by the OpenSSL Project + for use in the OpenSSL Toolkit. (http://www.openssl.org/)" + + 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to + endorse or promote products derived from this software without + prior written permission. For written permission, please contact + openssl-core@openssl.org. + + 5. Products derived from this software may not be called "OpenSSL" + nor may "OpenSSL" appear in their names without prior written + permission of the OpenSSL Project. + + 6. Redistributions of any form whatsoever must retain the following + acknowledgment: + "This product includes software developed by the OpenSSL Project + for use in the OpenSSL Toolkit (http://www.openssl.org/)" + + THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY + EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE OpenSSL PROJECT OR + ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + OF THE POSSIBILITY OF SUCH DAMAGE. + ==================================================================== + + This product includes cryptographic software written by Eric Young + (eay@cryptsoft.com). This product includes software written by Tim + Hudson (tjh@cryptsoft.com). + + --- + + Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + All rights reserved. + + This package is an SSL implementation written + by Eric Young (eay@cryptsoft.com). + The implementation was written so as to conform with Netscapes SSL. + + This library is free for commercial and non-commercial use as long as + the following conditions are aheared to. The following conditions + apply to all code found in this distribution, be it the RC4, RSA, + lhash, DES, etc., code; not just the SSL code. The SSL documentation + included with this distribution is covered by the same copyright terms + except that the holder is Tim Hudson (tjh@cryptsoft.com). + + Copyright remains Eric Young's, and as such any Copyright notices in + the code are not to be removed. + If this package is used in a product, Eric Young should be given attribution + as the author of the parts of the library used. + This can be in the form of a textual message at program startup or + in documentation (online or textual) provided with the package. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + 1. Redistributions of source code must retain the copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. All advertising materials mentioning features or use of this software + must display the following acknowledgement: + "This product includes cryptographic software written by + Eric Young (eay@cryptsoft.com)" + The word 'cryptographic' can be left out if the rouines from the library + being used are not cryptographic related :-). + 4. If you include any Windows specific code (or a derivative thereof) from + the apps directory (application code) you must include an acknowledgement: + "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + + THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + + The licence and distribution terms for any publically available version or + derivative of this code cannot be changed. i.e. this code cannot simply be + copied and put under another distribution licence + [including the GNU Public Licence.] + +* libgit2 - https://github.com/libgit2/libgit2/blob/master/COPYING + + libgit2 is Copyright (C) the libgit2 contributors, + unless otherwise stated. See the AUTHORS file for details. + + Note that the only valid version of the GPL as far as this project + is concerned is _this_ particular version of the license (ie v2, not + v2.2 or v3.x or whatever), unless explicitly otherwise stated. + + ---------------------------------------------------------------------- + + LINKING EXCEPTION + + In addition to the permissions in the GNU General Public License, + the authors give you unlimited permission to link the compiled + version of this library into combinations with other programs, + and to distribute those combinations without any restriction + coming from the use of this file. (The General Public License + restrictions do apply in other respects; for example, they cover + modification of the file, and distribution when not linked into + a combined executable.) + + ---------------------------------------------------------------------- + + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc. + 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your + freedom to share and change it. By contrast, the GNU General Public + License is intended to guarantee your freedom to share and change free + software--to make sure the software is free for all its users. This + General Public License applies to most of the Free Software + Foundation's software and to any other program whose authors commit to + using it. (Some other Free Software Foundation software is covered by + the GNU Library General Public License instead.) You can apply it to + your programs, too. + + When we speak of free software, we are referring to freedom, not + price. Our General Public Licenses are designed to make sure that you + have the freedom to distribute copies of free software (and charge for + this service if you wish), that you receive source code or can get it + if you want it, that you can change the software or use pieces of it + in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid + anyone to deny you these rights or to ask you to surrender the rights. + These restrictions translate to certain responsibilities for you if you + distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether + gratis or for a fee, you must give the recipients all the rights that + you have. 
You must make sure that they, too, receive or can get the + source code. And you must show them these terms so they know their + rights. + + We protect your rights with two steps: (1) copyright the software, and + (2) offer you this license which gives you legal permission to copy, + distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain + that everyone understands that there is no warranty for this free + software. If the software is modified by someone else and passed on, we + want its recipients to know that what they have is not the original, so + that any problems introduced by others will not reflect on the original + authors' reputations. + + Finally, any free program is threatened constantly by software + patents. We wish to avoid the danger that redistributors of a free + program will individually obtain patent licenses, in effect making the + program proprietary. To prevent this, we have made it clear that any + patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and + modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains + a notice placed by the copyright holder saying it may be distributed + under the terms of this General Public License. The "Program", below, + refers to any such program or work, and a "work based on the Program" + means either the Program or any derivative work under copyright law: + that is to say, a work containing the Program or a portion of it, + either verbatim or with modifications and/or translated into another + language. (Hereinafter, translation is included without limitation in + the term "modification".) Each licensee is addressed as "you". + + Activities other than copying, distribution and modification are not + covered by this License; they are outside its scope. The act of + running the Program is not restricted, and the output from the Program + is covered only if its contents constitute a work based on the + Program (independent of having been made by running the Program). + Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's + source code as you receive it, in any medium, provided that you + conspicuously and appropriately publish on each copy an appropriate + copyright notice and disclaimer of warranty; keep intact all the + notices that refer to this License and to the absence of any warranty; + and give any other recipients of the Program a copy of this License + along with the Program. + + You may charge a fee for the physical act of transferring a copy, and + you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion + of it, thus forming a work based on the Program, and copy and + distribute such modifications or work under the terms of Section 1 + above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. 
+ + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + + These requirements apply to the modified work as a whole. If + identifiable sections of that work are not derived from the Program, + and can be reasonably considered independent and separate works in + themselves, then this License, and its terms, do not apply to those + sections when you distribute them as separate works. But when you + distribute the same sections as part of a whole which is a work based + on the Program, the distribution of the whole must be on the terms of + this License, whose permissions for other licensees extend to the + entire whole, and thus to each and every part regardless of who wrote it. + + Thus, it is not the intent of this section to claim rights or contest + your rights to work written entirely by you; rather, the intent is to + exercise the right to control the distribution of derivative or + collective works based on the Program. + + In addition, mere aggregation of another work not based on the Program + with the Program (or with a work based on the Program) on a volume of + a storage or distribution medium does not bring the other work under + the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, + under Section 2) in object code or executable form under the terms of + Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + + The source code for a work means the preferred form of the work for + making modifications to it. For an executable work, complete source + code means all the source code for all modules it contains, plus any + associated interface definition files, plus the scripts used to + control compilation and installation of the executable. 
However, as a + special exception, the source code distributed need not include + anything that is normally distributed (in either source or binary + form) with the major components (compiler, kernel, and so on) of the + operating system on which the executable runs, unless that component + itself accompanies the executable. + + If distribution of executable or object code is made by offering + access to copy from a designated place, then offering equivalent + access to copy the source code from the same place counts as + distribution of the source code, even though third parties are not + compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program + except as expressly provided under this License. Any attempt + otherwise to copy, modify, sublicense or distribute the Program is + void, and will automatically terminate your rights under this License. + However, parties who have received copies, or rights, from you under + this License will not have their licenses terminated so long as such + parties remain in full compliance. + + 5. You are not required to accept this License, since you have not + signed it. However, nothing else grants you permission to modify or + distribute the Program or its derivative works. These actions are + prohibited by law if you do not accept this License. Therefore, by + modifying or distributing the Program (or any work based on the + Program), you indicate your acceptance of this License to do so, and + all its terms and conditions for copying, distributing or modifying + the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the + Program), the recipient automatically receives a license from the + original licensor to copy, distribute or modify the Program subject to + these terms and conditions. You may not impose any further + restrictions on the recipients' exercise of the rights granted herein. + You are not responsible for enforcing compliance by third parties to + this License. + + 7. If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot + distribute so as to satisfy simultaneously your obligations under this + License and any other pertinent obligations, then as a consequence you + may not distribute the Program at all. For example, if a patent + license would not permit royalty-free redistribution of the Program by + all those who receive copies directly or indirectly through you, then + the only way you could satisfy both it and this License would be to + refrain entirely from distribution of the Program. + + If any portion of this section is held invalid or unenforceable under + any particular circumstance, the balance of the section is intended to + apply and the section as a whole is intended to apply in other + circumstances. + + It is not the purpose of this section to induce you to infringe any + patents or other property right claims or to contest validity of any + such claims; this section has the sole purpose of protecting the + integrity of the free software distribution system, which is + implemented by public license practices. 
Many people have made + generous contributions to the wide range of software distributed + through that system in reliance on consistent application of that + system; it is up to the author/donor to decide if he or she is willing + to distribute software through any other system and a licensee cannot + impose that choice. + + This section is intended to make thoroughly clear what is believed to + be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in + certain countries either by patents or by copyrighted interfaces, the + original copyright holder who places the Program under this License + may add an explicit geographical distribution limitation excluding + those countries, so that distribution is permitted only in or among + countries not thus excluded. In such case, this License incorporates + the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions + of the General Public License from time to time. Such new versions will + be similar in spirit to the present version, but may differ in detail to + address new problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies a version number of this License which applies to it and "any + later version", you have the option of following the terms and conditions + either of that version or of any later version published by the Free + Software Foundation. If the Program does not specify a version number of + this License, you may choose any version ever published by the Free Software + Foundation. + + 10. If you wish to incorporate parts of the Program into other free + programs whose distribution conditions are different, write to the author + to ask for permission. For software which is copyrighted by the Free + Software Foundation, write to the Free Software Foundation; we sometimes + make exceptions for this. Our decision will be guided by the two goals + of preserving the free status of all derivatives of our free software and + of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY + FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN + OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES + PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED + OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS + TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE + PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, + REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR + REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, + INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING + OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED + TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY + YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest + possible use to the public, the best way to achieve this is to make it + free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest + to attach them to the start of each source file to most effectively + convey the exclusion of warranty; and each file should have at least + the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + + + Also add information on how to contact you by electronic and paper mail. + + If the program is interactive, make it output a short notice like this + when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + + The hypothetical commands `show w' and `show c' should show the appropriate + parts of the General Public License. Of course, the commands you use may + be called something other than `show w' and `show c'; they could even be + mouse-clicks or menu items--whatever suits your program. + + You should also get your employer (if you work as a programmer) or your + school, if any, to sign a "copyright disclaimer" for the program, if + necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + + This General Public License does not permit incorporating your program into + proprietary programs. If your program is a subroutine library, you may + consider it more useful to permit linking proprietary applications with the + library. If this is what you want to do, use the GNU Library General + Public License instead of this License. + + ---------------------------------------------------------------------- + + The bundled ZLib code is licensed under the ZLib license: + + Copyright (C) 1995-2010 Jean-loup Gailly and Mark Adler + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. 
If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. + + Jean-loup Gailly Mark Adler + jloup@gzip.org madler@alumni.caltech.edu + + ---------------------------------------------------------------------- + + The Clar framework is licensed under the MIT license: + + Copyright (C) 2011 by Vicent Marti + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + ---------------------------------------------------------------------- + + The regex library (deps/regex/) is licensed under the GNU LGPL + + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + [This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your + freedom to share and change it. By contrast, the GNU General Public + Licenses are intended to guarantee your freedom to share and change + free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some + specially designated software packages--typically libraries--of the + Free Software Foundation and other authors who decide to use it. You + can use it too, but we suggest you first think carefully about whether + this license or the ordinary General Public License is the better + strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, + not price. Our General Public Licenses are designed to make sure that + you have the freedom to distribute copies of free software (and charge + for this service if you wish); that you receive source code or can get + it if you want it; that you can change the software and use pieces of + it in new free programs; and that you are informed that you can do + these things. 
+ + To protect your rights, we need to make restrictions that forbid + distributors to deny you these rights or to ask you to surrender these + rights. These restrictions translate to certain responsibilities for + you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis + or for a fee, you must give the recipients all the rights that we gave + you. You must make sure that they, too, receive or can get the source + code. If you link other code with the library, you must provide + complete object files to the recipients, so that they can relink them + with the library after making changes to the library and recompiling + it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the + library, and (2) we offer you this license, which gives you legal + permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that + there is no warranty for the free library. Also, if the library is + modified by someone else and passed on, the recipients should know + that what they have is not the original version, so that the original + author's reputation will not be affected by problems that might be + introduced by others. + + Finally, software patents pose a constant threat to the existence of + any free program. We wish to make sure that a company cannot + effectively restrict the users of a free program by obtaining a + restrictive license from a patent holder. Therefore, we insist that + any patent license obtained for a version of the library must be + consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the + ordinary GNU General Public License. This license, the GNU Lesser + General Public License, applies to certain designated libraries, and + is quite different from the ordinary General Public License. We use + this license for certain libraries in order to permit linking those + libraries into non-free programs. + + When a program is linked with a library, whether statically or using + a shared library, the combination of the two is legally speaking a + combined work, a derivative of the original library. The ordinary + General Public License therefore permits such linking only if the + entire combination fits its criteria of freedom. The Lesser General + Public License permits more lax criteria for linking other code with + the library. + + We call this license the "Lesser" General Public License because it + does Less to protect the user's freedom than the ordinary General + Public License. It also provides other free software developers Less + of an advantage over competing non-free programs. These disadvantages + are the reason we use the ordinary General Public License for many + libraries. However, the Lesser license provides advantages in certain + special circumstances. + + For example, on rare occasions, there may be a special need to + encourage the widest possible use of a certain library, so that it becomes + a de-facto standard. To achieve this, non-free programs must be + allowed to use the library. A more frequent case is that a free + library does the same job as widely used non-free libraries. In this + case, there is little to gain by limiting the free library to free + software only, so we use the Lesser General Public License. 
+ + In other cases, permission to use a particular library in non-free + programs enables a greater number of people to use a large body of + free software. For example, permission to use the GNU C Library in + non-free programs enables many more people to use the whole GNU + operating system, as well as its variant, the GNU/Linux operating + system. + + Although the Lesser General Public License is Less protective of the + users' freedom, it does ensure that the user of a program that is + linked with the Library has the freedom and the wherewithal to run + that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and + modification follow. Pay close attention to the difference between a + "work based on the library" and a "work that uses the library". The + former contains code derived from the library, whereas the latter must + be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other + program which contains a notice placed by the copyright holder or + other authorized party saying it may be distributed under the terms of + this Lesser General Public License (also called "this License"). + Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data + prepared so as to be conveniently linked with application programs + (which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work + which has been distributed under these terms. A "work based on the + Library" means either the Library or any derivative work under + copyright law: that is to say, a work containing the Library or a + portion of it, either verbatim or with modifications and/or translated + straightforwardly into another language. (Hereinafter, translation is + included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for + making modifications to it. For a library, complete source code means + all the source code for all modules it contains, plus any associated + interface definition files, plus the scripts used to control compilation + and installation of the library. + + Activities other than copying, distribution and modification are not + covered by this License; they are outside its scope. The act of + running a program using the Library is not restricted, and output from + such a program is covered only if its contents constitute a work based + on the Library (independent of the use of the Library in a tool for + writing it). Whether that is true depends on what the Library does + and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's + complete source code as you receive it, in any medium, provided that + you conspicuously and appropriately publish on each copy an + appropriate copyright notice and disclaimer of warranty; keep intact + all the notices that refer to this License and to the absence of any + warranty; and distribute a copy of this License along with the + Library. + + You may charge a fee for the physical act of transferring a copy, + and you may at your option offer warranty protection in exchange for a + fee. + + 2. 
You may modify your copy or copies of the Library or any portion + of it, thus forming a work based on the Library, and copy and + distribute such modifications or work under the terms of Section 1 + above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + + These requirements apply to the modified work as a whole. If + identifiable sections of that work are not derived from the Library, + and can be reasonably considered independent and separate works in + themselves, then this License, and its terms, do not apply to those + sections when you distribute them as separate works. But when you + distribute the same sections as part of a whole which is a work based + on the Library, the distribution of the whole must be on the terms of + this License, whose permissions for other licensees extend to the + entire whole, and thus to each and every part regardless of who wrote + it. + + Thus, it is not the intent of this section to claim rights or contest + your rights to work written entirely by you; rather, the intent is to + exercise the right to control the distribution of derivative or + collective works based on the Library. + + In addition, mere aggregation of another work not based on the Library + with the Library (or with a work based on the Library) on a volume of + a storage or distribution medium does not bring the other work under + the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public + License instead of this License to a given copy of the Library. To do + this, you must alter all the notices that refer to this License, so + that they refer to the ordinary GNU General Public License, version 2, + instead of to this License. (If a newer version than version 2 of the + ordinary GNU General Public License has appeared, then you can specify + that version instead if you wish.) Do not make any other change in + these notices. + + Once this change is made in a given copy, it is irreversible for + that copy, so the ordinary GNU General Public License applies to all + subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of + the Library into a program that is not a library. + + 4. 
You may copy and distribute the Library (or a portion or + derivative of it, under Section 2) in object code or executable form + under the terms of Sections 1 and 2 above provided that you accompany + it with the complete corresponding machine-readable source code, which + must be distributed under the terms of Sections 1 and 2 above on a + medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy + from a designated place, then offering equivalent access to copy the + source code from the same place satisfies the requirement to + distribute the source code, even though third parties are not + compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the + Library, but is designed to work with the Library by being compiled or + linked with it, is called a "work that uses the Library". Such a + work, in isolation, is not a derivative work of the Library, and + therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library + creates an executable that is a derivative of the Library (because it + contains portions of the Library), rather than a "work that uses the + library". The executable is therefore covered by this License. + Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file + that is part of the Library, the object code for the work may be a + derivative work of the Library even though the source code is not. + Whether this is true is especially significant if the work can be + linked without the Library, or if the work is itself a library. The + threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data + structure layouts and accessors, and small macros and small inline + functions (ten lines or less in length), then the use of the object + file is unrestricted, regardless of whether it is legally a derivative + work. (Executables containing this object code plus portions of the + Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may + distribute the object code for the work under the terms of Section 6. + Any executables containing that work also fall under Section 6, + whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or + link a "work that uses the Library" with the Library to produce a + work containing portions of the Library, and distribute that work + under terms of your choice, provided that the terms permit + modification of the work for the customer's own use and reverse + engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the + Library is used in it and that the Library and its use are covered by + this License. You must supply a copy of this License. If the work + during execution displays copyright notices, you must include the + copyright notice for the Library among them, as well as a reference + directing the user to the copy of this License. 
Also, you must do one + of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the + Library" must include any data and utility programs needed for + reproducing the executable from it. However, as a special exception, + the materials to be distributed need not include anything that is + normally distributed (in either source or binary form) with the major + components (compiler, kernel, and so on) of the operating system on + which the executable runs, unless that component itself accompanies + the executable. + + It may happen that this requirement contradicts the license + restrictions of other proprietary libraries that do not normally + accompany the operating system. Such a contradiction means you cannot + use both them and the Library together in an executable that you + distribute. + + 7. You may place library facilities that are a work based on the + Library side-by-side in a single library together with other library + facilities not covered by this License, and distribute such a combined + library, provided that the separate distribution of the work based on + the Library and of the other library facilities is otherwise + permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute + the Library except as expressly provided under this License. 
Any + attempt otherwise to copy, modify, sublicense, link with, or + distribute the Library is void, and will automatically terminate your + rights under this License. However, parties who have received copies, + or rights, from you under this License will not have their licenses + terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not + signed it. However, nothing else grants you permission to modify or + distribute the Library or its derivative works. These actions are + prohibited by law if you do not accept this License. Therefore, by + modifying or distributing the Library (or any work based on the + Library), you indicate your acceptance of this License to do so, and + all its terms and conditions for copying, distributing or modifying + the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the + Library), the recipient automatically receives a license from the + original licensor to copy, distribute, link with or modify the Library + subject to these terms and conditions. You may not impose any further + restrictions on the recipients' exercise of the rights granted herein. + You are not responsible for enforcing compliance by third parties with + this License. + + 11. If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot + distribute so as to satisfy simultaneously your obligations under this + License and any other pertinent obligations, then as a consequence you + may not distribute the Library at all. For example, if a patent + license would not permit royalty-free redistribution of the Library by + all those who receive copies directly or indirectly through you, then + the only way you could satisfy both it and this License would be to + refrain entirely from distribution of the Library. + + If any portion of this section is held invalid or unenforceable under any + particular circumstance, the balance of the section is intended to apply, + and the section as a whole is intended to apply in other circumstances. + + It is not the purpose of this section to induce you to infringe any + patents or other property right claims or to contest validity of any + such claims; this section has the sole purpose of protecting the + integrity of the free software distribution system which is + implemented by public license practices. Many people have made + generous contributions to the wide range of software distributed + through that system in reliance on consistent application of that + system; it is up to the author/donor to decide if he or she is willing + to distribute software through any other system and a licensee cannot + impose that choice. + + This section is intended to make thoroughly clear what is believed to + be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in + certain countries either by patents or by copyrighted interfaces, the + original copyright holder who places the Library under this License may add + an explicit geographical distribution limitation excluding those countries, + so that distribution is permitted only in or among countries not thus + excluded. 
In such case, this License incorporates the limitation as if + written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new + versions of the Lesser General Public License from time to time. + Such new versions will be similar in spirit to the present version, + but may differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the Library + specifies a version number of this License which applies to it and + "any later version", you have the option of following the terms and + conditions either of that version or of any later version published by + the Free Software Foundation. If the Library does not specify a + license version number, you may choose any version ever published by + the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free + programs whose distribution conditions are incompatible with these, + write to the author to ask for permission. For software which is + copyrighted by the Free Software Foundation, write to the Free + Software Foundation; we sometimes make exceptions for this. Our + decision will be guided by the two goals of preserving the free status + of all derivatives of our free software and of promoting the sharing + and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO + WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. + EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR + OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY + KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE + LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME + THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN + WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY + AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU + FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR + CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE + LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING + RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A + FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF + SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest + possible use to the public, we recommend making it free software that + everyone can redistribute and change. You can do so by permitting + redistribution under these terms (or, alternatively, under the terms of the + ordinary General Public License). + + To apply these terms, attach the following notices to the library. It is + safest to attach them to the start of each source file to most effectively + convey the exclusion of warranty; and each file should have at least the + "copyright" line and a pointer to where the full notice is found. 
+ + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + + Also add information on how to contact you by electronic and paper mail. + + You should also get your employer (if you work as a programmer) or your + school, if any, to sign a "copyright disclaimer" for the library, if + necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + + That's all there is to it! + + ---------------------------------------------------------------------- + +* libssh2 - http://www.libssh2.org/license.html + + Copyright (c) 2004-2007 Sara Golemon + Copyright (c) 2005,2006 Mikhail Gusarov + Copyright (c) 2006-2007 The Written Word, Inc. + Copyright (c) 2007 Eli Fant + Copyright (c) 2009 Daniel Stenberg + Copyright (C) 2008, 2009 Simon Josefsson + All rights reserved. + + Redistribution and use in source and binary forms, + with or without modification, are permitted provided + that the following conditions are met: + + Redistributions of source code must retain the above + copyright notice, this list of conditions and the + following disclaimer. + + Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + Neither the name of the copyright holder nor the names + of any other contributors may be used to endorse or + promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY + OF SUCH DAMAGE. + +* libcurl - http://curl.haxx.se/docs/copyright.html + + COPYRIGHT AND PERMISSION NOTICE + + Copyright (c) 1996 - 2014, Daniel Stenberg, daniel@haxx.se. + + All rights reserved. + + Permission to use, copy, modify, and distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. + IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE + USE OR OTHER DEALINGS IN THE SOFTWARE. + + Except as contained in this notice, the name of a copyright holder shall not + be used in advertising or otherwise to promote the sale, use or other + dealings in this Software without prior written authorization of the + copyright holder. + +* flate2-rs - https://github.com/alexcrichton/flate2-rs/blob/master/LICENSE-MIT +* link-config - https://github.com/alexcrichton/link-config/blob/master/LICENSE-MIT +* openssl-static-sys - https://github.com/alexcrichton/openssl-static-sys/blob/master/LICENSE-MIT +* toml-rs - https://github.com/alexcrichton/toml-rs/blob/master/LICENSE-MIT +* libssh2-static-sys - https://github.com/alexcrichton/libssh2-static-sys/blob/master/LICENSE-MIT +* git2-rs - https://github.com/alexcrichton/git2-rs/blob/master/LICENSE-MIT +* tar-rs - https://github.com/alexcrichton/tar-rs/blob/master/LICENSE-MIT + + Copyright (c) 2014 Alex Crichton + + Permission is hereby granted, free of charge, to any + person obtaining a copy of this software and associated + documentation files (the "Software"), to deal in the + Software without restriction, including without + limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software + is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice + shall be included in all copies or substantial portions + of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF + ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED + TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A + PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT + SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + +* glob - https://github.com/rust-lang/glob/blob/master/LICENSE-MIT +* semver - https://github.com/rust-lang/semver/blob/master/LICENSE-MIT + + Copyright (c) 2014 The Rust Project Developers + + Permission is hereby granted, free of charge, to any + person obtaining a copy of this software and associated + documentation files (the "Software"), to deal in the + Software without restriction, including without + limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software + is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice + shall be included in all copies or substantial portions + of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF + ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED + TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A + PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT + SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + +* rust-url - https://github.com/servo/rust-url/blob/master/LICENSE-MIT + + Copyright (c) 2006-2009 Graydon Hoare + Copyright (c) 2009-2013 Mozilla Foundation + + Permission is hereby granted, free of charge, to any + person obtaining a copy of this software and associated + documentation files (the "Software"), to deal in the + Software without restriction, including without + limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software + is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice + shall be included in all copies or substantial portions + of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF + ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED + TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A + PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT + SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + +* rust-encoding - https://github.com/lifthrasiir/rust-encoding/blob/master/LICENSE.txt + + The MIT License (MIT) + + Copyright (c) 2013, Kang Seonghoon. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + +* curl-rust - https://github.com/carllerche/curl-rust/blob/master/LICENSE + + Copyright (c) 2014 Carl Lerche + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + +* docopt.rs - https://github.com/docopt/docopt.rs/blob/master/UNLICENSE + + This is free and unencumbered software released into the public domain. + + Anyone is free to copy, modify, publish, use, compile, sell, or + distribute this software, either in source code form or as a compiled + binary, for any purpose, commercial or non-commercial, and by any + means. + + In jurisdictions that recognize copyright laws, the author or authors + of this software dedicate any and all copyright interest in the + software to the public domain. We make this dedication for the benefit + of the public at large and to the detriment of our heirs and + successors. We intend this dedication to be an overt act of + relinquishment in perpetuity of all present and future rights to this + software under copyright law. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + For more information, please refer to + diff --git a/README.md b/README.md new file mode 100644 index 000000000..fe87f154e --- /dev/null +++ b/README.md @@ -0,0 +1,90 @@ +# Cargo + +Cargo downloads your Rust project’s dependencies and compiles your project. + +Learn more at https://doc.rust-lang.org/cargo/ + +## Code Status + +[![Build Status](https://travis-ci.org/rust-lang/cargo.svg?branch=master)](https://travis-ci.org/rust-lang/cargo) +[![Build Status](https://ci.appveyor.com/api/projects/status/github/rust-lang/cargo?branch=master&svg=true)](https://ci.appveyor.com/project/rust-lang-libs/cargo) + +Code documentation: https://docs.rs/cargo/ + +## Installing Cargo + +Cargo is distributed by default with Rust, so if you've got `rustc` installed +locally you probably also have `cargo` installed locally. + +## Compiling from Source + +Cargo requires the following tools and packages to build: + +* `python` +* `curl` (on Unix) +* `cmake` +* OpenSSL headers (only for Unix, this is the `libssl-dev` package on ubuntu) +* `cargo` and `rustc` + +First, you'll want to check out this repository + +``` +git clone https://github.com/rust-lang/cargo +cd cargo +``` + +With `cargo` already installed, you can simply run: + +``` +cargo build --release +``` + +## Adding new subcommands to Cargo + +Cargo is designed to be extensible with new subcommands without having to modify +Cargo itself. See [the Wiki page][third-party-subcommands] for more details and +a list of known community-developed subcommands. + +[third-party-subcommands]: https://github.com/rust-lang/cargo/wiki/Third-party-cargo-subcommands + + +## Releases + +High level release notes are available as part of [Rust's release notes][rel]. +Cargo releases coincide with Rust releases. 
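+
+As an illustration of the mechanism described under "Adding new subcommands to
+Cargo" above: any executable named `cargo-foo` found on your `PATH` can be
+invoked as `cargo foo`. A minimal sketch of such a subcommand follows (the name
+`cargo-foo` is purely hypothetical; Cargo passes the subcommand name itself as
+the first argument, followed by the remaining command-line arguments):
+
+```rust
+use std::env;
+
+fn main() {
+    // Invoked as `cargo-foo foo <args>...`, so skip the binary name and the
+    // repeated subcommand name to get the user-supplied arguments.
+    let args: Vec<String> = env::args().skip(2).collect();
+    println!("cargo-foo called with {:?}", args);
+}
+```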
+
+[rel]: https://github.com/rust-lang/rust/blob/master/RELEASES.md
+
+## Reporting issues
+
+Found a bug? We'd love to know about it!
+
+Please report all issues on the github [issue tracker][issues].
+
+[issues]: https://github.com/rust-lang/cargo/issues
+
+## Contributing
+
+See [CONTRIBUTING.md](CONTRIBUTING.md). You may also find the architecture
+documentation useful ([ARCHITECTURE.md](ARCHITECTURE.md)).
+
+## License
+
+Cargo is primarily distributed under the terms of both the MIT license
+and the Apache License (Version 2.0).
+
+See LICENSE-APACHE and LICENSE-MIT for details.
+
+### Third party software
+
+This product includes software developed by the OpenSSL Project
+for use in the OpenSSL Toolkit (http://www.openssl.org/).
+
+In binary form, this product includes software that is licensed under the
+terms of the GNU General Public License, version 2, with a linking exception,
+which can be obtained from the [upstream repository][1].
+
+See LICENSE-THIRD-PARTY for details.
+
+[1]: https://github.com/libgit2/libgit2
+
diff --git a/appveyor.yml b/appveyor.yml
new file mode 100644
index 000000000..d85f19d22
--- /dev/null
+++ b/appveyor.yml
@@ -0,0 +1,21 @@
+environment:
+  matrix:
+  - TARGET: x86_64-pc-windows-msvc
+    OTHER_TARGET: i686-pc-windows-msvc
+    MAKE_TARGETS: test-unit-x86_64-pc-windows-msvc
+
+install:
+  - appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe
+  - rustup-init.exe -y --default-host x86_64-pc-windows-msvc --default-toolchain nightly-2018-03-07
+  - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin
+  - rustup target add %OTHER_TARGET%
+  - rustc -V
+  - cargo -V
+  - git submodule update --init
+
+clone_depth: 1
+
+build: false
+
+test_script:
+  - cargo test
diff --git a/src/bin/cargo.rs b/src/bin/cargo.rs
new file mode 100644
index 000000000..7d7c7db6e
--- /dev/null
+++ b/src/bin/cargo.rs
@@ -0,0 +1,212 @@
+extern crate cargo;
+extern crate clap;
+extern crate env_logger;
+#[macro_use]
+extern crate failure;
+extern crate git2_curl;
+extern crate log;
+#[macro_use]
+extern crate serde_derive;
+extern crate serde_json;
+extern crate toml;
+
+use std::env;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::collections::BTreeSet;
+
+use cargo::core::shell::Shell;
+use cargo::util::{self, lev_distance, CargoResult, CliResult, Config};
+use cargo::util::{CliError, ProcessError};
+
+mod cli;
+mod command_prelude;
+mod commands;
+
+fn main() {
+    env_logger::init();
+
+    let mut config = match Config::default() {
+        Ok(cfg) => cfg,
+        Err(e) => {
+            let mut shell = Shell::new();
+            cargo::exit_with_error(e.into(), &mut shell)
+        }
+    };
+
+    let result = {
+        init_git_transports(&mut config);
+        let _token = cargo::util::job::setup();
+        cli::main(&mut config)
+    };
+
+    match result {
+        Err(e) => cargo::exit_with_error(e, &mut *config.shell()),
+        Ok(()) => {}
+    }
+}
+
+/// Looks up `command` in the `[alias]` section of the configuration,
+/// returning the replacement command line split into words, if one is defined.
+fn aliased_command(config: &Config, command: &str) -> CargoResult<Option<Vec<String>>> {
+    let alias_name = format!("alias.{}", command);
+    let mut result = Ok(None);
+    match config.get_string(&alias_name) {
+        Ok(value) => {
+            if let Some(record) = value {
+                let alias_commands = record
+                    .val
+                    .split_whitespace()
+                    .map(|s| s.to_string())
+                    .collect();
+                result = Ok(Some(alias_commands));
+            }
+        }
+        Err(_) => {
+            let value = config.get_list(&alias_name)?;
+            if let Some(record) = value {
+                let alias_commands: Vec<String> =
+                    record.val.iter().map(|s| s.0.to_string()).collect();
+                result = Ok(Some(alias_commands));
+            }
+        }
+    }
+    result
+}
+
+/// List all runnable commands
+fn list_commands(config: &Config) -> BTreeSet<(String, Option<String>)> {
+    let prefix = "cargo-";
+    let suffix = env::consts::EXE_SUFFIX;
+    let mut commands = BTreeSet::new();
+    for dir in search_directories(config) {
+        let entries = match fs::read_dir(dir) {
+            Ok(entries) => entries,
+            _ => continue,
+        };
+        for entry in entries.filter_map(|e| e.ok()) {
+            let path = entry.path();
+            let filename = match path.file_name().and_then(|s| s.to_str()) {
+                Some(filename) => filename,
+                _ => continue,
+            };
+            if !filename.starts_with(prefix) || !filename.ends_with(suffix) {
+                continue;
+            }
+            if is_executable(entry.path()) {
+                let end = filename.len() - suffix.len();
+                commands.insert((
+                    filename[prefix.len()..end].to_string(),
+                    Some(path.display().to_string()),
+                ));
+            }
+        }
+    }
+
+    for cmd in commands::builtin() {
+        commands.insert((cmd.get_name().to_string(), None));
+    }
+
+    commands
+}
+
+/// Suggests the known subcommand whose name is closest to `cmd`, if any.
+fn find_closest(config: &Config, cmd: &str) -> Option<String> {
+    let cmds = list_commands(config);
+    // Only consider candidates with a lev_distance of 3 or less so we don't
+    // suggest out-of-the-blue options.
+    let mut filtered = cmds.iter()
+        .map(|&(ref c, _)| (lev_distance(c, cmd), c))
+        .filter(|&(d, _)| d < 4)
+        .collect::<Vec<_>>();
+    filtered.sort_by(|a, b| a.0.cmp(&b.0));
+    filtered.get(0).map(|slot| slot.1.clone())
+}
+
+/// Runs the external subcommand binary `cargo-<cmd>`, replacing this process.
+fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&str]) -> CliResult {
+    let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX);
+    let path = search_directories(config)
+        .iter()
+        .map(|dir| dir.join(&command_exe))
+        .find(|file| is_executable(file));
+    let command = match path {
+        Some(command) => command,
+        None => {
+            let err = match find_closest(config, cmd) {
+                Some(closest) => format_err!(
+                    "no such subcommand: `{}`\n\n\tDid you mean `{}`?\n",
+                    cmd,
+                    closest
+                ),
+                None => format_err!("no such subcommand: `{}`", cmd),
+            };
+            return Err(CliError::new(err, 101));
+        }
+    };
+
+    let cargo_exe = config.cargo_exe()?;
+    let err = match util::process(&command)
+        .env(cargo::CARGO_ENV, cargo_exe)
+        .args(args)
+        .exec_replace()
+    {
+        Ok(()) => return Ok(()),
+        Err(e) => e,
+    };
+
+    if let Some(perr) = err.downcast_ref::<ProcessError>() {
+        if let Some(code) = perr.exit.as_ref().and_then(|c| c.code()) {
+            return Err(CliError::code(code));
+        }
+    }
+    Err(CliError::new(err, 101))
+}
+
+#[cfg(unix)]
+fn is_executable<P: AsRef<Path>>(path: P) -> bool {
+    use std::os::unix::prelude::*;
+    fs::metadata(path)
+        .map(|metadata| metadata.is_file() && metadata.permissions().mode() & 0o111 != 0)
+        .unwrap_or(false)
+}
+#[cfg(windows)]
+fn is_executable<P: AsRef<Path>>(path: P) -> bool {
+    fs::metadata(path)
+        .map(|metadata| metadata.is_file())
+        .unwrap_or(false)
+}
+
+/// Directories searched for external subcommands: `$CARGO_HOME/bin`, then `$PATH`.
+fn search_directories(config: &Config) -> Vec<PathBuf> {
+    let mut dirs = vec![config.home().clone().into_path_unlocked().join("bin")];
+    if let Some(val) = env::var_os("PATH") {
+        dirs.extend(env::split_paths(&val));
+    }
+    dirs
+}
+
+fn init_git_transports(config: &Config) {
+    // Only use a custom transport if any HTTP options are specified,
+    // such as proxies or custom certificate authorities. The custom
+    // transport, however, is not as well battle-tested.
+
+    match cargo::ops::needs_custom_http_transport(config) {
+        Ok(true) => {}
+        _ => return,
+    }
+
+    let handle = match cargo::ops::http_handle(config) {
+        Ok(handle) => handle,
+        Err(..) => return,
+    };
+
+    // The unsafety of the registration function derives from two aspects:
+    //
+    // 1. This call must be synchronized with all other registration calls as
+    //    well as construction of new transports.
+    // 2. The argument is leaked.
+    //
+    // We're clear on point (1) because this is only called at the start of this
+    // binary (we know what the state of the world looks like) and we're mostly
+    // clear on point (2) because we'd only free it after everything is done
+    // anyway
+    unsafe {
+        git2_curl::register(handle);
+    }
+}
diff --git a/src/bin/cli.rs b/src/bin/cli.rs
new file mode 100644
index 000000000..da5f4eb59
--- /dev/null
+++ b/src/bin/cli.rs
@@ -0,0 +1,173 @@
+extern crate clap;
+
+use clap::{AppSettings, Arg, ArgMatches};
+
+use cargo::{self, CliResult, Config};
+
+use super::list_commands;
+use super::commands;
+use command_prelude::*;
+
+pub fn main(config: &mut Config) -> CliResult {
+    let args = cli().get_matches_safe()?;
+    let is_verbose = args.occurrences_of("verbose") > 0;
+    if args.is_present("version") {
+        let version = cargo::version();
+        println!("{}", version);
+        if is_verbose {
+            println!(
+                "release: {}.{}.{}",
+                version.major, version.minor, version.patch
+            );
+            if let Some(ref cfg) = version.cfg_info {
+                if let Some(ref ci) = cfg.commit_info {
+                    println!("commit-hash: {}", ci.commit_hash);
+                    println!("commit-date: {}", ci.commit_date);
+                }
+            }
+        }
+        return Ok(());
+    }
+
+    if let Some(ref code) = args.value_of("explain") {
+        let mut process = config.rustc()?.process();
+        process.arg("--explain").arg(code).exec()?;
+        return Ok(());
+    }
+
+    if args.is_present("list") {
+        println!("Installed Commands:");
+        for command in list_commands(config) {
+            let (command, path) = command;
+            if is_verbose {
+                match path {
+                    Some(p) => println!("    {:<20} {}", command, p),
+                    None => println!("    {:<20}", command),
+                }
+            } else {
+                println!("    {}", command);
+            }
+        }
+        return Ok(());
+    }
+
+    execute_subcommand(config, args)
+}
+
+fn execute_subcommand(config: &mut Config, args: ArgMatches) -> CliResult {
+    config.configure(
+        args.occurrences_of("verbose") as u32,
+        if args.is_present("quiet") {
+            Some(true)
+        } else {
+            None
+        },
+        &args.value_of("color").map(|s| s.to_string()),
+        args.is_present("frozen"),
+        args.is_present("locked"),
+        &args.values_of_lossy("unstable-features")
+            .unwrap_or_default(),
+    )?;
+
+    let (cmd, args) = match args.subcommand() {
+        (cmd, Some(args)) => (cmd, args),
+        _ => {
+            cli().print_help()?;
+            return Ok(());
+        }
+    };
+
+    if let Some(exec) = commands::builtin_exec(cmd) {
+        return exec(config, args);
+    }
+
+    if let Some(mut alias) = super::aliased_command(config, cmd)? {
+        alias.extend(
+            args.values_of("")
+                .unwrap_or_default()
+                .map(|s| s.to_string()),
+        );
+        let args = cli()
+            .setting(AppSettings::NoBinaryName)
+            .get_matches_from_safe(alias)?;
+        return execute_subcommand(config, args);
+    }
+    let mut ext_args: Vec<&str> = vec![cmd];
+    ext_args.extend(args.values_of("").unwrap_or_default());
+    super::execute_external_subcommand(config, cmd, &ext_args)
+}
+
+fn cli() -> App {
+    let app = App::new("cargo")
+        .settings(&[
+            AppSettings::UnifiedHelpMessage,
+            AppSettings::DeriveDisplayOrder,
+            AppSettings::VersionlessSubcommands,
+            AppSettings::AllowExternalSubcommands,
+        ])
+        .about("")
+        .template(
+            "\
+Rust's package manager
+
+USAGE:
+    {usage}
+
+OPTIONS:
+{unified}
+
+Some common cargo commands are (see all commands with --list):
+    build       Compile the current project
+    check       Analyze the current project and report errors, but don't build object files
+    clean       Remove the target directory
+    doc         Build this project's and its dependencies' documentation
+    new         Create a new cargo project
+    init        Create a new cargo project in an existing directory
+    run         Build and execute src/main.rs
+    test        Run the tests
+    bench       Run the benchmarks
+    update      Update dependencies listed in Cargo.lock
+    search      Search registry for crates
+    publish     Package and upload this project to the registry
+    install     Install a Rust binary
+    uninstall   Uninstall a Rust binary
+
+See 'cargo help <command>' for more information on a specific command.\n",
+        )
+        .arg(opt("version", "Print version info and exit").short("V"))
+        .arg(opt("list", "List installed commands"))
+        .arg(opt("explain", "Run `rustc --explain CODE`").value_name("CODE"))
+        .arg(
+            opt(
+                "verbose",
+                "Use verbose output (-vv very verbose/build.rs output)",
+            ).short("v")
+                .multiple(true)
+                .global(true),
+        )
+        .arg(
+            opt("quiet", "No output printed to stdout")
+                .short("q")
+                .global(true),
+        )
+        .arg(
+            opt("color", "Coloring: auto, always, never")
+                .value_name("WHEN")
+                .global(true),
+        )
+        .arg(opt("frozen", "Require Cargo.lock and cache are up to date").global(true))
+        .arg(opt("locked", "Require Cargo.lock is up to date").global(true))
+        .arg(
+            Arg::with_name("unstable-features")
+                .help("Unstable (nightly-only) flags to Cargo")
+                .short("Z")
+                .value_name("FLAG")
+                .multiple(true)
+                .number_of_values(1)
+                .global(true),
+        )
+        .subcommands(commands::builtin());
+    app
}
diff --git a/src/bin/command_prelude.rs b/src/bin/command_prelude.rs
new file mode 100644
index 000000000..bc738a052
--- /dev/null
+++ b/src/bin/command_prelude.rs
@@ -0,0 +1,393 @@
+use std::path::PathBuf;
+use std::fs;
+
+use clap::{self, SubCommand};
+use cargo::CargoResult;
+use cargo::core::Workspace;
+use cargo::ops::{CompileFilter, CompileMode, CompileOptions, MessageFormat, NewOptions, Packages,
+                 VersionControl};
+use cargo::util::paths;
+use cargo::util::important_paths::find_root_manifest_for_wd;
+
+pub use clap::{AppSettings, Arg, ArgMatches};
+pub use cargo::{CliError, CliResult, Config};
+
+pub type App = clap::App<'static, 'static>;
+
+pub trait AppExt: Sized {
+    fn _arg(self, arg: Arg<'static, 'static>) -> Self;
+
+    fn arg_package_spec(
+        self,
+        package: &'static str,
+        all: &'static str,
+        exclude: &'static str,
+    ) -> Self {
+        self.arg_package_spec_simple(package)
+            ._arg(opt("all", all))
+            ._arg(multi_opt("exclude", "SPEC", exclude))
+    }
+
+    fn arg_package_spec_simple(self, package: &'static str) -> Self {
+        self._arg(multi_opt("package", "SPEC", package).short("p"))
+    }
+
+    fn arg_package(self, package: &'static str) -> Self {
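+        // Unlike `arg_package_spec_simple` above, this accepts a single
+        // `--package <SPEC>` value rather than a repeatable one.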
self._arg(opt("package", package).short("p").value_name("SPEC")) + } + + fn arg_jobs(self) -> Self { + self._arg( + opt("jobs", "Number of parallel jobs, defaults to # of CPUs") + .short("j") + .value_name("N"), + ) + } + + fn arg_targets_all( + self, + lib: &'static str, + bin: &'static str, + bins: &'static str, + examle: &'static str, + examles: &'static str, + test: &'static str, + tests: &'static str, + bench: &'static str, + benchs: &'static str, + all: &'static str, + ) -> Self { + self.arg_targets_lib_bin(lib, bin, bins) + ._arg(multi_opt("example", "NAME", examle)) + ._arg(opt("examples", examles)) + ._arg(multi_opt("test", "NAME", test)) + ._arg(opt("tests", tests)) + ._arg(multi_opt("bench", "NAME", bench)) + ._arg(opt("benches", benchs)) + ._arg(opt("all-targets", all)) + } + + fn arg_targets_lib_bin(self, lib: &'static str, bin: &'static str, bins: &'static str) -> Self { + self._arg(opt("lib", lib)) + ._arg(multi_opt("bin", "NAME", bin)) + ._arg(opt("bins", bins)) + } + + fn arg_targets_bins_examples( + self, + bin: &'static str, + bins: &'static str, + example: &'static str, + examples: &'static str, + ) -> Self { + self._arg(multi_opt("bin", "NAME", bin)) + ._arg(opt("bins", bins)) + ._arg(multi_opt("example", "NAME", example)) + ._arg(opt("examples", examples)) + } + + fn arg_targets_bin_example(self, bin: &'static str, example: &'static str) -> Self { + self._arg(multi_opt("bin", "NAME", bin)) + ._arg(multi_opt("example", "NAME", example)) + } + + fn arg_features(self) -> Self { + self._arg( + opt("features", "Space-separated list of features to activate").value_name("FEATURES"), + )._arg(opt("all-features", "Activate all available features")) + ._arg(opt( + "no-default-features", + "Do not activate the `default` feature", + )) + } + + fn arg_release(self, release: &'static str) -> Self { + self._arg(opt("release", release)) + } + + fn arg_target_triple(self, target: &'static str) -> Self { + self._arg(opt("target", target).value_name("TRIPLE")) + } + + fn arg_manifest_path(self) -> Self { + self._arg(opt("manifest-path", "Path to Cargo.toml").value_name("PATH")) + } + + fn arg_message_format(self) -> Self { + self._arg( + opt("message-format", "Error format") + .value_name("FMT") + .case_insensitive(true) + .possible_values(&["human", "json"]) + .default_value("human"), + ) + } + + fn arg_new_opts(self) -> Self { + self._arg( + opt( + "vcs", + "\ + Initialize a new repository for the given version \ + control system (git, hg, pijul, or fossil) or do not \ + initialize any version control at all (none), overriding \ + a global configuration.", + ).value_name("VCS") + .possible_values(&["git", "hg", "pijul", "fossil", "none"]), + )._arg(opt("bin", "Use a binary (application) template [default]")) + ._arg(opt("lib", "Use a library template")) + ._arg( + opt( + "name", + "Set the resulting package name, defaults to the directory name", + ).value_name("NAME"), + ) + } + + fn arg_index(self) -> Self { + self._arg(opt("index", "Registry index to upload the package to").value_name("INDEX")) + ._arg( + opt("host", "DEPRECATED, renamed to '--index'") + .value_name("HOST") + .hidden(true), + ) + } +} + +impl AppExt for App { + fn _arg(self, arg: Arg<'static, 'static>) -> Self { + self.arg(arg) + } +} + +pub fn opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> { + Arg::with_name(name).long(name).help(help) +} + +pub fn multi_opt( + name: &'static str, + value_name: &'static str, + help: &'static str, +) -> Arg<'static, 'static> { + // Note that all 
`.multiple(true)` arguments in Cargo should specify + // `.number_of_values(1)` as well, so that `--foo val1 val2` is + // **not** parsed as `foo` with values ["val1", "val2"]. + // `number_of_values` should become the default in clap 3. + opt(name, help) + .value_name(value_name) + .multiple(true) + .number_of_values(1) +} + +pub fn subcommand(name: &'static str) -> App { + SubCommand::with_name(name).settings(&[ + AppSettings::UnifiedHelpMessage, + AppSettings::DeriveDisplayOrder, + AppSettings::DontCollapseArgsInUsage, + ]) +} + +pub trait ArgMatchesExt { + fn value_of_u32(&self, name: &str) -> CargoResult> { + let arg = match self._value_of(name) { + None => None, + Some(arg) => Some(arg.parse::().map_err(|_| { + clap::Error::value_validation_auto(format!("could not parse `{}` as a number", arg)) + })?), + }; + Ok(arg) + } + + /// Returns value of the `name` command-line argument as an absolute path + fn value_of_path(&self, name: &str, config: &Config) -> Option { + self._value_of(name).map(|path| config.cwd().join(path)) + } + + fn root_manifest(&self, config: &Config) -> CargoResult { + if let Some(path) = self.value_of_path("manifest-path", config) { + // In general, we try to avoid normalizing paths in Cargo, + // but in this particular case we need it to fix #3586. + let path = paths::normalize_path(&path); + if !path.ends_with("Cargo.toml") { + bail!("the manifest-path must be a path to a Cargo.toml file") + } + if !fs::metadata(&path).is_ok() { + bail!( + "manifest path `{}` does not exist", + self._value_of("manifest-path").unwrap() + ) + } + return Ok(path); + } + find_root_manifest_for_wd(config.cwd()) + } + + fn workspace<'a>(&self, config: &'a Config) -> CargoResult> { + let root = self.root_manifest(config)?; + let mut ws = Workspace::new(&root, config)?; + if config.cli_unstable().avoid_dev_deps { + ws.set_require_optional_deps(false); + } + Ok(ws) + } + + fn jobs(&self) -> CargoResult> { + self.value_of_u32("jobs") + } + + fn target(&self) -> Option { + self._value_of("target").map(|s| s.to_string()) + } + + fn compile_options<'a>( + &self, + config: &'a Config, + mode: CompileMode, + ) -> CargoResult> { + let spec = Packages::from_flags( + self._is_present("all"), + self._values_of("exclude"), + self._values_of("package"), + )?; + + let message_format = match self._value_of("message-format") { + None => MessageFormat::Human, + Some(f) => { + if f.eq_ignore_ascii_case("json") { + MessageFormat::Json + } else if f.eq_ignore_ascii_case("human") { + MessageFormat::Human + } else { + panic!("Impossible message format: {:?}", f) + } + } + }; + + let opts = CompileOptions { + config, + jobs: self.jobs()?, + target: self.target(), + features: self._values_of("features"), + all_features: self._is_present("all-features"), + no_default_features: self._is_present("no-default-features"), + spec, + mode, + release: self._is_present("release"), + filter: CompileFilter::new( + self._is_present("lib"), + self._values_of("bin"), + self._is_present("bins"), + self._values_of("test"), + self._is_present("tests"), + self._values_of("example"), + self._is_present("examples"), + self._values_of("bench"), + self._is_present("benches"), + self._is_present("all-targets"), + ), + message_format, + target_rustdoc_args: None, + target_rustc_args: None, + }; + Ok(opts) + } + + fn compile_options_for_single_package<'a>( + &self, + config: &'a Config, + mode: CompileMode, + ) -> CargoResult> { + let mut compile_opts = self.compile_options(config, mode)?; + compile_opts.spec = 
+        Ok(compile_opts)
+    }
+
+    fn new_options(&self, config: &Config) -> CargoResult<NewOptions> {
+        let vcs = self._value_of("vcs").map(|vcs| match vcs {
+            "git" => VersionControl::Git,
+            "hg" => VersionControl::Hg,
+            "pijul" => VersionControl::Pijul,
+            "fossil" => VersionControl::Fossil,
+            "none" => VersionControl::NoVcs,
+            vcs => panic!("Impossible vcs: {:?}", vcs),
+        });
+        NewOptions::new(
+            vcs,
+            self._is_present("bin"),
+            self._is_present("lib"),
+            self.value_of_path("path", config).unwrap(),
+            self._value_of("name").map(|s| s.to_string()),
+        )
+    }
+
+    fn registry(&self, config: &Config) -> CargoResult<Option<String>> {
+        match self._value_of("registry") {
+            Some(registry) => {
+                if !config.cli_unstable().unstable_options {
+                    return Err(format_err!(
+                        "registry option is an unstable feature and \
+                         requires -Zunstable-options to use."
+                    ).into());
+                }
+                Ok(Some(registry.to_string()))
+            }
+            None => Ok(None),
+        }
+    }
+
+    fn index(&self, config: &Config) -> CargoResult<Option<String>> {
+        // TODO: Deprecated
+        // remove once it has been decided --host can be removed
+        // We may instead want to repurpose the host flag, as
+        // mentioned in this issue
+        // https://github.com/rust-lang/cargo/issues/4208
+        let msg = "The flag '--host' is no longer valid.
+
+Previous versions of Cargo accepted this flag, but it is being
+deprecated. The flag is being renamed to 'index', as the flag
+specifies the location of the index. Please use '--index' instead.
+
+This will soon become a hard error, so it's recommended to switch
+to '--index' or to contact the upstream maintainer about this
+warning.";
+
+        let index = match self._value_of("host") {
+            Some(host) => {
+                config.shell().warn(&msg)?;
+                Some(host.to_string())
+            }
+            None => self._value_of("index").map(|s| s.to_string()),
+        };
+        Ok(index)
+    }
+
+    fn _value_of(&self, name: &str) -> Option<&str>;
+
+    fn _values_of(&self, name: &str) -> Vec<String>;
+
+    fn _is_present(&self, name: &str) -> bool;
+}
+
+impl<'a> ArgMatchesExt for ArgMatches<'a> {
+    fn _value_of(&self, name: &str) -> Option<&str> {
+        self.value_of(name)
+    }
+
+    fn _values_of(&self, name: &str) -> Vec<String> {
+        self.values_of(name)
+            .unwrap_or_default()
+            .map(|s| s.to_string())
+            .collect()
+    }
+
+    fn _is_present(&self, name: &str) -> bool {
+        self.is_present(name)
+    }
+}
+
+pub fn values(args: &ArgMatches, name: &str) -> Vec<String> {
+    args.values_of(name)
+        .unwrap_or_default()
+        .map(|s| s.to_string())
+        .collect()
+}
diff --git a/src/bin/commands/bench.rs b/src/bin/commands/bench.rs
new file mode 100644
index 000000000..01676b515
--- /dev/null
+++ b/src/bin/commands/bench.rs
@@ -0,0 +1,101 @@
+use command_prelude::*;
+
+use cargo::ops::{self, CompileMode, TestOptions};
+
+pub fn cli() -> App {
+    subcommand("bench")
+        .setting(AppSettings::TrailingVarArg)
+        .about("Execute all benchmarks of a local package")
+        .arg(
+            Arg::with_name("BENCHNAME")
+                .help("If specified, only run benches containing this string in their names"),
+        )
+        .arg(
+            Arg::with_name("args")
+                .help("Arguments for the bench binary")
+                .multiple(true)
+                .last(true),
+        )
+        .arg_targets_all(
+            "Benchmark only this package's library",
+            "Benchmark only the specified binary",
+            "Benchmark all binaries",
+            "Benchmark only the specified example",
+            "Benchmark all examples",
+            "Benchmark only the specified test target",
+            "Benchmark all tests",
+            "Benchmark only the specified bench target",
+            "Benchmark all benches",
+            "Benchmark all targets (default)",
+        )
+        .arg(opt("no-run", "Compile, but don't run benchmarks"))
.arg_package_spec( + "Package to run benchmarks for", + "Benchmark all packages in the workspace", + "Exclude packages from the benchmark", + ) + .arg_jobs() + .arg_features() + .arg_target_triple("Build for the target triple") + .arg_manifest_path() + .arg_message_format() + .arg(opt( + "no-fail-fast", + "Run all benchmarks regardless of failure", + )) + .after_help( + "\ +All of the trailing arguments are passed to the benchmark binaries generated +for filtering benchmarks and generally providing options configuring how they +run. + +If the --package argument is given, then SPEC is a package id specification +which indicates which package should be benchmarked. If it is not given, then +the current package is benchmarked. For more information on SPEC and its format, +see the `cargo help pkgid` command. + +All packages in the workspace are benchmarked if the `--all` flag is supplied. The +`--all` flag is automatically assumed for a virtual manifest. +Note that `--exclude` has to be specified in conjunction with the `--all` flag. + +The --jobs argument affects the building of the benchmark executable but does +not affect how many jobs are used when running the benchmarks. + +Compilation can be customized with the `bench` profile in the manifest. +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let ws = args.workspace(config)?; + let mut compile_opts = args.compile_options(config, CompileMode::Bench)?; + compile_opts.release = true; + + let ops = TestOptions { + no_run: args.is_present("no-run"), + no_fail_fast: args.is_present("no-fail-fast"), + only_doc: false, + compile_opts, + }; + + let mut bench_args = vec![]; + bench_args.extend( + args.value_of("BENCHNAME") + .into_iter() + .map(|s| s.to_string()), + ); + bench_args.extend( + args.values_of("args") + .unwrap_or_default() + .map(|s| s.to_string()), + ); + + let err = ops::run_benches(&ws, &ops, &bench_args)?; + match err { + None => Ok(()), + Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) { + Some(i) => CliError::new(format_err!("bench failed"), i), + None => CliError::new(err.into(), 101), + }), + } +} diff --git a/src/bin/commands/build.rs b/src/bin/commands/build.rs new file mode 100644 index 000000000..2a772e03d --- /dev/null +++ b/src/bin/commands/build.rs @@ -0,0 +1,55 @@ +use command_prelude::*; + +use cargo::ops::{self, CompileMode}; + +pub fn cli() -> App { + subcommand("build") + .alias("b") + .about("Compile a local package and all of its dependencies") + .arg_package_spec( + "Package to build", + "Build all packages in the workspace", + "Exclude packages from the build", + ) + .arg_jobs() + .arg_targets_all( + "Build only this package's library", + "Build only the specified binary", + "Build all binaries", + "Build only the specified example", + "Build all examples", + "Build only the specified test target", + "Build all tests", + "Build only the specified bench target", + "Build all benches", + "Build all targets (lib and bin targets by default)", + ) + .arg_release("Build artifacts in release mode, with optimizations") + .arg_features() + .arg_target_triple("Build for the target triple") + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +If the --package argument is given, then SPEC is a package id specification +which indicates which package should be built. If it is not given, then the +current package is built. For more information on SPEC and its format, see the +`cargo help pkgid` command. 
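+
+For example, assuming a workspace member named `foo` (a placeholder), building
+just that package in release mode looks like this:
+
+    cargo build -p foo --release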
+ +All packages in the workspace are built if the `--all` flag is supplied. The +`--all` flag is automatically assumed for a virtual manifest. +Note that `--exclude` has to be specified in conjunction with the `--all` flag. + +Compilation can be configured via the use of profiles which are configured in +the manifest. The default profile for this command is `dev`, but passing +the --release flag will use the `release` profile instead. +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let ws = args.workspace(config)?; + let compile_opts = args.compile_options(config, CompileMode::Build)?; + ops::compile(&ws, &compile_opts)?; + Ok(()) +} diff --git a/src/bin/commands/check.rs b/src/bin/commands/check.rs new file mode 100644 index 000000000..45361740d --- /dev/null +++ b/src/bin/commands/check.rs @@ -0,0 +1,71 @@ +use command_prelude::*; + +use cargo::ops::{self, CompileMode}; + +pub fn cli() -> App { + subcommand("check") + .about("Check a local package and all of its dependencies for errors") + .arg_package_spec( + "Package(s) to check", + "Check all packages in the workspace", + "Exclude packages from the check", + ) + .arg_jobs() + .arg_targets_all( + "Check only this package's library", + "Check only the specified binary", + "Check all binaries", + "Check only the specified example", + "Check all examples", + "Check only the specified test target", + "Check all tests", + "Check only the specified bench target", + "Check all benches", + "Check all targets (lib and bin targets by default)", + ) + .arg_release("Check artifacts in release mode, with optimizations") + .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE")) + .arg_features() + .arg_target_triple("Check for the target triple") + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +If the --package argument is given, then SPEC is a package id specification +which indicates which package should be built. If it is not given, then the +current package is built. For more information on SPEC and its format, see the +`cargo help pkgid` command. + +All packages in the workspace are checked if the `--all` flag is supplied. The +`--all` flag is automatically assumed for a virtual manifest. +Note that `--exclude` has to be specified in conjunction with the `--all` flag. + +Compilation can be configured via the use of profiles which are configured in +the manifest. The default profile for this command is `dev`, but passing +the --release flag will use the `release` profile instead. + +The `--profile test` flag can be used to check unit tests with the +`#[cfg(test)]` attribute. 
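+
+For example:
+
+    cargo check --profile test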
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let ws = args.workspace(config)?; + let test = match args.value_of("profile") { + Some("test") => true, + None => false, + Some(profile) => { + let err = format_err!( + "unknown profile: `{}`, only `test` is \ + currently supported", + profile + ); + return Err(CliError::new(err, 101)); + } + }; + let mode = CompileMode::Check { test }; + let compile_opts = args.compile_options(config, mode)?; + ops::compile(&ws, &compile_opts)?; + Ok(()) +} diff --git a/src/bin/commands/clean.rs b/src/bin/commands/clean.rs new file mode 100644 index 000000000..8d0928cc2 --- /dev/null +++ b/src/bin/commands/clean.rs @@ -0,0 +1,32 @@ +use command_prelude::*; + +use cargo::ops::{self, CleanOptions}; + +pub fn cli() -> App { + subcommand("clean") + .about("Remove artifacts that cargo has generated in the past") + .arg_package_spec_simple("Package to clean artifacts for") + .arg_manifest_path() + .arg_target_triple("Target triple to clean output for (default all)") + .arg_release("Whether or not to clean release artifacts") + .after_help( + "\ +If the --package argument is given, then SPEC is a package id specification +which indicates which package's artifacts should be cleaned out. If it is not +given, then all packages' artifacts are removed. For more information on SPEC +and its format, see the `cargo help pkgid` command. +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let ws = args.workspace(config)?; + let opts = CleanOptions { + config, + spec: values(args, "package"), + target: args.target(), + release: args.is_present("release"), + }; + ops::clean(&ws, &opts)?; + Ok(()) +} diff --git a/src/bin/commands/doc.rs b/src/bin/commands/doc.rs new file mode 100644 index 000000000..a6a254675 --- /dev/null +++ b/src/bin/commands/doc.rs @@ -0,0 +1,58 @@ +use command_prelude::*; + +use cargo::ops::{self, CompileMode, DocOptions}; + +pub fn cli() -> App { + subcommand("doc") + .about("Build a package's documentation") + .arg(opt( + "open", + "Opens the docs in a browser after the operation", + )) + .arg_package_spec( + "Package to document", + "Document all packages in the workspace", + "Exclude packages from the build", + ) + .arg(opt("no-deps", "Don't build documentation for dependencies")) + .arg_jobs() + .arg_targets_lib_bin( + "Document only this package's library", + "Document only the specified binary", + "Document all binaries", + ) + .arg_release("Build artifacts in release mode, with optimizations") + .arg_features() + .arg_target_triple("Build for the target triple") + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +By default the documentation for the local package and all dependencies is +built. The output is all placed in `target/doc` in rustdoc's usual format. + +All packages in the workspace are documented if the `--all` flag is supplied. The +`--all` flag is automatically assumed for a virtual manifest. +Note that `--exclude` has to be specified in conjunction with the `--all` flag. + +If the --package argument is given, then SPEC is a package id specification +which indicates which package should be documented. If it is not given, then the +current package is documented. For more information on SPEC and its format, see +the `cargo help pkgid` command. 
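+
+For example, to document only the local package and open the result in a
+browser afterwards:
+
+    cargo doc --no-deps --open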
+
+",
+        )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    let mode = CompileMode::Doc {
+        deps: !args.is_present("no-deps"),
+    };
+    let compile_opts = args.compile_options(config, mode)?;
+    let doc_opts = DocOptions {
+        open_result: args.is_present("open"),
+        compile_opts,
+    };
+    ops::doc(&ws, &doc_opts)?;
+    Ok(())
+}
diff --git a/src/bin/commands/fetch.rs b/src/bin/commands/fetch.rs
new file mode 100644
index 000000000..642653fda
--- /dev/null
+++ b/src/bin/commands/fetch.rs
@@ -0,0 +1,27 @@
+use command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> App {
+    subcommand("fetch")
+        .about("Fetch dependencies of a package from the network")
+        .arg_manifest_path()
+        .after_help(
+            "\
+If a lockfile is available, this command will ensure that all of the git
+dependencies and/or registry dependencies are downloaded and locally
+available. The network is never touched after a `cargo fetch` unless
+the lockfile changes.
+
+If the lockfile is not available, then this is the equivalent of
+`cargo generate-lockfile`. A lockfile is generated and dependencies are also
+all updated.
+",
+        )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    ops::fetch(&ws)?;
+    Ok(())
+}
diff --git a/src/bin/commands/generate_lockfile.rs b/src/bin/commands/generate_lockfile.rs
new file mode 100644
index 000000000..f730872be
--- /dev/null
+++ b/src/bin/commands/generate_lockfile.rs
@@ -0,0 +1,27 @@
+use command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> App {
+    subcommand("generate-lockfile")
+        .about("Generate the lockfile for a project")
+        .arg_manifest_path()
+        .after_help(
+            "\
+If the lockfile does not exist, it will be created, with all dependencies
+resolved to the latest available compatible versions.
+
+If the lockfile already exists, it will be regenerated: all dependencies are
+re-resolved, which also updates them to the latest compatible versions.
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let ws = args.workspace(config)?; + ops::generate_lockfile(&ws)?; + Ok(()) +} diff --git a/src/bin/commands/git_checkout.rs b/src/bin/commands/git_checkout.rs new file mode 100644 index 000000000..a9401f105 --- /dev/null +++ b/src/bin/commands/git_checkout.rs @@ -0,0 +1,36 @@ +use command_prelude::*; + +use cargo::core::{GitReference, Source, SourceId}; +use cargo::sources::GitSource; +use cargo::util::ToUrl; + +pub fn cli() -> App { + subcommand("git-checkout") + .about("Checkout a copy of a Git repository") + .arg( + Arg::with_name("url") + .long("url") + .value_name("URL") + .required(true), + ) + .arg( + Arg::with_name("reference") + .long("reference") + .value_name("REF") + .required(true), + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let url = args.value_of("url").unwrap().to_url()?; + let reference = args.value_of("reference").unwrap(); + + let reference = GitReference::Branch(reference.to_string()); + let source_id = SourceId::for_git(&url, reference)?; + + let mut source = GitSource::new(&source_id, config)?; + + source.update()?; + + Ok(()) +} diff --git a/src/bin/commands/init.rs b/src/bin/commands/init.rs new file mode 100644 index 000000000..c32dead4d --- /dev/null +++ b/src/bin/commands/init.rs @@ -0,0 +1,19 @@ +use command_prelude::*; + +use cargo::ops; + +pub fn cli() -> App { + subcommand("init") + .about("Create a new cargo package in an existing directory") + .arg(Arg::with_name("path").default_value(".")) + .arg_new_opts() +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let opts = args.new_options(config)?; + ops::init(&opts, config)?; + config + .shell() + .status("Created", format!("{} project", opts.kind))?; + Ok(()) +} diff --git a/src/bin/commands/install.rs b/src/bin/commands/install.rs new file mode 100644 index 000000000..a353a2821 --- /dev/null +++ b/src/bin/commands/install.rs @@ -0,0 +1,118 @@ +use command_prelude::*; + +use cargo::core::{GitReference, SourceId}; +use cargo::ops::{self, CompileMode}; +use cargo::util::ToUrl; + +pub fn cli() -> App { + subcommand("install") + .about("Install a Rust binary") + .arg(Arg::with_name("crate").empty_values(false).multiple(true)) + .arg( + opt("version", "Specify a version to install from crates.io") + .alias("vers") + .value_name("VERSION"), + ) + .arg(opt("git", "Git URL to install the specified crate from").value_name("URL")) + .arg(opt("branch", "Branch to use when installing from git").value_name("BRANCH")) + .arg(opt("tag", "Tag to use when installing from git").value_name("TAG")) + .arg(opt("rev", "Specific commit to use when installing from git").value_name("SHA")) + .arg(opt("path", "Filesystem path to local crate to install").value_name("PATH")) + .arg(opt( + "list", + "list all installed packages and their versions", + )) + .arg_jobs() + .arg(opt("force", "Force overwriting existing crates or binaries").short("f")) + .arg_features() + .arg(opt("debug", "Build in debug mode instead of release mode")) + .arg_targets_bins_examples( + "Install only the specified binary", + "Install all binaries", + "Install only the specified example", + "Install all examples", + ) + .arg(opt("root", "Directory to install packages into").value_name("DIR")) + .after_help( + "\ +This command manages Cargo's local set of installed binary crates. Only packages +which have [[bin]] targets can be installed, and all binaries are installed into +the installation root's `bin` folder. 
The installation root is determined, in
+order of precedence, by `--root`, `$CARGO_INSTALL_ROOT`, the `install.root`
+configuration key, and finally the home directory (which is either
+`$CARGO_HOME` if set or `$HOME/.cargo` by default).
+
+There are multiple sources from which a crate can be installed. The default
+location is crates.io but the `--git` and `--path` flags can change this source.
+If the source contains more than one package (such as crates.io or a git
+repository with multiple crates) the `<crate>` argument is required to indicate
+which crate should be installed.
+
+Crates from crates.io can optionally specify the version they wish to install
+via the `--vers` flag, and similarly packages from git repositories can
+optionally specify the branch, tag, or revision that should be installed. If a
+crate has multiple binaries, the `--bin` argument can selectively install only
+one of them, and if you'd rather install examples the `--example` argument can
+be used as well.
+
+By default cargo will refuse to overwrite existing binaries. The `--force` flag
+enables overwriting existing binaries. Thus you can reinstall a crate with
+`cargo install --force <crate>`.
+
+As a special convenience, omitting the <crate> specification entirely will
+install the crate in the current directory. That is, `install` is equivalent to
+the more explicit `install --path .`.
+
+If the source is crates.io or `--git` then by default the crate will be built
+in a temporary target directory. To avoid this, the target directory can be
+specified by setting the `CARGO_TARGET_DIR` environment variable to a relative
+path. In particular, this can be useful for caching build artifacts on
+continuous integration systems.",
+        )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let mut compile_opts = args.compile_options(config, CompileMode::Build)?;
+    compile_opts.release = !args.is_present("debug");
+
+    let krates = args.values_of("crate")
+        .unwrap_or_default()
+        .collect::<Vec<_>>();
+
+    let source = if let Some(url) = args.value_of("git") {
+        let url = url.to_url()?;
+        let gitref = if let Some(branch) = args.value_of("branch") {
+            GitReference::Branch(branch.to_string())
+        } else if let Some(tag) = args.value_of("tag") {
+            GitReference::Tag(tag.to_string())
+        } else if let Some(rev) = args.value_of("rev") {
+            GitReference::Rev(rev.to_string())
+        } else {
+            GitReference::Branch("master".to_string())
+        };
+        SourceId::for_git(&url, gitref)?
+    } else if let Some(path) = args.value_of_path("path", config) {
+        SourceId::for_path(&path)?
+    } else if krates.is_empty() {
+        SourceId::for_path(config.cwd())?
+    } else {
+        SourceId::crates_io(config)?
+    };
+
+    let version = args.value_of("version");
+    let root = args.value_of("root");
+
+    if args.is_present("list") {
+        ops::install_list(root, config)?;
+    } else {
+        ops::install(
+            root,
+            krates,
+            &source,
+            version,
+            &compile_opts,
+            args.is_present("force"),
+        )?;
+    }
+    Ok(())
+}
diff --git a/src/bin/commands/locate_project.rs b/src/bin/commands/locate_project.rs
new file mode 100644
index 000000000..30cdf506c
--- /dev/null
+++ b/src/bin/commands/locate_project.rs
@@ -0,0 +1,33 @@
+use command_prelude::*;
+
+use cargo::print_json;
+
+pub fn cli() -> App {
+    subcommand("locate-project")
+        .about("Print a JSON representation of a Cargo.toml file's location")
+        .arg_manifest_path()
+}
+
+#[derive(Serialize)]
+pub struct ProjectLocation {
+    root: String,
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let root = args.root_manifest(config)?;
+
+    let root = root.to_str()
+        .ok_or_else(|| {
+            format_err!(
+                "your project path contains characters \
+                 not representable in Unicode"
+            )
+        })
+        .map_err(|e| CliError::new(e, 1))?
+        .to_string();
+
+    let location = ProjectLocation { root };
+
+    print_json(&location);
+    Ok(())
+}
diff --git a/src/bin/commands/login.rs b/src/bin/commands/login.rs
new file mode 100644
index 000000000..199951048
--- /dev/null
+++ b/src/bin/commands/login.rs
@@ -0,0 +1,58 @@
+use command_prelude::*;
+
+use std::io::{self, BufRead};
+
+use cargo::core::{Source, SourceId};
+use cargo::sources::RegistrySource;
+use cargo::util::{CargoError, CargoResultExt};
+use cargo::ops;
+
+pub fn cli() -> App {
+    subcommand("login")
+        .about(
+            "Save an API token from the registry locally. \
+             If token is not specified, it will be read from stdin.",
+        )
+        .arg(Arg::with_name("token"))
+        .arg(opt("host", "Host to set the token for").value_name("HOST"))
+        .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let registry = args.registry(config)?;
+
+    let token = match args.value_of("token") {
+        Some(token) => token.to_string(),
+        None => {
+            let host = match registry {
+                Some(ref _registry) => {
+                    return Err(format_err!(
+                        "token must be provided when \
+                         --registry is provided."
+                    ).into());
+                }
+                None => {
+                    let src = SourceId::crates_io(config)?;
+                    let mut src = RegistrySource::remote(&src, config);
+                    src.update()?;
+                    let config = src.config()?.unwrap();
+                    args.value_of("host")
+                        .map(|s| s.to_string())
+                        .unwrap_or(config.api.unwrap())
+                }
+            };
+            println!("please visit {}me and paste the API Token below", host);
+            let mut line = String::new();
+            let input = io::stdin();
+            input
+                .lock()
+                .read_line(&mut line)
+                .chain_err(|| "failed to read stdin")
+                .map_err(CargoError::from)?;
+            line.trim().to_string()
+        }
+    };
+
+    ops::registry_login(config, token, registry)?;
+    Ok(())
+}
diff --git a/src/bin/commands/metadata.rs b/src/bin/commands/metadata.rs
new file mode 100644
index 000000000..b701acd61
--- /dev/null
+++ b/src/bin/commands/metadata.rs
@@ -0,0 +1,53 @@
+use command_prelude::*;
+
+use cargo::ops::{self, OutputMetadataOptions};
+use cargo::print_json;
+
+pub fn cli() -> App {
+    subcommand("metadata")
+        .about(
+            "Output the resolved dependencies of a project, \
+             the concrete used versions including overrides, \
+             in machine-readable format",
+        )
+        .arg_features()
+        .arg(opt(
+            "no-deps",
+            "Output information only about the root package \
+             and don't fetch dependencies",
+        ))
+        .arg_manifest_path()
+        .arg(
+            opt("format-version", "Format version")
+                .value_name("VERSION")
+                .possible_value("1"),
+        )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+
+    let version = match args.value_of("format-version") {
+        None => {
+            config.shell().warn(
+                "\
+                 please specify `--format-version` flag explicitly \
+                 to avoid compatibility problems",
+            )?;
+            1
+        }
+        Some(version) => version.parse().unwrap(),
+    };
+
+    let options = OutputMetadataOptions {
+        features: values(args, "features"),
+        all_features: args.is_present("all-features"),
+        no_default_features: args.is_present("no-default-features"),
+        no_deps: args.is_present("no-deps"),
+        version,
+    };
+
+    let result = ops::output_metadata(&ws, &options)?;
+    print_json(&result);
+    Ok(())
+}
diff --git a/src/bin/commands/mod.rs b/src/bin/commands/mod.rs
new file mode 100644
index 000000000..fc829a855
--- /dev/null
+++ b/src/bin/commands/mod.rs
@@ -0,0 +1,101 @@
+use command_prelude::*;
+
+pub fn builtin() -> Vec<App> {
+    vec![
+        bench::cli(),
+        build::cli(),
+        check::cli(),
+        clean::cli(),
+        doc::cli(),
+        fetch::cli(),
+        generate_lockfile::cli(),
+        git_checkout::cli(),
+        init::cli(),
+        install::cli(),
+        locate_project::cli(),
+        login::cli(),
+        metadata::cli(),
+        new::cli(),
+        owner::cli(),
+        package::cli(),
+        pkgid::cli(),
+        publish::cli(),
+        read_manifest::cli(),
+        run::cli(),
+        rustc::cli(),
+        rustdoc::cli(),
+        search::cli(),
+        test::cli(),
+        uninstall::cli(),
+        update::cli(),
+        verify_project::cli(),
+        version::cli(),
+        yank::cli(),
+    ]
+}
+
+pub fn builtin_exec(cmd: &str) -> Option<fn(&mut Config, &ArgMatches) -> CliResult> {
+    let f = match cmd {
+        "bench" => bench::exec,
+        "build" => build::exec,
+        "check" => check::exec,
+        "clean" => clean::exec,
+        "doc" => doc::exec,
+        "fetch" => fetch::exec,
+        "generate-lockfile" => generate_lockfile::exec,
+        "git-checkout" => git_checkout::exec,
+        "init" => init::exec,
+        "install" => install::exec,
+        "locate-project" => locate_project::exec,
+        "login" => login::exec,
+        "metadata" => metadata::exec,
+        "new" => new::exec,
+        "owner" => owner::exec,
+        "package" => package::exec,
+        "pkgid" => pkgid::exec,
+        "publish" => publish::exec,
+        "read-manifest" => read_manifest::exec,
+        "run" => run::exec,
+        "rustc" => rustc::exec,
+        "rustdoc" => rustdoc::exec,
"search" => search::exec, + "test" => test::exec, + "uninstall" => uninstall::exec, + "update" => update::exec, + "verify-project" => verify_project::exec, + "version" => version::exec, + "yank" => yank::exec, + _ => return None, + }; + Some(f) +} + +pub mod bench; +pub mod build; +pub mod check; +pub mod clean; +pub mod doc; +pub mod fetch; +pub mod generate_lockfile; +pub mod git_checkout; +pub mod init; +pub mod install; +pub mod locate_project; +pub mod login; +pub mod metadata; +pub mod new; +pub mod owner; +pub mod package; +pub mod pkgid; +pub mod publish; +pub mod read_manifest; +pub mod run; +pub mod rustc; +pub mod rustdoc; +pub mod search; +pub mod test; +pub mod uninstall; +pub mod update; +pub mod verify_project; +pub mod version; +pub mod yank; diff --git a/src/bin/commands/new.rs b/src/bin/commands/new.rs new file mode 100644 index 000000000..8b8e60740 --- /dev/null +++ b/src/bin/commands/new.rs @@ -0,0 +1,20 @@ +use command_prelude::*; + +use cargo::ops; + +pub fn cli() -> App { + subcommand("new") + .about("Create a new cargo package at ") + .arg(Arg::with_name("path").required(true)) + .arg_new_opts() +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let opts = args.new_options(config)?; + ops::new(&opts, config)?; + let path = args.value_of("path").unwrap(); + config + .shell() + .status("Created", format!("{} `{}` project", opts.kind, path))?; + Ok(()) +} diff --git a/src/bin/commands/owner.rs b/src/bin/commands/owner.rs new file mode 100644 index 000000000..f20be31b1 --- /dev/null +++ b/src/bin/commands/owner.rs @@ -0,0 +1,49 @@ +use command_prelude::*; + +use cargo::ops::{self, OwnersOptions}; + +pub fn cli() -> App { + subcommand("owner") + .about("Manage the owners of a crate on the registry") + .arg(Arg::with_name("crate")) + .arg(multi_opt("add", "LOGIN", "Name of a user or team to add as an owner").short("a")) + .arg( + multi_opt( + "remove", + "LOGIN", + "Name of a user or team to remove as an owner", + ).short("r"), + ) + .arg(opt("list", "List owners of a crate").short("l")) + .arg(opt("index", "Registry index to modify owners for").value_name("INDEX")) + .arg(opt("token", "API token to use when authenticating").value_name("TOKEN")) + .arg(opt("registry", "Registry to use").value_name("REGISTRY")) + .after_help( + "\ + This command will modify the owners for a package + on the specified registry(or + default).Note that owners of a package can upload new versions, yank old + versions.Explicitly named owners can also modify the set of owners, so take + caution! 
+
+See http://doc.crates.io/crates-io.html#cargo-owner for detailed documentation
+and troubleshooting.",
+        )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let registry = args.registry(config)?;
+    let opts = OwnersOptions {
+        krate: args.value_of("crate").map(|s| s.to_string()),
+        token: args.value_of("token").map(|s| s.to_string()),
+        index: args.value_of("index").map(|s| s.to_string()),
+        to_add: args.values_of("add")
+            .map(|xs| xs.map(|s| s.to_string()).collect()),
+        to_remove: args.values_of("remove")
+            .map(|xs| xs.map(|s| s.to_string()).collect()),
+        list: args.is_present("list"),
+        registry,
+    };
+    ops::modify_owners(config, &opts)?;
+    Ok(())
+}
diff --git a/src/bin/commands/package.rs b/src/bin/commands/package.rs
new file mode 100644
index 000000000..bc02332bc
--- /dev/null
+++ b/src/bin/commands/package.rs
@@ -0,0 +1,47 @@
+use command_prelude::*;
+
+use cargo::ops::{self, PackageOpts};
+
+pub fn cli() -> App {
+    subcommand("package")
+        .about("Assemble the local package into a distributable tarball")
+        .arg(
+            opt(
+                "list",
+                "Print files included in a package without making one",
+            ).short("l"),
+        )
+        .arg(opt(
+            "no-verify",
+            "Don't verify the contents by building them",
+        ))
+        .arg(opt(
+            "no-metadata",
+            "Ignore warnings about a lack of human-usable metadata",
+        ))
+        .arg(opt(
+            "allow-dirty",
+            "Allow dirty working directories to be packaged",
+        ))
+        .arg_target_triple("Build for the target triple")
+        .arg_manifest_path()
+        .arg_jobs()
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    ops::package(
+        &ws,
+        &PackageOpts {
+            config,
+            verify: !args.is_present("no-verify"),
+            list: args.is_present("list"),
+            check_metadata: !args.is_present("no-metadata"),
+            allow_dirty: args.is_present("allow-dirty"),
+            target: args.target(),
+            jobs: args.jobs()?,
+            registry: None,
+        },
+    )?;
+    Ok(())
+}
diff --git a/src/bin/commands/pkgid.rs b/src/bin/commands/pkgid.rs
new file mode 100644
index 000000000..7010092d6
--- /dev/null
+++ b/src/bin/commands/pkgid.rs
@@ -0,0 +1,41 @@
+use command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> App {
+    subcommand("pkgid")
+        .about("Print a fully qualified package specification")
+        .arg(Arg::with_name("spec"))
+        .arg_package("Argument to get the package id specifier for")
+        .arg_manifest_path()
+        .after_help(
+            "\
+Given a <spec> argument, print out the fully qualified package id specifier.
+This command will generate an error if <spec> is ambiguous as to which package
+it refers to in the dependency graph. If no <spec> is given, then the pkgid for
+the local package is printed.
+
+This command requires that a lockfile is available and dependencies have been
+fetched.
+
+Example Package IDs
+
+    pkgid                      | name | version | url
+    ---------------------------|------|---------|----------------------
+    foo                        | foo  | *       | *
+    foo:1.2.3                  | foo  | 1.2.3   | *
+    crates.io/foo              | foo  | *       | *://crates.io/foo
+    crates.io/foo#1.2.3        | foo  | 1.2.3   | *://crates.io/foo
+    crates.io/bar#foo:1.2.3    | foo  | 1.2.3   | *://crates.io/bar
+    http://crates.io/foo#1.2.3 | foo  | 1.2.3   | http://crates.io/foo
+",
+        )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    let spec = args.value_of("spec").or(args.value_of("package"));
+    let spec = ops::pkgid(&ws, spec)?;
+    println!("{}", spec);
+    Ok(())
+}
diff --git a/src/bin/commands/publish.rs b/src/bin/commands/publish.rs
new file mode 100644
index 000000000..fe7ea2a55
--- /dev/null
+++ b/src/bin/commands/publish.rs
@@ -0,0 +1,45 @@
+use command_prelude::*;
+
+use cargo::ops::{self, PublishOpts};
+
+pub fn cli() -> App {
+    subcommand("publish")
+        .about("Upload a package to the registry")
+        .arg_index()
+        .arg(opt("token", "Token to use when uploading").value_name("TOKEN"))
+        .arg(opt(
+            "no-verify",
+            "Don't verify the contents by building them",
+        ))
+        .arg(opt(
+            "allow-dirty",
+            "Allow dirty working directories to be packaged",
+        ))
+        .arg_target_triple("Build for the target triple")
+        .arg_manifest_path()
+        .arg_jobs()
+        .arg(opt("dry-run", "Perform all checks without uploading"))
+        .arg(opt("registry", "Registry to publish to").value_name("REGISTRY"))
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let registry = args.registry(config)?;
+    let ws = args.workspace(config)?;
+    let index = args.index(config)?;
+
+    ops::publish(
+        &ws,
+        &PublishOpts {
+            config,
+            token: args.value_of("token").map(|s| s.to_string()),
+            index,
+            verify: !args.is_present("no-verify"),
+            allow_dirty: args.is_present("allow-dirty"),
+            target: args.target(),
+            jobs: args.jobs()?,
+            dry_run: args.is_present("dry-run"),
+            registry,
+        },
+    )?;
+    Ok(())
+}
diff --git a/src/bin/commands/read_manifest.rs b/src/bin/commands/read_manifest.rs
new file mode 100644
index 000000000..1e54c79e8
--- /dev/null
+++ b/src/bin/commands/read_manifest.rs
@@ -0,0 +1,18 @@
+use command_prelude::*;
+
+use cargo::print_json;
+
+pub fn cli() -> App {
+    subcommand("read-manifest")
+        .about(
+            "Deprecated, use `cargo metadata --no-deps` instead.
+Print a JSON representation of a Cargo.toml manifest.", + ) + .arg_manifest_path() +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let ws = args.workspace(config)?; + print_json(&ws.current()?); + Ok(()) +} diff --git a/src/bin/commands/run.rs b/src/bin/commands/run.rs new file mode 100644 index 000000000..77e5d8e13 --- /dev/null +++ b/src/bin/commands/run.rs @@ -0,0 +1,68 @@ +use command_prelude::*; + +use cargo::core::Verbosity; +use cargo::ops::{self, CompileFilter, CompileMode}; + +pub fn cli() -> App { + subcommand("run") + .alias("r") + .setting(AppSettings::TrailingVarArg) + .about("Run the main binary of the local package (src/main.rs)") + .arg(Arg::with_name("args").multiple(true)) + .arg_targets_bin_example( + "Name of the bin target to run", + "Name of the example target to run", + ) + .arg_package("Package with the target to run") + .arg_jobs() + .arg_release("Build artifacts in release mode, with optimizations") + .arg_features() + .arg_target_triple("Build for the target triple") + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +If neither `--bin` nor `--example` are given, then if the project only has one +bin target it will be run. Otherwise `--bin` specifies the bin target to run, +and `--example` specifies the example target to run. At most one of `--bin` or +`--example` can be provided. + +All of the trailing arguments are passed to the binary to run. If you're passing +arguments to both Cargo and the binary, the ones after `--` go to the binary, +the ones before go to Cargo. +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let ws = args.workspace(config)?; + + let mut compile_opts = args.compile_options_for_single_package(config, CompileMode::Build)?; + if !args.is_present("example") && !args.is_present("bin") { + compile_opts.filter = CompileFilter::Default { + required_features_filterable: false, + }; + }; + match ops::run(&ws, &compile_opts, &values(args, "args"))? { + None => Ok(()), + Some(err) => { + // If we never actually spawned the process then that sounds pretty + // bad and we always want to forward that up. 
+ let exit = match err.exit { + Some(exit) => exit, + None => return Err(CliError::new(err.into(), 101)), + }; + + // If `-q` was passed then we suppress extra error information about + // a failed process, we assume the process itself printed out enough + // information about why it failed so we don't do so as well + let exit_code = exit.code().unwrap_or(101); + let is_quiet = config.shell().verbosity() == Verbosity::Quiet; + Err(if is_quiet { + CliError::code(exit_code) + } else { + CliError::new(err.into(), exit_code) + }) + } + } +} diff --git a/src/bin/commands/rustc.rs b/src/bin/commands/rustc.rs new file mode 100644 index 000000000..fb998c911 --- /dev/null +++ b/src/bin/commands/rustc.rs @@ -0,0 +1,73 @@ +use command_prelude::*; + +use cargo::ops::{self, CompileMode}; + +pub fn cli() -> App { + subcommand("rustc") + .setting(AppSettings::TrailingVarArg) + .about("Compile a package and all of its dependencies") + .arg(Arg::with_name("args").multiple(true)) + .arg_package("Package to build") + .arg_jobs() + .arg_targets_all( + "Build only this package's library", + "Build only the specified binary", + "Build all binaries", + "Build only the specified example", + "Build all examples", + "Build only the specified test target", + "Build all tests", + "Build only the specified bench target", + "Build all benches", + "Build all targets (lib and bin targets by default)", + ) + .arg_release("Build artifacts in release mode, with optimizations") + .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE")) + .arg_features() + .arg_target_triple("Target triple which compiles will be for") + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +The specified target for the current package (or package specified by SPEC if +provided) will be compiled along with all of its dependencies. The specified +... will all be passed to the final compiler invocation, not any of the +dependencies. Note that the compiler will still unconditionally receive +arguments such as -L, --extern, and --crate-type, and the specified ... +will simply be added to the compiler invocation. + +This command requires that only one target is being compiled. If more than one +target is available for the current package the filters of --lib, --bin, etc, +must be used to select which target is compiled. To pass flags to all compiler +processes spawned by Cargo, use the $RUSTFLAGS environment variable or the +`build.rustflags` configuration option. 
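+
+For example, to pass a single extra codegen flag to rustc when compiling just
+the current package:
+
+    cargo rustc -- -C opt-level=3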
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let ws = args.workspace(config)?; + let mode = match args.value_of("profile") { + Some("dev") | None => CompileMode::Build, + Some("test") => CompileMode::Test, + Some("bench") => CompileMode::Bench, + Some("check") => CompileMode::Check { test: false }, + Some(mode) => { + let err = format_err!( + "unknown profile: `{}`, use dev, + test, or bench", + mode + ); + return Err(CliError::new(err, 101)); + } + }; + let mut compile_opts = args.compile_options_for_single_package(config, mode)?; + let target_args = values(args, "args"); + compile_opts.target_rustc_args = if target_args.is_empty() { + None + } else { + Some(target_args) + }; + ops::compile(&ws, &compile_opts)?; + Ok(()) +} diff --git a/src/bin/commands/rustdoc.rs b/src/bin/commands/rustdoc.rs new file mode 100644 index 000000000..abd2c7405 --- /dev/null +++ b/src/bin/commands/rustdoc.rs @@ -0,0 +1,65 @@ +use command_prelude::*; + +use cargo::ops::{self, CompileMode, DocOptions}; + +pub fn cli() -> App { + subcommand("rustdoc") + .setting(AppSettings::TrailingVarArg) + .about("Build a package's documentation, using specified custom flags.") + .arg(Arg::with_name("args").multiple(true)) + .arg(opt( + "open", + "Opens the docs in a browser after the operation", + )) + .arg_package("Package to document") + .arg_jobs() + .arg_targets_all( + "Build only this package's library", + "Build only the specified binary", + "Build all binaries", + "Build only the specified example", + "Build all examples", + "Build only the specified test target", + "Build all tests", + "Build only the specified bench target", + "Build all benches", + "Build all targets (default)", + ) + .arg_release("Build artifacts in release mode, with optimizations") + .arg_features() + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +The specified target for the current package (or package specified by SPEC if +provided) will be documented with the specified ... being passed to the +final rustdoc invocation. Dependencies will not be documented as part of this +command. Note that rustdoc will still unconditionally receive arguments such +as -L, --extern, and --crate-type, and the specified ... will simply be +added to the rustdoc invocation. + +If the --package argument is given, then SPEC is a package id specification +which indicates which package should be documented. If it is not given, then the +current package is documented. For more information on SPEC and its format, see +the `cargo help pkgid` command. 
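+
+For example, to forward one extra flag through to rustdoc (here rustdoc's
+`--document-private-items`):
+
+    cargo rustdoc -- --document-private-items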
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let ws = args.workspace(config)?; + let mut compile_opts = + args.compile_options_for_single_package(config, CompileMode::Doc { deps: false })?; + let target_args = values(args, "args"); + compile_opts.target_rustdoc_args = if target_args.is_empty() { + None + } else { + Some(target_args) + }; + let doc_opts = DocOptions { + open_result: args.is_present("open"), + compile_opts, + }; + ops::doc(&ws, &doc_opts)?; + Ok(()) +} diff --git a/src/bin/commands/search.rs b/src/bin/commands/search.rs new file mode 100644 index 000000000..0501d8e5f --- /dev/null +++ b/src/bin/commands/search.rs @@ -0,0 +1,30 @@ +use command_prelude::*; + +use std::cmp::min; + +use cargo::ops; + +pub fn cli() -> App { + subcommand("search") + .about("Search packages in crates.io") + .arg(Arg::with_name("query").multiple(true)) + .arg_index() + .arg( + opt( + "limit", + "Limit the number of results (default: 10, max: 100)", + ).value_name("LIMIT"), + ) + .arg(opt("registry", "Registry to use").value_name("REGISTRY")) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let registry = args.registry(config)?; + let index = args.index(config)?; + let limit = args.value_of_u32("limit")?; + let limit = min(100, limit.unwrap_or(10)); + let query: Vec<&str> = args.values_of("query").unwrap_or_default().collect(); + let query: String = query.join("+"); + ops::search(&query, config, index, limit, registry)?; + Ok(()) +} diff --git a/src/bin/commands/test.rs b/src/bin/commands/test.rs new file mode 100644 index 000000000..93db6dccd --- /dev/null +++ b/src/bin/commands/test.rs @@ -0,0 +1,132 @@ +use command_prelude::*; + +use cargo::ops::{self, CompileMode}; + +pub fn cli() -> App { + subcommand("test") + .alias("t") + .setting(AppSettings::TrailingVarArg) + .about("Execute all unit and integration tests of a local package") + .arg( + Arg::with_name("TESTNAME") + .help("If specified, only run tests containing this string in their names"), + ) + .arg( + Arg::with_name("args") + .help("Arguments for the test binary") + .multiple(true) + .last(true), + ) + .arg_targets_all( + "Test only this package's library", + "Test only the specified binary", + "Test all binaries", + "Check that the specified examples compile", + "Check that all examples compile", + "Test only the specified test target", + "Test all tests", + "Test only the specified bench target", + "Test all benches", + "Test all targets (default)", + ) + .arg(opt("doc", "Test only this library's documentation")) + .arg(opt("no-run", "Compile, but don't run tests")) + .arg(opt("no-fail-fast", "Run all tests regardless of failure")) + .arg_package_spec( + "Package to run tests for", + "Test all packages in the workspace", + "Exclude packages from the test", + ) + .arg_jobs() + .arg_release("Build artifacts in release mode, with optimizations") + .arg_features() + .arg_target_triple("Build for the target triple") + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +All of the trailing arguments are passed to the test binaries generated for +filtering tests and generally providing options configuring how they run. For +example, this will run all tests with the name `foo` in their name: + + cargo test foo + +If the --package argument is given, then SPEC is a package id specification +which indicates which package should be tested. If it is not given, then the +current package is tested. 
For more information on SPEC and its format, see the +`cargo help pkgid` command. + +All packages in the workspace are tested if the `--all` flag is supplied. The +`--all` flag is automatically assumed for a virtual manifest. +Note that `--exclude` has to be specified in conjunction with the `--all` flag. + +The --jobs argument affects the building of the test executable but does +not affect how many jobs are used when running the tests. The default value +for the --jobs argument is the number of CPUs. If you want to control the +number of simultaneous running test cases, pass the `--test-threads` option +to the test binaries: + + cargo test -- --test-threads=1 + +Compilation can be configured via the `test` profile in the manifest. + +By default the rust test harness hides output from test execution to +keep results readable. Test output can be recovered (e.g. for debugging) +by passing `--nocapture` to the test binaries: + + cargo test -- --nocapture + +To get the list of all options available for the test binaries use this: + + cargo test -- --help +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let ws = args.workspace(config)?; + + let mut compile_opts = args.compile_options(config, CompileMode::Test)?; + let doc = args.is_present("doc"); + if doc { + compile_opts.mode = ops::CompileMode::Doctest; + compile_opts.filter = ops::CompileFilter::new( + true, + Vec::new(), + false, + Vec::new(), + false, + Vec::new(), + false, + Vec::new(), + false, + false, + ); + } + + let ops = ops::TestOptions { + no_run: args.is_present("no-run"), + no_fail_fast: args.is_present("no-fail-fast"), + only_doc: doc, + compile_opts, + }; + + // TESTNAME is actually an argument of the test binary, but it's + // important so we explicitly mention it and reconfigure + let mut test_args = vec![]; + test_args.extend(args.value_of("TESTNAME").into_iter().map(|s| s.to_string())); + test_args.extend( + args.values_of("args") + .unwrap_or_default() + .map(|s| s.to_string()), + ); + + let err = ops::run_tests(&ws, &ops, &test_args)?; + return match err { + None => Ok(()), + Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) { + Some(i) => CliError::new(format_err!("{}", err.hint(&ws)), i), + None => CliError::new(err.into(), 101), + }), + }; +} diff --git a/src/bin/commands/uninstall.rs b/src/bin/commands/uninstall.rs new file mode 100644 index 000000000..203185119 --- /dev/null +++ b/src/bin/commands/uninstall.rs @@ -0,0 +1,26 @@ +use command_prelude::*; + +use cargo::ops; + +pub fn cli() -> App { + subcommand("uninstall") + .about("Remove a Rust binary") + .arg(Arg::with_name("spec").multiple(true)) + .arg(multi_opt("bin", "NAME", "Only uninstall the binary NAME")) + .arg(opt("root", "Directory to uninstall packages from").value_name("DIR")) + .after_help( + "\ +The argument SPEC is a package id specification (see `cargo help pkgid`) to +specify which crate should be uninstalled. By default all binaries are +uninstalled for a crate but the `--bin` and `--example` flags can be used to +only uninstall particular binaries. 
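+
+For example, to uninstall just the binary `foo` of a previously installed
+crate also named `foo` (both placeholders):
+
+    cargo uninstall foo --bin foo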
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let root = args.value_of("root"); + let specs = args.values_of("spec").unwrap_or_default().collect(); + ops::uninstall(root, specs, &values(args, "bin"), config)?; + Ok(()) +} diff --git a/src/bin/commands/update.rs b/src/bin/commands/update.rs new file mode 100644 index 000000000..c5a992a3d --- /dev/null +++ b/src/bin/commands/update.rs @@ -0,0 +1,51 @@ +use command_prelude::*; + +use cargo::ops::{self, UpdateOptions}; + +pub fn cli() -> App { + subcommand("update") + .about("Update dependencies as recorded in the local lock file") + .arg_package_spec_simple("Package to update") + .arg(opt( + "aggressive", + "Force updating all dependencies of as well", + )) + .arg(opt("precise", "Update a single dependency to exactly PRECISE").value_name("PRECISE")) + .arg_manifest_path() + .after_help( + "\ +This command requires that a `Cargo.lock` already exists as generated by +`cargo build` or related commands. + +If SPEC is given, then a conservative update of the lockfile will be +performed. This means that only the dependency specified by SPEC will be +updated. Its transitive dependencies will be updated only if SPEC cannot be +updated without updating dependencies. All other dependencies will remain +locked at their currently recorded versions. + +If PRECISE is specified, then --aggressive must not also be specified. The +argument PRECISE is a string representing a precise revision that the package +being updated should be updated to. For example, if the package comes from a git +repository, then PRECISE would be the exact revision that the repository should +be updated to. + +If SPEC is not given, then all dependencies will be re-resolved and +updated. + +For more information about package id specifications, see `cargo help pkgid`. +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + let ws = args.workspace(config)?; + + let update_opts = UpdateOptions { + aggressive: args.is_present("aggressive"), + precise: args.value_of("precise"), + to_update: values(args, "package"), + config, + }; + ops::update_lockfile(&ws, &update_opts)?; + Ok(()) +} diff --git a/src/bin/commands/verify_project.rs b/src/bin/commands/verify_project.rs new file mode 100644 index 000000000..eea65c775 --- /dev/null +++ b/src/bin/commands/verify_project.rs @@ -0,0 +1,45 @@ +use command_prelude::*; + +use std::collections::HashMap; +use std::process; +use std::fs::File; +use std::io::Read; + +use toml; + +use cargo::print_json; + +pub fn cli() -> App { + subcommand("verify-project") + .about("Check correctness of crate manifest") + .arg_manifest_path() +} + +pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { + fn fail(reason: &str, value: &str) -> ! 
{
+        let mut h = HashMap::new();
+        h.insert(reason.to_string(), value.to_string());
+        print_json(&h);
+        process::exit(1)
+    }
+
+    let mut contents = String::new();
+    let filename = match args.root_manifest(config) {
+        Ok(filename) => filename,
+        Err(e) => fail("invalid", &e.to_string()),
+    };
+
+    let file = File::open(&filename);
+    match file.and_then(|mut f| f.read_to_string(&mut contents)) {
+        Ok(_) => {}
+        Err(e) => fail("invalid", &format!("error reading file: {}", e)),
+    };
+    if contents.parse::<toml::Value>().is_err() {
+        fail("invalid", "invalid-format");
+    }
+
+    let mut h = HashMap::new();
+    h.insert("success".to_string(), "true".to_string());
+    print_json(&h);
+    Ok(())
+}
diff --git a/src/bin/commands/version.rs b/src/bin/commands/version.rs
new file mode 100644
index 000000000..0e9d5be52
--- /dev/null
+++ b/src/bin/commands/version.rs
@@ -0,0 +1,12 @@
+use command_prelude::*;
+
+use cargo;
+
+pub fn cli() -> App {
+    subcommand("version").about("Show version information")
+}
+
+pub fn exec(_config: &mut Config, _args: &ArgMatches) -> CliResult {
+    println!("{}", cargo::version());
+    Ok(())
+}
diff --git a/src/bin/commands/yank.rs b/src/bin/commands/yank.rs
new file mode 100644
index 000000000..150474be8
--- /dev/null
+++ b/src/bin/commands/yank.rs
@@ -0,0 +1,43 @@
+use command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> App {
+    subcommand("yank")
+        .about("Remove a pushed crate from the index")
+        .arg(Arg::with_name("crate"))
+        .arg(opt("vers", "The version to yank or un-yank").value_name("VERSION"))
+        .arg(opt(
+            "undo",
+            "Undo a yank, putting a version back into the index",
+        ))
+        .arg(opt("index", "Registry index to yank from").value_name("INDEX"))
+        .arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
+        .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+        .after_help(
+            "\
+The yank command removes a previously pushed crate's version from the server's
+index. This command does not delete any data, and the crate will still be
+available for download via the registry's download link.
+
+Note that existing crates locked to a yanked version will still be able to
+download the yanked version to use it. Cargo will, however, not allow any new
+crates to be locked to any yanked version.
+",
+        )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let registry = args.registry(config)?;
+
+    ops::yank(
+        config,
+        args.value_of("crate").map(|s| s.to_string()),
+        args.value_of("vers").map(|s| s.to_string()),
+        args.value_of("token").map(|s| s.to_string()),
+        args.value_of("index").map(|s| s.to_string()),
+        args.is_present("undo"),
+        registry,
+    )?;
+    Ok(())
+}
diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs
new file mode 100644
index 000000000..20ef00c0f
--- /dev/null
+++ b/src/cargo/core/dependency.rs
@@ -0,0 +1,413 @@
+use std::fmt;
+use std::rc::Rc;
+use std::str::FromStr;
+
+use semver::VersionReq;
+use semver::ReqParseError;
+use serde::ser;
+
+use core::{PackageId, SourceId, Summary};
+use core::interning::InternedString;
+use util::{Cfg, CfgExpr, Config};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
+
+/// Information about a dependency requested by a Cargo manifest.
+/// Cheap to copy.
+#[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub struct Dependency {
+    inner: Rc<Inner>,
+}
+
+/// The data underlying a `Dependency`.
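+///
+/// Kept behind an `Rc` so that cloning a `Dependency` is cheap; the setters
+/// below use `Rc::make_mut` to copy-on-write only when a dependency is
+/// actually mutated.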
+#[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug)]
+struct Inner {
+    name: InternedString,
+    source_id: SourceId,
+    registry_id: Option<SourceId>,
+    req: VersionReq,
+    specified_req: bool,
+    kind: Kind,
+    only_match_name: bool,
+    rename: Option<String>,
+
+    optional: bool,
+    default_features: bool,
+    features: Vec<String>,
+
+    // This dependency should be used only for this platform.
+    // `None` means *all platforms*.
+    platform: Option<Platform>,
+}
+
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum Platform {
+    Name(String),
+    Cfg(CfgExpr),
+}
+
+#[derive(Serialize)]
+struct SerializedDependency<'a> {
+    name: &'a str,
+    source: &'a SourceId,
+    req: String,
+    kind: Kind,
+    rename: Option<&'a str>,
+
+    optional: bool,
+    uses_default_features: bool,
+    features: &'a [String],
+    target: Option<&'a Platform>,
+}
+
+impl ser::Serialize for Dependency {
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        SerializedDependency {
+            name: &*self.name(),
+            source: self.source_id(),
+            req: self.version_req().to_string(),
+            kind: self.kind(),
+            optional: self.is_optional(),
+            uses_default_features: self.uses_default_features(),
+            features: self.features(),
+            target: self.platform(),
+            rename: self.rename(),
+        }.serialize(s)
+    }
+}
+
+#[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug, Copy)]
+pub enum Kind {
+    Normal,
+    Development,
+    Build,
+}
+
+fn parse_req_with_deprecated(
+    req: &str,
+    extra: Option<(&PackageId, &Config)>,
+) -> CargoResult<VersionReq> {
+    match VersionReq::parse(req) {
+        Err(e) => {
+            let (inside, config) = match extra {
+                Some(pair) => pair,
+                None => return Err(e.into()),
+            };
+            match e {
+                ReqParseError::DeprecatedVersionRequirement(requirement) => {
+                    let msg = format!(
+                        "\
+parsed version requirement `{}` is no longer valid
+
+Previous versions of Cargo accepted this malformed requirement,
+but it is being deprecated. This was found when parsing the manifest
+of {} {}, and the correct version requirement is `{}`.
+
+This will soon become a hard error, so it's recommended to either
+update to a fixed version or contact the upstream maintainer about
+this warning.
+",
+                        req,
+                        inside.name(),
+                        inside.version(),
+                        requirement
+                    );
+                    config.shell().warn(&msg)?;
+
+                    Ok(requirement)
+                }
+                e => Err(e.into()),
+            }
+        }
+        Ok(v) => Ok(v),
+    }
+}
+
+impl ser::Serialize for Kind {
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        match *self {
+            Kind::Normal => None,
+            Kind::Development => Some("dev"),
+            Kind::Build => Some("build"),
+        }.serialize(s)
+    }
+}
+
+impl Dependency {
+    /// Attempt to create a `Dependency` from an entry in the manifest.
+    pub fn parse(
+        name: &str,
+        version: Option<&str>,
+        source_id: &SourceId,
+        inside: &PackageId,
+        config: &Config,
+    ) -> CargoResult<Dependency> {
+        let arg = Some((inside, config));
+        let (specified_req, version_req) = match version {
+            Some(v) => (true, parse_req_with_deprecated(v, arg)?),
+            None => (false, VersionReq::any()),
+        };
+
+        let mut ret = Dependency::new_override(name, source_id);
+        {
+            let ptr = Rc::make_mut(&mut ret.inner);
+            ptr.only_match_name = false;
+            ptr.req = version_req;
+            ptr.specified_req = specified_req;
+        }
+        Ok(ret)
+    }
+
+    /// Attempt to create a `Dependency` from an entry in the manifest,
+    /// without warning about deprecated version requirement syntax.
+    pub fn parse_no_deprecated(
+        name: &str,
+        version: Option<&str>,
+        source_id: &SourceId,
+    ) -> CargoResult<Dependency> {
+        let (specified_req, version_req) = match version {
+            Some(v) => (true, parse_req_with_deprecated(v, None)?),
+            None => (false, VersionReq::any()),
+        };
+
+        let mut ret = Dependency::new_override(name, source_id);
+        {
+            let ptr = Rc::make_mut(&mut ret.inner);
+            ptr.only_match_name = false;
+            ptr.req = version_req;
+            ptr.specified_req = specified_req;
+        }
+        Ok(ret)
+    }
+
+    pub fn new_override(name: &str, source_id: &SourceId) -> Dependency {
+        assert!(!name.is_empty());
+        Dependency {
+            inner: Rc::new(Inner {
+                name: InternedString::new(name),
+                source_id: source_id.clone(),
+                registry_id: None,
+                req: VersionReq::any(),
+                kind: Kind::Normal,
+                only_match_name: true,
+                optional: false,
+                features: Vec::new(),
+                default_features: true,
+                specified_req: false,
+                platform: None,
+                rename: None,
+            }),
+        }
+    }
+
+    pub fn version_req(&self) -> &VersionReq {
+        &self.inner.req
+    }
+
+    pub fn name(&self) -> InternedString {
+        self.inner.name
+    }
+
+    pub fn source_id(&self) -> &SourceId {
+        &self.inner.source_id
+    }
+
+    pub fn registry_id(&self) -> Option<&SourceId> {
+        self.inner.registry_id.as_ref()
+    }
+
+    pub fn set_registry_id(&mut self, registry_id: &SourceId) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).registry_id = Some(registry_id.clone());
+        self
+    }
+
+    pub fn kind(&self) -> Kind {
+        self.inner.kind
+    }
+
+    pub fn specified_req(&self) -> bool {
+        self.inner.specified_req
+    }
+
+    /// If `None`, this dependency must be built for all platforms.
+    /// If `Some`, it must only be built for the specified platform.
+    pub fn platform(&self) -> Option<&Platform> {
+        self.inner.platform.as_ref()
+    }
+
+    pub fn rename(&self) -> Option<&str> {
+        self.inner.rename.as_ref().map(|s| &**s)
+    }
+
+    pub fn set_kind(&mut self, kind: Kind) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).kind = kind;
+        self
+    }
+
+    /// Sets the list of features requested for the package.
+    pub fn set_features(&mut self, features: Vec<String>) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).features = features;
+        self
+    }
+
+    /// Sets whether the dependency requests default features of the package.
+    pub fn set_default_features(&mut self, default_features: bool) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).default_features = default_features;
+        self
+    }
+
+    /// Sets whether the dependency is optional.
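+    ///
+    /// The setters here follow a builder-like style; a hedged sketch of
+    /// chaining them (assuming `dep` is a mutable `Dependency` built above):
+    ///
+    /// ```rust,ignore
+    /// dep.set_optional(true).set_kind(Kind::Development);
+    /// ```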
+    pub fn set_optional(&mut self, optional: bool) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).optional = optional;
+        self
+    }
+
+    /// Set the source id for this dependency
+    pub fn set_source_id(&mut self, id: SourceId) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).source_id = id;
+        self
+    }
+
+    /// Set the version requirement for this dependency
+    pub fn set_version_req(&mut self, req: VersionReq) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).req = req;
+        self
+    }
+
+    pub fn set_platform(&mut self, platform: Option<Platform>) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).platform = platform;
+        self
+    }
+
+    pub fn set_rename(&mut self, rename: &str) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).rename = Some(rename.to_string());
+        self
+    }
+
+    /// Lock this dependency to depending on the specified package id
+    pub fn lock_to(&mut self, id: &PackageId) -> &mut Dependency {
+        assert_eq!(self.inner.source_id, *id.source_id());
+        assert!(self.inner.req.matches(id.version()));
+        trace!(
+            "locking dep from `{}` with `{}` at {} to {}",
+            self.name(),
+            self.version_req(),
+            self.source_id(),
+            id
+        );
+        self.set_version_req(VersionReq::exact(id.version()))
+            .set_source_id(id.source_id().clone())
+    }
+
+    /// Returns whether this is a "locked" dependency, basically whether it has
+    /// an exact version req.
+    pub fn is_locked(&self) -> bool {
+        // Kind of a hack to figure this out, but it works!
+        self.inner.req.to_string().starts_with('=')
+    }
+
+    /// Returns false if the dependency is only used to build the local package.
+    pub fn is_transitive(&self) -> bool {
+        match self.inner.kind {
+            Kind::Normal | Kind::Build => true,
+            Kind::Development => false,
+        }
+    }
+
+    pub fn is_build(&self) -> bool {
+        match self.inner.kind {
+            Kind::Build => true,
+            _ => false,
+        }
+    }
+
+    pub fn is_optional(&self) -> bool {
+        self.inner.optional
+    }
+
+    /// Returns true if the default features of the dependency are requested.
+    pub fn uses_default_features(&self) -> bool {
+        self.inner.default_features
+    }
+
+    /// Returns the list of features that are requested by the dependency.
+    pub fn features(&self) -> &[String] {
+        &self.inner.features
+    }
+
+    /// Returns true if the package (`sum`) can fulfill this dependency request.
+    pub fn matches(&self, sum: &Summary) -> bool {
+        self.matches_id(sum.package_id())
+    }
+
+    /// Returns true if the package (`id`) can fulfill this dependency request,
+    /// ignoring whether the sources match.
+    pub fn matches_ignoring_source(&self, id: &PackageId) -> bool {
+        self.name() == id.name() && self.version_req().matches(id.version())
+    }
+
+    /// Returns true if the package (`id`) can fulfill this dependency request.
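+    ///
+    /// A sketch of the intended contract (assuming `pkg_id` names the same
+    /// package, a matching version, and the same source as `dep`):
+    ///
+    /// ```rust,ignore
+    /// assert!(dep.matches_id(&pkg_id));
+    /// ```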
+ pub fn matches_id(&self, id: &PackageId) -> bool { + self.inner.name == id.name() + && (self.inner.only_match_name + || (self.inner.req.matches(id.version()) + && &self.inner.source_id == id.source_id())) + } + + pub fn map_source(mut self, to_replace: &SourceId, replace_with: &SourceId) -> Dependency { + if self.source_id() != to_replace { + self + } else { + self.set_source_id(replace_with.clone()); + self + } + } +} + +impl Platform { + pub fn matches(&self, name: &str, cfg: Option<&[Cfg]>) -> bool { + match *self { + Platform::Name(ref p) => p == name, + Platform::Cfg(ref p) => match cfg { + Some(cfg) => p.matches(cfg), + None => false, + }, + } + } +} + +impl ser::Serialize for Platform { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + self.to_string().serialize(s) + } +} + +impl FromStr for Platform { + type Err = CargoError; + + fn from_str(s: &str) -> CargoResult { + if s.starts_with("cfg(") && s.ends_with(')') { + let s = &s[4..s.len() - 1]; + let p = s.parse() + .map(Platform::Cfg) + .chain_err(|| format_err!("failed to parse `{}` as a cfg expression", s))?; + Ok(p) + } else { + Ok(Platform::Name(s.to_string())) + } + } +} + +impl fmt::Display for Platform { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Platform::Name(ref n) => n.fmt(f), + Platform::Cfg(ref e) => write!(f, "cfg({})", e), + } + } +} diff --git a/src/cargo/core/features.rs b/src/cargo/core/features.rs new file mode 100644 index 000000000..3ce6a8267 --- /dev/null +++ b/src/cargo/core/features.rs @@ -0,0 +1,343 @@ +//! Support for nightly features in Cargo itself +//! +//! This file is the version of `feature_gate.rs` in upstream Rust for Cargo +//! itself and is intended to be the avenue for which new features in Cargo are +//! gated by default and then eventually stabilized. All known stable and +//! unstable features are tracked in this file. +//! +//! If you're reading this then you're likely interested in adding a feature to +//! Cargo, and the good news is that it shouldn't be too hard! To do this you'll +//! want to follow these steps: +//! +//! 1. Add your feature. Do this by searching for "look here" in this file and +//! expanding the macro invocation that lists all features with your new +//! feature. +//! +//! 2. Find the appropriate place to place the feature gate in Cargo itself. If +//! you're extending the manifest format you'll likely just want to modify +//! the `Manifest::feature_gate` function, but otherwise you may wish to +//! place the feature gate elsewhere in Cargo. +//! +//! 3. To actually perform the feature gate, you'll want to have code that looks +//! like: +//! +//! ```rust,ignore +//! use core::{Feature, Features}; +//! +//! let feature = Feature::launch_into_space(); +//! package.manifest().features().require(feature).chain_err(|| { +//! "launching Cargo into space right now is unstable and may result in \ +//! unintended damage to your codebase, use with caution" +//! })?; +//! ``` +//! +//! Notably you'll notice the `require` function called with your `Feature`, and +//! then you use `chain_err` to tack on more context for why the feature was +//! required when the feature isn't activated. +//! +//! And hopefully that's it! Bear with us though that this is, at the time of +//! this writing, a very new feature in Cargo. If the process differs from this +//! we'll be sure to update this documentation! 
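+//!
+//! As a quick illustration (a sketch, not part of the module itself), the
+//! `Features` parser defined below can be exercised directly; the stable
+//! dummy feature merely emits a warning rather than requiring nightly:
+//!
+//! ```rust,ignore
+//! let mut warnings = Vec::new();
+//! let features = Features::new(&["test-dummy-stable".to_string()], &mut warnings)?;
+//! assert!(features.is_enabled(Feature::test_dummy_stable()));
+//! ```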
+ +use std::env; +use std::fmt; +use std::str::FromStr; + +use util::errors::CargoResult; + +/// The epoch of the compiler (RFC 2052) +#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq, Serialize, Deserialize)] +pub enum Epoch { + /// The 2015 epoch + Epoch2015, + /// The 2018 epoch + Epoch2018, +} + +impl fmt::Display for Epoch { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Epoch::Epoch2015 => f.write_str("2015"), + Epoch::Epoch2018 => f.write_str("2018"), + } + } +} +impl FromStr for Epoch { + type Err = (); + fn from_str(s: &str) -> Result { + match s { + "2015" => Ok(Epoch::Epoch2015), + "2018" => Ok(Epoch::Epoch2018), + _ => Err(()), + } + } +} + +enum Status { + Stable, + Unstable, +} + +macro_rules! features { + ( + pub struct Features { + $([$stab:ident] $feature:ident: bool,)* + } + ) => ( + #[derive(Default, Clone, Debug)] + pub struct Features { + $($feature: bool,)* + activated: Vec, + } + + impl Feature { + $( + pub fn $feature() -> &'static Feature { + fn get(features: &Features) -> bool { + features.$feature + } + static FEAT: Feature = Feature { + name: stringify!($feature), + get, + }; + &FEAT + } + )* + + fn is_enabled(&self, features: &Features) -> bool { + (self.get)(features) + } + } + + impl Features { + fn status(&mut self, feature: &str) -> Option<(&mut bool, Status)> { + if feature.contains("_") { + return None + } + let feature = feature.replace("-", "_"); + $( + if feature == stringify!($feature) { + return Some((&mut self.$feature, stab!($stab))) + } + )* + None + } + } + ) +} + +macro_rules! stab { + (stable) => (Status::Stable); + (unstable) => (Status::Unstable); +} + +/// A listing of all features in Cargo +/// +/// "look here" +/// +/// This is the macro that lists all stable and unstable features in Cargo. +/// You'll want to add to this macro whenever you add a feature to Cargo, also +/// following the directions above. +/// +/// Note that all feature names here are valid Rust identifiers, but the `_` +/// character is translated to `-` when specified in the `cargo-features` +/// manifest entry in `Cargo.toml`. +features! { + pub struct Features { + + // A dummy feature that doesn't actually gate anything, but it's used in + // testing to ensure that we can enable stable features. + [stable] test_dummy_stable: bool, + + // A dummy feature that gates the usage of the `im-a-teapot` manifest + // entry. This is basically just intended for tests. + [unstable] test_dummy_unstable: bool, + + // Downloading packages from alternative registry indexes. 
+ [unstable] alternative_registries: bool, + + // Using epochs + [unstable] epoch: bool, + + // Renaming a package in the manifest via the `package` key + [unstable] rename_dependency: bool, + + // Whether a lock file is published with this crate + [unstable] publish_lockfile: bool, + } +} + +pub struct Feature { + name: &'static str, + get: fn(&Features) -> bool, +} + +impl Features { + pub fn new(features: &[String], warnings: &mut Vec) -> CargoResult { + let mut ret = Features::default(); + for feature in features { + ret.add(feature, warnings)?; + ret.activated.push(feature.to_string()); + } + Ok(ret) + } + + fn add(&mut self, feature: &str, warnings: &mut Vec) -> CargoResult<()> { + let (slot, status) = match self.status(feature) { + Some(p) => p, + None => bail!("unknown cargo feature `{}`", feature), + }; + + if *slot { + bail!("the cargo feature `{}` has already been activated", feature); + } + + match status { + Status::Stable => { + let warning = format!( + "the cargo feature `{}` is now stable \ + and is no longer necessary to be listed \ + in the manifest", + feature + ); + warnings.push(warning); + } + Status::Unstable if !nightly_features_allowed() => bail!( + "the cargo feature `{}` requires a nightly version of \ + Cargo, but this is the `{}` channel", + feature, + channel() + ), + Status::Unstable => {} + } + + *slot = true; + + Ok(()) + } + + pub fn activated(&self) -> &[String] { + &self.activated + } + + pub fn require(&self, feature: &Feature) -> CargoResult<()> { + if feature.is_enabled(self) { + Ok(()) + } else { + let feature = feature.name.replace("_", "-"); + let mut msg = format!("feature `{}` is required", feature); + + if nightly_features_allowed() { + let s = format!( + "\n\nconsider adding `cargo-features = [\"{0}\"]` \ + to the manifest", + feature + ); + msg.push_str(&s); + } else { + let s = format!( + "\n\n\ + this Cargo does not support nightly features, but if you\n\ + switch to nightly channel you can add\n\ + `cargo-features = [\"{}\"]` to enable this feature", + feature + ); + msg.push_str(&s); + } + bail!("{}", msg); + } + } + + pub fn is_enabled(&self, feature: &Feature) -> bool { + feature.is_enabled(self) + } +} + +/// A parsed representation of all unstable flags that Cargo accepts. +/// +/// Cargo, like `rustc`, accepts a suite of `-Z` flags which are intended for +/// gating unstable functionality to Cargo. These flags are only available on +/// the nightly channel of Cargo. +/// +/// This struct doesn't have quite the same convenience macro that the features +/// have above, but the procedure should still be relatively stable for adding a +/// new unstable flag: +/// +/// 1. First, add a field to this `CliUnstable` structure. All flags are allowed +/// to have a value as the `-Z` flags are either of the form `-Z foo` or +/// `-Z foo=bar`, and it's up to you how to parse `bar`. +/// +/// 2. Add an arm to the match statement in `CliUnstable::add` below to match on +/// your new flag. The key (`k`) is what you're matching on and the value is +/// in `v`. +/// +/// 3. (optional) Add a new parsing function to parse your datatype. As of now +/// there's an example for `bool`, but more can be added! +/// +/// 4. In Cargo use `config.cli_unstable()` to get a reference to this structure +/// and then test for your flag or your value and act accordingly. +/// +/// If you have any trouble with this, please let us know! 
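+///
+/// A hedged sketch of the parsing entry point (nightly-only in practice,
+/// since `-Z` flags are rejected on other channels):
+///
+/// ```rust,ignore
+/// let mut unstable = CliUnstable::default();
+/// unstable.parse(&["offline".to_string(), "print-im-a-teapot=yes".to_string()])?;
+/// assert!(unstable.offline && unstable.print_im_a_teapot);
+/// ```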
+#[derive(Default, Debug)]
+pub struct CliUnstable {
+    pub print_im_a_teapot: bool,
+    pub unstable_options: bool,
+    pub offline: bool,
+    pub no_index_update: bool,
+    pub avoid_dev_deps: bool,
+    pub minimal_versions: bool,
+}
+
+impl CliUnstable {
+    pub fn parse(&mut self, flags: &[String]) -> CargoResult<()> {
+        if !flags.is_empty() && !nightly_features_allowed() {
+            bail!("the `-Z` flag is only accepted on the nightly channel of Cargo")
+        }
+        for flag in flags {
+            self.add(flag)?;
+        }
+        Ok(())
+    }
+
+    fn add(&mut self, flag: &str) -> CargoResult<()> {
+        let mut parts = flag.splitn(2, '=');
+        let k = parts.next().unwrap();
+        let v = parts.next();
+
+        fn parse_bool(value: Option<&str>) -> CargoResult<bool> {
+            match value {
+                None | Some("yes") => Ok(true),
+                Some("no") => Ok(false),
+                Some(s) => bail!("expected `no` or `yes`, found: {}", s),
+            }
+        }
+
+        match k {
+            "print-im-a-teapot" => self.print_im_a_teapot = parse_bool(v)?,
+            "unstable-options" => self.unstable_options = true,
+            "offline" => self.offline = true,
+            "no-index-update" => self.no_index_update = true,
+            "avoid-dev-deps" => self.avoid_dev_deps = true,
+            "minimal-versions" => self.minimal_versions = true,
+            _ => bail!("unknown `-Z` flag specified: {}", k),
+        }
+
+        Ok(())
+    }
+}
+
+fn channel() -> String {
+    env::var("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS").unwrap_or_else(|_| {
+        ::version()
+            .cfg_info
+            .map(|c| c.release_channel)
+            .unwrap_or_else(|| String::from("dev"))
+    })
+}
+
+fn nightly_features_allowed() -> bool {
+    match &channel()[..] {
+        "nightly" | "dev" => true,
+        _ => false,
+    }
+}
diff --git a/src/cargo/core/interning.rs b/src/cargo/core/interning.rs
new file mode 100644
index 000000000..395ce43ca
--- /dev/null
+++ b/src/cargo/core/interning.rs
@@ -0,0 +1,96 @@
+use std::fmt;
+use std::sync::RwLock;
+use std::collections::HashSet;
+use std::slice;
+use std::str;
+use std::mem;
+use std::cmp::Ordering;
+use std::ops::Deref;
+use std::hash::{Hash, Hasher};
+
+pub fn leak(s: String) -> &'static str {
+    let boxed = s.into_boxed_str();
+    let ptr = boxed.as_ptr();
+    let len = boxed.len();
+    mem::forget(boxed);
+    unsafe {
+        let slice = slice::from_raw_parts(ptr, len);
+        str::from_utf8_unchecked(slice)
+    }
+}
+
+lazy_static! {
+    static ref STRING_CACHE: RwLock<HashSet<&'static str>> =
+        RwLock::new(HashSet::new());
+}
+
+#[derive(Eq, PartialEq, Clone, Copy)]
+pub struct InternedString {
+    ptr: *const u8,
+    len: usize,
+}
+
+impl InternedString {
+    pub fn new(str: &str) -> InternedString {
+        let mut cache = STRING_CACHE.write().unwrap();
+        if let Some(&s) = cache.get(str) {
+            return InternedString {
+                ptr: s.as_ptr(),
+                len: s.len(),
+            };
+        }
+        let s = leak(str.to_string());
+        cache.insert(s);
+        InternedString {
+            ptr: s.as_ptr(),
+            len: s.len(),
+        }
+    }
+
+    pub fn to_inner(&self) -> &'static str {
+        unsafe {
+            let slice = slice::from_raw_parts(self.ptr, self.len);
+            str::from_utf8_unchecked(slice)
+        }
+    }
+}
+
+impl Deref for InternedString {
+    type Target = str;
+
+    fn deref(&self) -> &'static str {
+        self.to_inner()
+    }
+}
+
+impl Hash for InternedString {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.to_inner().hash(state);
+    }
+}
+
+impl fmt::Debug for InternedString {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Debug::fmt(self.to_inner(), f)
+    }
+}
+
+impl fmt::Display for InternedString {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(self.to_inner(), f)
+    }
+}
+
+impl Ord for InternedString {
+    fn cmp(&self, other: &InternedString) -> Ordering {
+        self.to_inner().cmp(&*other)
+    }
+}
+
+impl PartialOrd for InternedString {
+    fn partial_cmp(&self, other: &InternedString) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+unsafe impl Send for InternedString {}
+unsafe impl Sync for InternedString {}
diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs
new file mode 100644
index 000000000..b3ab42659
--- /dev/null
+++ b/src/cargo/core/manifest.rs
@@ -0,0 +1,815 @@
+use std::collections::{BTreeMap, HashMap};
+use std::fmt;
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
+use std::hash::{Hash, Hasher};
+
+use semver::Version;
+use serde::ser;
+use url::Url;
+
+use core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary};
+use core::{Epoch, Feature, Features, WorkspaceConfig};
+use core::interning::InternedString;
+use util::Config;
+use util::toml::TomlManifest;
+use util::errors::*;
+
+pub enum EitherManifest {
+    Real(Manifest),
+    Virtual(VirtualManifest),
+}
+
+/// Contains all the information about a package, as loaded from a Cargo.toml.
+#[derive(Clone, Debug)]
+pub struct Manifest {
+    summary: Summary,
+    targets: Vec<Target>,
+    links: Option<String>,
+    warnings: Vec<DelayedWarning>,
+    exclude: Vec<String>,
+    include: Vec<String>,
+    metadata: ManifestMetadata,
+    profiles: Profiles,
+    publish: Option<Vec<String>>,
+    publish_lockfile: bool,
+    replace: Vec<(PackageIdSpec, Dependency)>,
+    patch: HashMap<Url, Vec<Dependency>>,
+    workspace: WorkspaceConfig,
+    original: Rc<TomlManifest>,
+    features: Features,
+    epoch: Epoch,
+    im_a_teapot: Option<bool>,
+}
+
+/// When parsing `Cargo.toml`, some warnings should be silenced
+/// if the manifest comes from a dependency. `DelayedWarning`
+/// allows this delayed emission of warnings.
+#[derive(Clone, Debug)]
+pub struct DelayedWarning {
+    pub message: String,
+    pub is_critical: bool,
+}
+
+#[derive(Clone, Debug)]
+pub struct VirtualManifest {
+    replace: Vec<(PackageIdSpec, Dependency)>,
+    patch: HashMap<Url, Vec<Dependency>>,
+    workspace: WorkspaceConfig,
+    profiles: Profiles,
+}
+
+/// General metadata about a package which is just blindly uploaded to the
+/// registry.
+///
+/// Note that many of these fields can contain invalid values such as the
+/// homepage, repository, documentation, or license. These fields are not
+/// validated by cargo itself, but rather it is up to the registry when uploaded
+/// to validate these fields.
Cargo will itself accept any valid TOML +/// specification for these values. +#[derive(PartialEq, Clone, Debug)] +pub struct ManifestMetadata { + pub authors: Vec, + pub keywords: Vec, + pub categories: Vec, + pub license: Option, + pub license_file: Option, + pub description: Option, // not markdown + pub readme: Option, // file, not contents + pub homepage: Option, // url + pub repository: Option, // url + pub documentation: Option, // url + pub badges: BTreeMap>, + pub links: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub enum LibKind { + Lib, + Rlib, + Dylib, + ProcMacro, + Other(String), +} + +impl LibKind { + pub fn from_str(string: &str) -> LibKind { + match string { + "lib" => LibKind::Lib, + "rlib" => LibKind::Rlib, + "dylib" => LibKind::Dylib, + "proc-macro" => LibKind::ProcMacro, + s => LibKind::Other(s.to_string()), + } + } + + /// Returns the argument suitable for `--crate-type` to pass to rustc. + pub fn crate_type(&self) -> &str { + match *self { + LibKind::Lib => "lib", + LibKind::Rlib => "rlib", + LibKind::Dylib => "dylib", + LibKind::ProcMacro => "proc-macro", + LibKind::Other(ref s) => s, + } + } + + pub fn linkable(&self) -> bool { + match *self { + LibKind::Lib | LibKind::Rlib | LibKind::Dylib | LibKind::ProcMacro => true, + LibKind::Other(..) => false, + } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum TargetKind { + Lib(Vec), + Bin, + Test, + Bench, + ExampleLib(Vec), + ExampleBin, + CustomBuild, +} + +impl ser::Serialize for TargetKind { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + use self::TargetKind::*; + match *self { + Lib(ref kinds) => kinds.iter().map(LibKind::crate_type).collect(), + Bin => vec!["bin"], + ExampleBin | ExampleLib(_) => vec!["example"], + Test => vec!["test"], + CustomBuild => vec!["custom-build"], + Bench => vec!["bench"], + }.serialize(s) + } +} + +// Note that most of the fields here are skipped when serializing because we +// don't want to export them just yet (becomes a public API of Cargo). Others +// though are definitely needed! +#[derive(Clone, PartialEq, Eq, Debug, Hash, Serialize)] +pub struct Profile { + pub opt_level: String, + #[serde(skip_serializing)] pub lto: Lto, + #[serde(skip_serializing)] pub codegen_units: Option, // None = use rustc default + #[serde(skip_serializing)] pub rustc_args: Option>, + #[serde(skip_serializing)] pub rustdoc_args: Option>, + pub debuginfo: Option, + pub debug_assertions: bool, + pub overflow_checks: bool, + #[serde(skip_serializing)] pub rpath: bool, + pub test: bool, + #[serde(skip_serializing)] pub doc: bool, + #[serde(skip_serializing)] pub run_custom_build: bool, + #[serde(skip_serializing)] pub check: bool, + #[serde(skip_serializing)] pub panic: Option, + #[serde(skip_serializing)] pub incremental: bool, +} + +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub enum Lto { + Bool(bool), + Named(String), +} + +#[derive(Default, Clone, Debug, PartialEq, Eq)] +pub struct Profiles { + pub release: Profile, + pub dev: Profile, + pub test: Profile, + pub test_deps: Profile, + pub bench: Profile, + pub bench_deps: Profile, + pub doc: Profile, + pub custom_build: Profile, + pub check: Profile, + pub check_test: Profile, + pub doctest: Profile, +} + +/// Information about a binary, a library, an example, etc. that is part of the +/// package. 
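+///
+/// For example (a sketch; the name and path are illustrative, and the
+/// path must be absolute):
+///
+/// ```rust,ignore
+/// let lib = Target::lib_target("foo", vec![LibKind::Lib], PathBuf::from("/pkg/src/lib.rs"));
+/// assert!(lib.is_lib() && lib.documented());
+/// ```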
+#[derive(Clone, Hash, PartialEq, Eq, Debug)] +pub struct Target { + kind: TargetKind, + name: String, + // Note that the `src_path` here is excluded from the `Hash` implementation + // as it's absolute currently and is otherwise a little too brittle for + // causing rebuilds. Instead the hash for the path that we send to the + // compiler is handled elsewhere. + src_path: NonHashedPathBuf, + required_features: Option>, + tested: bool, + benched: bool, + doc: bool, + doctest: bool, + harness: bool, // whether to use the test harness (--test) + for_host: bool, +} + +#[derive(Clone, PartialEq, Eq, Debug)] +struct NonHashedPathBuf { + path: PathBuf, +} + +impl Hash for NonHashedPathBuf { + fn hash(&self, _: &mut H) { + // ... + } +} + +#[derive(Serialize)] +struct SerializedTarget<'a> { + /// Is this a `--bin bin`, `--lib`, `--example ex`? + /// Serialized as a list of strings for historical reasons. + kind: &'a TargetKind, + /// Corresponds to `--crate-type` compiler attribute. + /// See https://doc.rust-lang.org/reference/linkage.html + crate_types: Vec<&'a str>, + name: &'a str, + src_path: &'a PathBuf, +} + +impl ser::Serialize for Target { + fn serialize(&self, s: S) -> Result { + SerializedTarget { + kind: &self.kind, + crate_types: self.rustc_crate_types(), + name: &self.name, + src_path: &self.src_path.path, + }.serialize(s) + } +} + +impl Manifest { + pub fn new( + summary: Summary, + targets: Vec, + exclude: Vec, + include: Vec, + links: Option, + metadata: ManifestMetadata, + profiles: Profiles, + publish: Option>, + publish_lockfile: bool, + replace: Vec<(PackageIdSpec, Dependency)>, + patch: HashMap>, + workspace: WorkspaceConfig, + features: Features, + epoch: Epoch, + im_a_teapot: Option, + original: Rc, + ) -> Manifest { + Manifest { + summary, + targets, + warnings: Vec::new(), + exclude, + include, + links, + metadata, + profiles, + publish, + replace, + patch, + workspace, + features, + epoch, + original, + im_a_teapot, + publish_lockfile, + } + } + + pub fn dependencies(&self) -> &[Dependency] { + self.summary.dependencies() + } + pub fn exclude(&self) -> &[String] { + &self.exclude + } + pub fn include(&self) -> &[String] { + &self.include + } + pub fn metadata(&self) -> &ManifestMetadata { + &self.metadata + } + pub fn name(&self) -> InternedString { + self.package_id().name() + } + pub fn package_id(&self) -> &PackageId { + self.summary.package_id() + } + pub fn summary(&self) -> &Summary { + &self.summary + } + pub fn targets(&self) -> &[Target] { + &self.targets + } + pub fn version(&self) -> &Version { + self.package_id().version() + } + pub fn warnings(&self) -> &[DelayedWarning] { + &self.warnings + } + pub fn profiles(&self) -> &Profiles { + &self.profiles + } + pub fn publish(&self) -> &Option> { + &self.publish + } + pub fn publish_lockfile(&self) -> bool { + self.publish_lockfile + } + pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { + &self.replace + } + pub fn original(&self) -> &TomlManifest { + &self.original + } + pub fn patch(&self) -> &HashMap> { + &self.patch + } + pub fn links(&self) -> Option<&str> { + self.links.as_ref().map(|s| &s[..]) + } + + pub fn workspace_config(&self) -> &WorkspaceConfig { + &self.workspace + } + + pub fn features(&self) -> &Features { + &self.features + } + + pub fn add_warning(&mut self, s: String) { + self.warnings.push(DelayedWarning { + message: s, + is_critical: false, + }) + } + + pub fn add_critical_warning(&mut self, s: String) { + self.warnings.push(DelayedWarning { + message: s, + is_critical: true, + }) + 
} + + pub fn set_summary(&mut self, summary: Summary) { + self.summary = summary; + } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Manifest { + Manifest { + summary: self.summary.map_source(to_replace, replace_with), + ..self + } + } + + pub fn feature_gate(&self) -> CargoResult<()> { + if self.im_a_teapot.is_some() { + self.features + .require(Feature::test_dummy_unstable()) + .chain_err(|| { + format_err!( + "the `im-a-teapot` manifest key is unstable and may \ + not work properly in England" + ) + })?; + } + + Ok(()) + } + + // Just a helper function to test out `-Z` flags on Cargo + pub fn print_teapot(&self, config: &Config) { + if let Some(teapot) = self.im_a_teapot { + if config.cli_unstable().print_im_a_teapot { + println!("im-a-teapot = {}", teapot); + } + } + } + + pub fn epoch(&self) -> Epoch { + self.epoch + } +} + +impl VirtualManifest { + pub fn new( + replace: Vec<(PackageIdSpec, Dependency)>, + patch: HashMap>, + workspace: WorkspaceConfig, + profiles: Profiles, + ) -> VirtualManifest { + VirtualManifest { + replace, + patch, + workspace, + profiles, + } + } + + pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { + &self.replace + } + + pub fn patch(&self) -> &HashMap> { + &self.patch + } + + pub fn workspace_config(&self) -> &WorkspaceConfig { + &self.workspace + } + + pub fn profiles(&self) -> &Profiles { + &self.profiles + } +} + +impl Target { + fn with_path(src_path: PathBuf) -> Target { + assert!(src_path.is_absolute()); + Target { + kind: TargetKind::Bin, + name: String::new(), + src_path: NonHashedPathBuf { path: src_path }, + required_features: None, + doc: false, + doctest: false, + harness: true, + for_host: false, + tested: true, + benched: true, + } + } + + pub fn lib_target(name: &str, crate_targets: Vec, src_path: PathBuf) -> Target { + Target { + kind: TargetKind::Lib(crate_targets), + name: name.to_string(), + doctest: true, + doc: true, + ..Target::with_path(src_path) + } + } + + pub fn bin_target( + name: &str, + src_path: PathBuf, + required_features: Option>, + ) -> Target { + Target { + kind: TargetKind::Bin, + name: name.to_string(), + required_features, + doc: true, + ..Target::with_path(src_path) + } + } + + /// Builds a `Target` corresponding to the `build = "build.rs"` entry. 
+ pub fn custom_build_target(name: &str, src_path: PathBuf) -> Target { + Target { + kind: TargetKind::CustomBuild, + name: name.to_string(), + for_host: true, + benched: false, + tested: false, + ..Target::with_path(src_path) + } + } + + pub fn example_target( + name: &str, + crate_targets: Vec, + src_path: PathBuf, + required_features: Option>, + ) -> Target { + let kind = if crate_targets.is_empty() { + TargetKind::ExampleBin + } else { + TargetKind::ExampleLib(crate_targets) + }; + + Target { + kind, + name: name.to_string(), + required_features, + benched: false, + ..Target::with_path(src_path) + } + } + + pub fn test_target( + name: &str, + src_path: PathBuf, + required_features: Option>, + ) -> Target { + Target { + kind: TargetKind::Test, + name: name.to_string(), + required_features, + benched: false, + ..Target::with_path(src_path) + } + } + + pub fn bench_target( + name: &str, + src_path: PathBuf, + required_features: Option>, + ) -> Target { + Target { + kind: TargetKind::Bench, + name: name.to_string(), + required_features, + tested: false, + ..Target::with_path(src_path) + } + } + + pub fn name(&self) -> &str { + &self.name + } + pub fn crate_name(&self) -> String { + self.name.replace("-", "_") + } + pub fn src_path(&self) -> &Path { + &self.src_path.path + } + pub fn required_features(&self) -> Option<&Vec> { + self.required_features.as_ref() + } + pub fn kind(&self) -> &TargetKind { + &self.kind + } + pub fn tested(&self) -> bool { + self.tested + } + pub fn harness(&self) -> bool { + self.harness + } + pub fn documented(&self) -> bool { + self.doc + } + pub fn for_host(&self) -> bool { + self.for_host + } + pub fn benched(&self) -> bool { + self.benched + } + + pub fn doctested(&self) -> bool { + self.doctest && match self.kind { + TargetKind::Lib(ref kinds) => kinds + .iter() + .any(|k| *k == LibKind::Rlib || *k == LibKind::Lib || *k == LibKind::ProcMacro), + _ => false, + } + } + + pub fn allows_underscores(&self) -> bool { + self.is_bin() || self.is_example() || self.is_custom_build() + } + + pub fn is_lib(&self) -> bool { + match self.kind { + TargetKind::Lib(_) => true, + _ => false, + } + } + + pub fn is_dylib(&self) -> bool { + match self.kind { + TargetKind::Lib(ref libs) => libs.iter().any(|l| *l == LibKind::Dylib), + _ => false, + } + } + + pub fn is_cdylib(&self) -> bool { + let libs = match self.kind { + TargetKind::Lib(ref libs) => libs, + _ => return false, + }; + libs.iter().any(|l| match *l { + LibKind::Other(ref s) => s == "cdylib", + _ => false, + }) + } + + pub fn linkable(&self) -> bool { + match self.kind { + TargetKind::Lib(ref kinds) => kinds.iter().any(|k| k.linkable()), + _ => false, + } + } + + pub fn is_bin(&self) -> bool { + self.kind == TargetKind::Bin + } + + pub fn is_example(&self) -> bool { + match self.kind { + TargetKind::ExampleBin | TargetKind::ExampleLib(..) => true, + _ => false, + } + } + + pub fn is_bin_example(&self) -> bool { + // Needed for --all-examples in contexts where only runnable examples make sense + match self.kind { + TargetKind::ExampleBin => true, + _ => false, + } + } + + pub fn is_test(&self) -> bool { + self.kind == TargetKind::Test + } + pub fn is_bench(&self) -> bool { + self.kind == TargetKind::Bench + } + pub fn is_custom_build(&self) -> bool { + self.kind == TargetKind::CustomBuild + } + + /// Returns the arguments suitable for `--crate-type` to pass to rustc. 
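+    ///
+    /// A hedged sketch of the mapping (assuming `target` is a library target
+    /// declared with both the `rlib` and `dylib` crate types):
+    ///
+    /// ```rust,ignore
+    /// assert_eq!(target.rustc_crate_types(), vec!["rlib", "dylib"]);
+    /// ```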
+ pub fn rustc_crate_types(&self) -> Vec<&str> { + match self.kind { + TargetKind::Lib(ref kinds) | TargetKind::ExampleLib(ref kinds) => { + kinds.iter().map(LibKind::crate_type).collect() + } + TargetKind::CustomBuild + | TargetKind::Bench + | TargetKind::Test + | TargetKind::ExampleBin + | TargetKind::Bin => vec!["bin"], + } + } + + pub fn can_lto(&self) -> bool { + match self.kind { + TargetKind::Lib(ref v) => { + !v.contains(&LibKind::Rlib) && !v.contains(&LibKind::Dylib) + && !v.contains(&LibKind::Lib) + } + _ => true, + } + } + + pub fn set_tested(&mut self, tested: bool) -> &mut Target { + self.tested = tested; + self + } + pub fn set_benched(&mut self, benched: bool) -> &mut Target { + self.benched = benched; + self + } + pub fn set_doctest(&mut self, doctest: bool) -> &mut Target { + self.doctest = doctest; + self + } + pub fn set_for_host(&mut self, for_host: bool) -> &mut Target { + self.for_host = for_host; + self + } + pub fn set_harness(&mut self, harness: bool) -> &mut Target { + self.harness = harness; + self + } + pub fn set_doc(&mut self, doc: bool) -> &mut Target { + self.doc = doc; + self + } +} + +impl fmt::Display for Target { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.kind { + TargetKind::Lib(..) => write!(f, "Target(lib)"), + TargetKind::Bin => write!(f, "Target(bin: {})", self.name), + TargetKind::Test => write!(f, "Target(test: {})", self.name), + TargetKind::Bench => write!(f, "Target(bench: {})", self.name), + TargetKind::ExampleBin | TargetKind::ExampleLib(..) => { + write!(f, "Target(example: {})", self.name) + } + TargetKind::CustomBuild => write!(f, "Target(script)"), + } + } +} + +impl Profile { + pub fn default_dev() -> Profile { + Profile { + debuginfo: Some(2), + debug_assertions: true, + overflow_checks: true, + incremental: true, + ..Profile::default() + } + } + + pub fn default_release() -> Profile { + Profile { + opt_level: "3".to_string(), + debuginfo: None, + ..Profile::default() + } + } + + pub fn default_test() -> Profile { + Profile { + test: true, + ..Profile::default_dev() + } + } + + pub fn default_bench() -> Profile { + Profile { + test: true, + ..Profile::default_release() + } + } + + pub fn default_doc() -> Profile { + Profile { + doc: true, + ..Profile::default_dev() + } + } + + pub fn default_custom_build() -> Profile { + Profile { + run_custom_build: true, + ..Profile::default_dev() + } + } + + pub fn default_check() -> Profile { + Profile { + check: true, + ..Profile::default_dev() + } + } + + pub fn default_check_test() -> Profile { + Profile { + check: true, + test: true, + ..Profile::default_dev() + } + } + + pub fn default_doctest() -> Profile { + Profile { + doc: true, + test: true, + ..Profile::default_dev() + } + } +} + +impl Default for Profile { + fn default() -> Profile { + Profile { + opt_level: "0".to_string(), + lto: Lto::Bool(false), + codegen_units: None, + rustc_args: None, + rustdoc_args: None, + debuginfo: None, + debug_assertions: false, + overflow_checks: false, + rpath: false, + test: false, + doc: false, + run_custom_build: false, + check: false, + panic: None, + incremental: false, + } + } +} + +impl fmt::Display for Profile { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if self.test { + write!(f, "Profile(test)") + } else if self.doc { + write!(f, "Profile(doc)") + } else if self.run_custom_build { + write!(f, "Profile(run)") + } else if self.check { + write!(f, "Profile(check)") + } else { + write!(f, "Profile(build)") + } + } +} diff --git a/src/cargo/core/mod.rs 
b/src/cargo/core/mod.rs new file mode 100644 index 000000000..dcd94cc96 --- /dev/null +++ b/src/cargo/core/mod.rs @@ -0,0 +1,27 @@ +pub use self::dependency::Dependency; +pub use self::features::{CliUnstable, Epoch, Feature, Features}; +pub use self::manifest::{EitherManifest, VirtualManifest}; +pub use self::manifest::{LibKind, Manifest, Profile, Profiles, Target, TargetKind}; +pub use self::package::{Package, PackageSet}; +pub use self::package_id::PackageId; +pub use self::package_id_spec::PackageIdSpec; +pub use self::registry::Registry; +pub use self::resolver::Resolve; +pub use self::shell::{Shell, Verbosity}; +pub use self::source::{GitReference, Source, SourceId, SourceMap}; +pub use self::summary::Summary; +pub use self::workspace::{Members, Workspace, WorkspaceConfig, WorkspaceRootConfig}; + +pub mod source; +pub mod package; +pub mod package_id; +pub mod dependency; +pub mod manifest; +pub mod resolver; +pub mod summary; +pub mod shell; +pub mod registry; +mod interning; +mod package_id_spec; +mod workspace; +mod features; diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs new file mode 100644 index 000000000..f3ce2eaf2 --- /dev/null +++ b/src/cargo/core/package.rs @@ -0,0 +1,240 @@ +use std::cell::{Ref, RefCell}; +use std::collections::{BTreeMap, HashMap}; +use std::fmt; +use std::hash; +use std::path::{Path, PathBuf}; + +use semver::Version; +use serde::ser; +use toml; +use lazycell::LazyCell; + +use core::{Dependency, Manifest, PackageId, SourceId, Target}; +use core::{SourceMap, Summary}; +use core::interning::InternedString; +use util::{internal, lev_distance, Config}; +use util::errors::{CargoResult, CargoResultExt}; + +/// Information about a package that is available somewhere in the file system. +/// +/// A package is a `Cargo.toml` file plus all the files that are part of it. +// TODO: Is manifest_path a relic? +#[derive(Clone, Debug)] +pub struct Package { + /// The package's manifest + manifest: Manifest, + /// The root of the package + manifest_path: PathBuf, +} + +/// A Package in a form where `Serialize` can be derived. 
+#[derive(Serialize)] +struct SerializedPackage<'a> { + name: &'a str, + version: &'a str, + id: &'a PackageId, + license: Option<&'a str>, + license_file: Option<&'a str>, + description: Option<&'a str>, + source: &'a SourceId, + dependencies: &'a [Dependency], + targets: &'a [Target], + features: &'a BTreeMap>, + manifest_path: &'a str, +} + +impl ser::Serialize for Package { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + let summary = self.manifest.summary(); + let package_id = summary.package_id(); + let manmeta = self.manifest.metadata(); + let license = manmeta.license.as_ref().map(String::as_ref); + let license_file = manmeta.license_file.as_ref().map(String::as_ref); + let description = manmeta.description.as_ref().map(String::as_ref); + + SerializedPackage { + name: &*package_id.name(), + version: &package_id.version().to_string(), + id: package_id, + license, + license_file, + description, + source: summary.source_id(), + dependencies: summary.dependencies(), + targets: self.manifest.targets(), + features: summary.features(), + manifest_path: &self.manifest_path.display().to_string(), + }.serialize(s) + } +} + +impl Package { + /// Create a package from a manifest and its location + pub fn new(manifest: Manifest, manifest_path: &Path) -> Package { + Package { + manifest, + manifest_path: manifest_path.to_path_buf(), + } + } + + /// Get the manifest dependencies + pub fn dependencies(&self) -> &[Dependency] { + self.manifest.dependencies() + } + /// Get the manifest + pub fn manifest(&self) -> &Manifest { + &self.manifest + } + /// Get the path to the manifest + pub fn manifest_path(&self) -> &Path { + &self.manifest_path + } + /// Get the name of the package + pub fn name(&self) -> InternedString { + self.package_id().name() + } + /// Get the PackageId object for the package (fully defines a package) + pub fn package_id(&self) -> &PackageId { + self.manifest.package_id() + } + /// Get the root folder of the package + pub fn root(&self) -> &Path { + self.manifest_path.parent().unwrap() + } + /// Get the summary for the package + pub fn summary(&self) -> &Summary { + self.manifest.summary() + } + /// Get the targets specified in the manifest + pub fn targets(&self) -> &[Target] { + self.manifest.targets() + } + /// Get the current package version + pub fn version(&self) -> &Version { + self.package_id().version() + } + /// Get the package authors + pub fn authors(&self) -> &Vec { + &self.manifest.metadata().authors + } + /// Whether the package is set to publish + pub fn publish(&self) -> &Option> { + self.manifest.publish() + } + + /// Whether the package uses a custom build script for any target + pub fn has_custom_build(&self) -> bool { + self.targets().iter().any(|t| t.is_custom_build()) + } + + pub fn find_closest_target( + &self, + target: &str, + is_expected_kind: fn(&Target) -> bool, + ) -> Option<&Target> { + let targets = self.targets(); + + let matches = targets + .iter() + .filter(|t| is_expected_kind(t)) + .map(|t| (lev_distance(target, t.name()), t)) + .filter(|&(d, _)| d < 4); + matches.min_by_key(|t| t.0).map(|t| t.1) + } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Package { + Package { + manifest: self.manifest.map_source(to_replace, replace_with), + manifest_path: self.manifest_path, + } + } + + pub fn to_registry_toml(&self, config: &Config) -> CargoResult { + let manifest = self.manifest().original().prepare_for_publish(config)?; + let toml = toml::to_string(&manifest)?; + Ok(format!( + "\ + # THIS 
FILE IS AUTOMATICALLY GENERATED BY CARGO\n\ + #\n\ + # When uploading crates to the registry Cargo will automatically\n\ + # \"normalize\" Cargo.toml files for maximal compatibility\n\ + # with all versions of Cargo and also rewrite `path` dependencies\n\ + # to registry (e.g. crates.io) dependencies\n\ + #\n\ + # If you believe there's an error in this file please file an\n\ + # issue against the rust-lang/cargo repository. If you're\n\ + # editing this file be aware that the upstream Cargo.toml\n\ + # will likely look very different (and much more reasonable)\n\ + \n\ + {}\ + ", + toml + )) + } +} + +impl fmt::Display for Package { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.summary().package_id()) + } +} + +impl PartialEq for Package { + fn eq(&self, other: &Package) -> bool { + self.package_id() == other.package_id() + } +} + +impl Eq for Package {} + +impl hash::Hash for Package { + fn hash(&self, into: &mut H) { + self.package_id().hash(into) + } +} + +pub struct PackageSet<'cfg> { + packages: HashMap>, + sources: RefCell>, +} + +impl<'cfg> PackageSet<'cfg> { + pub fn new(package_ids: &[PackageId], sources: SourceMap<'cfg>) -> PackageSet<'cfg> { + PackageSet { + packages: package_ids + .iter() + .map(|id| (id.clone(), LazyCell::new())) + .collect(), + sources: RefCell::new(sources), + } + } + + pub fn package_ids<'a>(&'a self) -> Box + 'a> { + Box::new(self.packages.keys()) + } + + pub fn get(&self, id: &PackageId) -> CargoResult<&Package> { + let slot = self.packages + .get(id) + .ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?; + if let Some(pkg) = slot.borrow() { + return Ok(pkg); + } + let mut sources = self.sources.borrow_mut(); + let source = sources + .get_mut(id.source_id()) + .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?; + let pkg = source + .download(id) + .chain_err(|| format_err!("unable to get packages from source"))?; + assert!(slot.fill(pkg).is_ok()); + Ok(slot.borrow().unwrap()) + } + + pub fn sources(&self) -> Ref> { + self.sources.borrow() + } +} diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs new file mode 100644 index 000000000..7bb64e8a1 --- /dev/null +++ b/src/cargo/core/package_id.rs @@ -0,0 +1,198 @@ +use std::cmp::Ordering; +use std::fmt::{self, Formatter}; +use std::hash::Hash; +use std::hash; +use std::path::Path; +use std::sync::Arc; + +use semver; +use serde::de; +use serde::ser; + +use util::{CargoResult, ToSemver}; +use core::source::SourceId; +use core::interning::InternedString; + +/// Identifier for a specific version of a package in a specific source. 
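+///
+/// A sketch of the textual form produced by `Display` (assuming `source_id`
+/// is the default registry; other sources append a ` ({source})` suffix):
+///
+/// ```rust,ignore
+/// let id = PackageId::new("foo", "1.2.3", &source_id)?;
+/// assert_eq!(id.to_string(), "foo v1.2.3");
+/// ```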
+#[derive(Clone)] +pub struct PackageId { + inner: Arc, +} + +#[derive(PartialEq, PartialOrd, Eq, Ord)] +struct PackageIdInner { + name: InternedString, + version: semver::Version, + source_id: SourceId, +} + +impl ser::Serialize for PackageId { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + s.collect_str(&format_args!( + "{} {} ({})", + self.inner.name, + self.inner.version, + self.inner.source_id.to_url() + )) + } +} + +impl<'de> de::Deserialize<'de> for PackageId { + fn deserialize(d: D) -> Result + where + D: de::Deserializer<'de>, + { + let string = String::deserialize(d)?; + let mut s = string.splitn(3, ' '); + let name = s.next().unwrap(); + let version = match s.next() { + Some(s) => s, + None => return Err(de::Error::custom("invalid serialized PackageId")), + }; + let version = semver::Version::parse(version).map_err(de::Error::custom)?; + let url = match s.next() { + Some(s) => s, + None => return Err(de::Error::custom("invalid serialized PackageId")), + }; + let url = if url.starts_with('(') && url.ends_with(')') { + &url[1..url.len() - 1] + } else { + return Err(de::Error::custom("invalid serialized PackageId")); + }; + let source_id = SourceId::from_url(url).map_err(de::Error::custom)?; + + Ok(PackageId { + inner: Arc::new(PackageIdInner { + name: InternedString::new(name), + version, + source_id, + }), + }) + } +} + +impl Hash for PackageId { + fn hash(&self, state: &mut S) { + self.inner.name.hash(state); + self.inner.version.hash(state); + self.inner.source_id.hash(state); + } +} + +impl PartialEq for PackageId { + fn eq(&self, other: &PackageId) -> bool { + (*self.inner).eq(&*other.inner) + } +} +impl PartialOrd for PackageId { + fn partial_cmp(&self, other: &PackageId) -> Option { + (*self.inner).partial_cmp(&*other.inner) + } +} +impl Eq for PackageId {} +impl Ord for PackageId { + fn cmp(&self, other: &PackageId) -> Ordering { + (*self.inner).cmp(&*other.inner) + } +} + +impl PackageId { + pub fn new(name: &str, version: T, sid: &SourceId) -> CargoResult { + let v = version.to_semver()?; + Ok(PackageId { + inner: Arc::new(PackageIdInner { + name: InternedString::new(name), + version: v, + source_id: sid.clone(), + }), + }) + } + + pub fn name(&self) -> InternedString { + self.inner.name + } + pub fn version(&self) -> &semver::Version { + &self.inner.version + } + pub fn source_id(&self) -> &SourceId { + &self.inner.source_id + } + + pub fn with_precise(&self, precise: Option) -> PackageId { + PackageId { + inner: Arc::new(PackageIdInner { + name: self.inner.name, + version: self.inner.version.clone(), + source_id: self.inner.source_id.with_precise(precise), + }), + } + } + + pub fn with_source_id(&self, source: &SourceId) -> PackageId { + PackageId { + inner: Arc::new(PackageIdInner { + name: self.inner.name, + version: self.inner.version.clone(), + source_id: source.clone(), + }), + } + } + + pub fn stable_hash<'a>(&'a self, workspace: &'a Path) -> PackageIdStableHash<'a> { + PackageIdStableHash(self, workspace) + } +} + +pub struct PackageIdStableHash<'a>(&'a PackageId, &'a Path); + +impl<'a> Hash for PackageIdStableHash<'a> { + fn hash(&self, state: &mut S) { + self.0.inner.name.hash(state); + self.0.inner.version.hash(state); + self.0.inner.source_id.stable_hash(self.1, state); + } +} + +impl fmt::Display for PackageId { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + write!(f, "{} v{}", self.inner.name, self.inner.version)?; + + if !self.inner.source_id.is_default_registry() { + write!(f, " ({})", self.inner.source_id)?; + } + + 
Ok(()) + } +} + +impl fmt::Debug for PackageId { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + f.debug_struct("PackageId") + .field("name", &self.inner.name) + .field("version", &self.inner.version.to_string()) + .field("source", &self.inner.source_id.to_string()) + .finish() + } +} + +#[cfg(test)] +mod tests { + use super::PackageId; + use core::source::SourceId; + use sources::CRATES_IO; + use util::ToUrl; + + #[test] + fn invalid_version_handled_nicely() { + let loc = CRATES_IO.to_url().unwrap(); + let repo = SourceId::for_registry(&loc).unwrap(); + + assert!(PackageId::new("foo", "1.0", &repo).is_err()); + assert!(PackageId::new("foo", "1", &repo).is_err()); + assert!(PackageId::new("foo", "bar", &repo).is_err()); + assert!(PackageId::new("foo", "", &repo).is_err()); + } +} diff --git a/src/cargo/core/package_id_spec.rs b/src/cargo/core/package_id_spec.rs new file mode 100644 index 000000000..0dd1cacc8 --- /dev/null +++ b/src/cargo/core/package_id_spec.rs @@ -0,0 +1,319 @@ +use std::collections::HashMap; +use std::fmt; + +use semver::Version; +use url::Url; + +use core::PackageId; +use util::{ToSemver, ToUrl}; +use util::errors::{CargoResult, CargoResultExt}; + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct PackageIdSpec { + name: String, + version: Option, + url: Option, +} + +impl PackageIdSpec { + pub fn parse(spec: &str) -> CargoResult { + if spec.contains('/') { + if let Ok(url) = spec.to_url() { + return PackageIdSpec::from_url(url); + } + if !spec.contains("://") { + if let Ok(url) = Url::parse(&format!("cargo://{}", spec)) { + return PackageIdSpec::from_url(url); + } + } + } + let mut parts = spec.splitn(2, ':'); + let name = parts.next().unwrap(); + let version = match parts.next() { + Some(version) => Some(Version::parse(version)?), + None => None, + }; + for ch in name.chars() { + if !ch.is_alphanumeric() && ch != '_' && ch != '-' { + bail!("invalid character in pkgid `{}`: `{}`", spec, ch) + } + } + Ok(PackageIdSpec { + name: name.to_string(), + version, + url: None, + }) + } + + pub fn query_str<'a, I>(spec: &str, i: I) -> CargoResult<&'a PackageId> + where + I: IntoIterator, + { + let spec = PackageIdSpec::parse(spec) + .chain_err(|| format_err!("invalid package id specification: `{}`", spec))?; + spec.query(i) + } + + pub fn from_package_id(package_id: &PackageId) -> PackageIdSpec { + PackageIdSpec { + name: package_id.name().to_string(), + version: Some(package_id.version().clone()), + url: Some(package_id.source_id().url().clone()), + } + } + + fn from_url(mut url: Url) -> CargoResult { + if url.query().is_some() { + bail!("cannot have a query string in a pkgid: {}", url) + } + let frag = url.fragment().map(|s| s.to_owned()); + url.set_fragment(None); + let (name, version) = { + let mut path = url.path_segments() + .ok_or_else(|| format_err!("pkgid urls must have a path: {}", url))?; + let path_name = path.next_back().ok_or_else(|| { + format_err!( + "pkgid urls must have at least one path \ + component: {}", + url + ) + })?; + match frag { + Some(fragment) => { + let mut parts = fragment.splitn(2, ':'); + let name_or_version = parts.next().unwrap(); + match parts.next() { + Some(part) => { + let version = part.to_semver()?; + (name_or_version.to_string(), Some(version)) + } + None => { + if name_or_version.chars().next().unwrap().is_alphabetic() { + (name_or_version.to_string(), None) + } else { + let version = name_or_version.to_semver()?; + (path_name.to_string(), Some(version)) + } + } + } + } + None => (path_name.to_string(), None), + } + }; + 
Ok(PackageIdSpec { + name, + version, + url: Some(url), + }) + } + + pub fn name(&self) -> &str { + &self.name + } + pub fn version(&self) -> Option<&Version> { + self.version.as_ref() + } + pub fn url(&self) -> Option<&Url> { + self.url.as_ref() + } + + pub fn set_url(&mut self, url: Url) { + self.url = Some(url); + } + + pub fn matches(&self, package_id: &PackageId) -> bool { + if self.name() != &*package_id.name() { + return false; + } + + if let Some(ref v) = self.version { + if v != package_id.version() { + return false; + } + } + + match self.url { + Some(ref u) => u == package_id.source_id().url(), + None => true, + } + } + + pub fn query<'a, I>(&self, i: I) -> CargoResult<&'a PackageId> + where + I: IntoIterator, + { + let mut ids = i.into_iter().filter(|p| self.matches(*p)); + let ret = match ids.next() { + Some(id) => id, + None => bail!( + "package id specification `{}` \ + matched no packages", + self + ), + }; + return match ids.next() { + Some(other) => { + let mut msg = format!( + "There are multiple `{}` packages in \ + your project, and the specification \ + `{}` is ambiguous.\n\ + Please re-run this command \ + with `-p ` where `` is one \ + of the following:", + self.name(), + self + ); + let mut vec = vec![ret, other]; + vec.extend(ids); + minimize(&mut msg, &vec, self); + Err(format_err!("{}", msg)) + } + None => Ok(ret), + }; + + fn minimize(msg: &mut String, ids: &[&PackageId], spec: &PackageIdSpec) { + let mut version_cnt = HashMap::new(); + for id in ids { + *version_cnt.entry(id.version()).or_insert(0) += 1; + } + for id in ids { + if version_cnt[id.version()] == 1 { + msg.push_str(&format!("\n {}:{}", spec.name(), id.version())); + } else { + msg.push_str(&format!("\n {}", PackageIdSpec::from_package_id(*id))); + } + } + } + } +} + +impl fmt::Display for PackageIdSpec { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut printed_name = false; + match self.url { + Some(ref url) => { + if url.scheme() == "cargo" { + write!(f, "{}{}", url.host().unwrap(), url.path())?; + } else { + write!(f, "{}", url)?; + } + if url.path_segments().unwrap().next_back().unwrap() != self.name { + printed_name = true; + write!(f, "#{}", self.name)?; + } + } + None => { + printed_name = true; + write!(f, "{}", self.name)? 
+ } + } + if let Some(ref v) = self.version { + write!(f, "{}{}", if printed_name { ":" } else { "#" }, v)?; + } + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use core::{PackageId, SourceId}; + use super::PackageIdSpec; + use url::Url; + use semver::Version; + + #[test] + fn good_parsing() { + fn ok(spec: &str, expected: PackageIdSpec) { + let parsed = PackageIdSpec::parse(spec).unwrap(); + assert_eq!(parsed, expected); + assert_eq!(parsed.to_string(), spec); + } + + ok( + "http://crates.io/foo#1.2.3", + PackageIdSpec { + name: "foo".to_string(), + version: Some(Version::parse("1.2.3").unwrap()), + url: Some(Url::parse("http://crates.io/foo").unwrap()), + }, + ); + ok( + "http://crates.io/foo#bar:1.2.3", + PackageIdSpec { + name: "bar".to_string(), + version: Some(Version::parse("1.2.3").unwrap()), + url: Some(Url::parse("http://crates.io/foo").unwrap()), + }, + ); + ok( + "crates.io/foo", + PackageIdSpec { + name: "foo".to_string(), + version: None, + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }, + ); + ok( + "crates.io/foo#1.2.3", + PackageIdSpec { + name: "foo".to_string(), + version: Some(Version::parse("1.2.3").unwrap()), + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }, + ); + ok( + "crates.io/foo#bar", + PackageIdSpec { + name: "bar".to_string(), + version: None, + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }, + ); + ok( + "crates.io/foo#bar:1.2.3", + PackageIdSpec { + name: "bar".to_string(), + version: Some(Version::parse("1.2.3").unwrap()), + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }, + ); + ok( + "foo", + PackageIdSpec { + name: "foo".to_string(), + version: None, + url: None, + }, + ); + ok( + "foo:1.2.3", + PackageIdSpec { + name: "foo".to_string(), + version: Some(Version::parse("1.2.3").unwrap()), + url: None, + }, + ); + } + + #[test] + fn bad_parsing() { + assert!(PackageIdSpec::parse("baz:").is_err()); + assert!(PackageIdSpec::parse("baz:*").is_err()); + assert!(PackageIdSpec::parse("baz:1.0").is_err()); + assert!(PackageIdSpec::parse("http://baz:1.0").is_err()); + assert!(PackageIdSpec::parse("http://#baz:1.0").is_err()); + } + + #[test] + fn matching() { + let url = Url::parse("http://example.com").unwrap(); + let sid = SourceId::for_registry(&url).unwrap(); + let foo = PackageId::new("foo", "1.2.3", &sid).unwrap(); + let bar = PackageId::new("bar", "1.2.3", &sid).unwrap(); + + assert!(PackageIdSpec::parse("foo").unwrap().matches(&foo)); + assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar)); + assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo)); + assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(&foo)); + } +} diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs new file mode 100644 index 000000000..ba1866bc8 --- /dev/null +++ b/src/cargo/core/registry.rs @@ -0,0 +1,715 @@ +use std::collections::HashMap; + +use semver::VersionReq; +use url::Url; + +use core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary}; +use core::PackageSet; +use util::{profile, Config}; +use util::errors::{CargoResult, CargoResultExt}; +use sources::config::SourceConfigMap; + +/// Source of information about a group of packages. +/// +/// See also `core::Source`. +pub trait Registry { + /// Attempt to find the packages that match a dependency request. 
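+    ///
+    /// Callers that just want the matches collected can go through
+    /// `query_vec` below; a hedged sketch (`registry` and `source_id`
+    /// are assumed to be at hand):
+    ///
+    /// ```rust,ignore
+    /// let dep = Dependency::parse_no_deprecated("serde", Some("1.0"), &source_id)?;
+    /// let summaries = registry.query_vec(&dep)?;
+    /// ```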
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()>;
+
+ fn query_vec(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
+ let mut ret = Vec::new();
+ self.query(dep, &mut |s| ret.push(s))?;
+ Ok(ret)
+ }
+
+ /// Returns whether or not this registry will return summaries with
+ /// checksums listed.
+ fn supports_checksums(&self) -> bool;
+
+ /// Returns whether or not this registry will return summaries with
+ /// the `precise` field in the source id listed.
+ fn requires_precise(&self) -> bool;
+}
+
+impl<'a, T: ?Sized + Registry + 'a> Registry for Box<T> {
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+ (**self).query(dep, f)
+ }
+
+ fn supports_checksums(&self) -> bool {
+ (**self).supports_checksums()
+ }
+
+ fn requires_precise(&self) -> bool {
+ (**self).requires_precise()
+ }
+}
+
+/// This structure represents a registry of known packages. It internally
+/// contains a number of `Box<Source>` instances which are used to load a
+/// `Package` from.
+///
+/// The resolution phase of Cargo uses this to drive knowledge about new
+/// packages as well as querying for lists of new packages. It is here that
+/// sources are updated (e.g. network operations) and overrides are
+/// handled.
+///
+/// The general idea behind this registry is that it is centered around the
+/// `SourceMap` structure, contained within which is a mapping of a `SourceId` to
+/// a `Source`. Each `Source` in the map has been updated (using network
+/// operations if necessary) and is ready to be queried for packages.
+pub struct PackageRegistry<'cfg> {
+ sources: SourceMap<'cfg>,
+
+ // A list of sources which are considered "overrides" which take precedence
+ // when querying for packages.
+ overrides: Vec<SourceId>,
+
+ // Note that each SourceId does not take into account its `precise` field
+ // when hashing or testing for equality. When adding a new `SourceId`, we
+ // want to avoid duplicates in the `SourceMap` (to prevent re-updating the
+ // same git repo twice for example), but we also want to ensure that the
+ // loaded source is always updated.
+ //
+ // Sources with a `precise` field normally don't need to be updated because
+ // their contents are already on disk, but sources without a `precise` field
+ // almost always need to be updated. If we have a cached `Source` for a
+ // precise `SourceId`, then when we add a new `SourceId` that is not precise
+ // we want to ensure that the underlying source is updated.
+ //
+ // This is basically a long-winded way of saying that we want to know
+ // precisely what the keys of `sources` are, so this is a mapping of key to
+ // what exactly the key is.
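+ //
+ // An illustrative shape for this map (the URL and revision are
+ // hypothetical; `#rev` marks the `precise` component, which the key
+ // ignores when hashing):
+ //
+ //     key: git+https://example.com/foo
+ //     value: (git+https://example.com/foo#abc123, Kind::Locked)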
+ source_ids: HashMap<SourceId, (SourceId, Kind)>,
+
+ locked: LockedMap,
+ source_config: SourceConfigMap<'cfg>,
+
+ patches: HashMap<Url, Vec<Summary>>,
+ patches_locked: bool,
+ patches_available: HashMap<Url, Vec<PackageId>>,
+}
+
+type LockedMap = HashMap<SourceId, HashMap<String, Vec<(PackageId, Vec<PackageId>)>>>;
+
+#[derive(PartialEq, Eq, Clone, Copy)]
+enum Kind {
+ Override,
+ Locked,
+ Normal,
+}
+
+impl<'cfg> PackageRegistry<'cfg> {
+ pub fn new(config: &'cfg Config) -> CargoResult<PackageRegistry<'cfg>> {
+ let source_config = SourceConfigMap::new(config)?;
+ Ok(PackageRegistry {
+ sources: SourceMap::new(),
+ source_ids: HashMap::new(),
+ overrides: Vec::new(),
+ source_config,
+ locked: HashMap::new(),
+ patches: HashMap::new(),
+ patches_locked: false,
+ patches_available: HashMap::new(),
+ })
+ }
+
+ pub fn get(self, package_ids: &[PackageId]) -> PackageSet<'cfg> {
+ trace!("getting packages; sources={}", self.sources.len());
+ PackageSet::new(package_ids, self.sources)
+ }
+
+ fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> {
+ match self.source_ids.get(namespace) {
+ // We've previously loaded this source, and we've already locked it,
+ // so we're not allowed to change it even if `namespace` has a
+ // slightly different precise version listed.
+ Some(&(_, Kind::Locked)) => {
+ debug!("load/locked {}", namespace);
+ return Ok(());
+ }
+
+ // If the previous source was not a precise source, then we can be
+ // sure that it's already been updated if we've already loaded it.
+ Some(&(ref previous, _)) if previous.precise().is_none() => {
+ debug!("load/precise {}", namespace);
+ return Ok(());
+ }
+
+ // If the previous source has the same precise version as we do,
+ // then we're done, otherwise we need to move forward
+ // updating this source.
+ Some(&(ref previous, _)) => {
+ if previous.precise() == namespace.precise() {
+ debug!("load/match {}", namespace);
+ return Ok(());
+ }
+ debug!("load/mismatch {}", namespace);
+ }
+ None => {
+ debug!("load/missing {}", namespace);
+ }
+ }
+
+ self.load(namespace, kind)?;
+ Ok(())
+ }
+
+ pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> {
+ for id in ids.iter() {
+ self.ensure_loaded(id, Kind::Locked)?;
+ }
+ Ok(())
+ }
+
+ pub fn add_preloaded(&mut self, source: Box<Source + 'cfg>) {
+ self.add_source(source, Kind::Locked);
+ }
+
+ fn add_source(&mut self, source: Box<Source + 'cfg>, kind: Kind) {
+ let id = source.source_id().clone();
+ self.sources.insert(source);
+ self.source_ids.insert(id.clone(), (id, kind));
+ }
+
+ pub fn add_override(&mut self, source: Box<Source + 'cfg>) {
+ self.overrides.push(source.source_id().clone());
+ self.add_source(source, Kind::Override);
+ }
+
+ pub fn register_lock(&mut self, id: PackageId, deps: Vec<PackageId>) {
+ trace!("register_lock: {}", id);
+ for dep in deps.iter() {
+ trace!("\t-> {}", dep);
+ }
+ let sub_map = self.locked
+ .entry(id.source_id().clone())
+ .or_insert_with(HashMap::new);
+ let sub_vec = sub_map
+ .entry(id.name().to_string())
+ .or_insert_with(Vec::new);
+ sub_vec.push((id, deps));
+ }
+
+ /// Insert a `[patch]` section into this registry.
+ ///
+ /// This method will insert a `[patch]` section for the `url` specified,
+ /// with the given list of dependencies. The `url` specified is the URL of
+ /// the source to patch (for example this is `crates-io` in the manifest).
+ /// The `deps` is an array of all the entries in the `[patch]` section of
+ /// the manifest.
+ ///
+ /// Here the `deps` will be resolved to a precise version and stored
+ /// internally for future calls to `query` below. It's expected that `deps`
+ /// have had `lock_to` called already, if applicable. (e.g.
if a lock file was
+ /// already present).
+ ///
+ /// Note that the patch list specified here *will not* be available to
+ /// `query` until `lock_patches` is called below, which should be called
+ /// once all patches have been added.
+ pub fn patch(&mut self, url: &Url, deps: &[Dependency]) -> CargoResult<()> {
+ // First up we need to actually resolve each `deps` specification to
+ // precisely one summary. We're not using the `query` method below as it
+ // internally uses maps we're building up as part of this method
+ // (`patches_available` and `patches`). Instead we're going straight to
+ // the source to load information from it.
+ //
+ // Remember that each dependency listed in `[patch]` has to resolve to
+ // precisely one package, so that's why we're just creating a flat list
+ // of summaries which should be the same length as `deps` above.
+ let unlocked_summaries = deps.iter()
+ .map(|dep| {
+ debug!("registering a patch for `{}` with `{}`", url, dep.name());
+
+ // Go straight to the source for resolving `dep`. Load it as we
+ // normally would and then ask it directly for the list of summaries
+ // corresponding to this `dep`.
+ self.ensure_loaded(dep.source_id(), Kind::Normal)
+ .chain_err(|| {
+ format_err!(
+ "failed to load source for a dependency \
+ on `{}`",
+ dep.name()
+ )
+ })?;
+
+ let mut summaries = self.sources
+ .get_mut(dep.source_id())
+ .expect("loaded source not present")
+ .query_vec(dep)?
+ .into_iter();
+
+ let summary = match summaries.next() {
+ Some(summary) => summary,
+ None => bail!(
+ "patch for `{}` in `{}` did not resolve to any crates. If this is \
+ unexpected, you may wish to consult: \
+ https://github.com/rust-lang/cargo/issues/4678",
+ dep.name(),
+ url
+ ),
+ };
+ if summaries.next().is_some() {
+ bail!(
+ "patch for `{}` in `{}` resolved to more than one candidate",
+ dep.name(),
+ url
+ )
+ }
+ if summary.package_id().source_id().url() == url {
+ bail!(
+ "patch for `{}` in `{}` points to the same source, but \
+ patches must point to different sources",
+ dep.name(),
+ url
+ );
+ }
+ Ok(summary)
+ })
+ .collect::<CargoResult<Vec<_>>>()
+ .chain_err(|| format_err!("failed to resolve patches for `{}`", url))?;
+
+ // Note that we do not use `lock` here to lock summaries! That step
+ // happens later once `lock_patches` is invoked. In the meantime though
+ // we want to fill in the `patches_available` map (later used in the
+ // `lock` method) and otherwise store the unlocked summaries in
+ // `patches` to get locked in a future call to `lock_patches`.
+ let ids = unlocked_summaries
+ .iter()
+ .map(|s| s.package_id())
+ .cloned()
+ .collect();
+ self.patches_available.insert(url.clone(), ids);
+ self.patches.insert(url.clone(), unlocked_summaries);
+
+ Ok(())
+ }
+
+ /// Lock all patch summaries added via `patch`, making them available to
+ /// resolution via `query`.
+ ///
+ /// This function will internally `lock` each summary added via `patch`
+ /// above now that the full set of `patch` packages is known. This'll allow
+ /// us to correctly resolve overridden dependencies between patches,
+ /// hopefully!
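+ //
+ // Sketch of the intended call sequence (illustrative; `patch_sections`
+ // stands in for the manifest's parsed `[patch]` tables):
+ //
+ //     for (url, deps) in patch_sections {
+ //         registry.patch(&url, &deps)?;
+ //     }
+ //     registry.lock_patches(); // required before any call to `query`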
+ pub fn lock_patches(&mut self) { + assert!(!self.patches_locked); + for summaries in self.patches.values_mut() { + for summary in summaries { + *summary = lock(&self.locked, &self.patches_available, summary.clone()); + } + } + self.patches_locked = true; + } + + pub fn patches(&self) -> &HashMap> { + &self.patches + } + + fn load(&mut self, source_id: &SourceId, kind: Kind) -> CargoResult<()> { + (|| { + let source = self.source_config.load(source_id)?; + assert_eq!(source.source_id(), source_id); + + if kind == Kind::Override { + self.overrides.push(source_id.clone()); + } + self.add_source(source, kind); + + // Ensure the source has fetched all necessary remote data. + let _p = profile::start(format!("updating: {}", source_id)); + self.sources.get_mut(source_id).unwrap().update() + })() + .chain_err(|| format_err!("Unable to update {}", source_id))?; + Ok(()) + } + + fn query_overrides(&mut self, dep: &Dependency) -> CargoResult> { + for s in self.overrides.iter() { + let src = self.sources.get_mut(s).unwrap(); + let dep = Dependency::new_override(&*dep.name(), s); + let mut results = src.query_vec(&dep)?; + if !results.is_empty() { + return Ok(Some(results.remove(0))); + } + } + Ok(None) + } + + /// This function is used to transform a summary to another locked summary + /// if possible. This is where the concept of a lockfile comes into play. + /// + /// If a summary points at a package id which was previously locked, then we + /// override the summary's id itself, as well as all dependencies, to be + /// rewritten to the locked versions. This will transform the summary's + /// source to a precise source (listed in the locked version) as well as + /// transforming all of the dependencies from range requirements on + /// imprecise sources to exact requirements on precise sources. + /// + /// If a summary does not point at a package id which was previously locked, + /// or if any dependencies were added and don't have a previously listed + /// version, we still want to avoid updating as many dependencies as + /// possible to keep the graph stable. In this case we map all of the + /// summary's dependencies to be rewritten to a locked version wherever + /// possible. If we're unable to map a dependency though, we just pass it on + /// through. + pub fn lock(&self, summary: Summary) -> Summary { + assert!(self.patches_locked); + lock(&self.locked, &self.patches_available, summary) + } + + fn warn_bad_override( + &self, + override_summary: &Summary, + real_summary: &Summary, + ) -> CargoResult<()> { + let mut real_deps = real_summary.dependencies().iter().collect::>(); + + let boilerplate = "\ +This is currently allowed but is known to produce buggy behavior with spurious +recompiles and changes to the crate graph. Path overrides unfortunately were +never intended to support this feature, so for now this message is just a +warning. In the future, however, this message will become a hard error. + +To change the dependency graph via an override it's recommended to use the +`[replace]` feature of Cargo instead of the path override feature. This is +documented online at the url below for more information. 
+ +http://doc.crates.io/specifying-dependencies.html#overriding-dependencies +"; + + for dep in override_summary.dependencies() { + if let Some(i) = real_deps.iter().position(|d| dep == *d) { + real_deps.remove(i); + continue; + } + let msg = format!( + "\ + path override for crate `{}` has altered the original list of\n\ + dependencies; the dependency on `{}` was either added or\n\ + modified to not match the previously resolved version\n\n\ + {}", + override_summary.package_id().name(), + dep.name(), + boilerplate + ); + self.source_config.config().shell().warn(&msg)?; + return Ok(()); + } + + if let Some(id) = real_deps.get(0) { + let msg = format!( + "\ + path override for crate `{}` has altered the original list of + dependencies; the dependency on `{}` was removed\n\n + {}", + override_summary.package_id().name(), + id.name(), + boilerplate + ); + self.source_config.config().shell().warn(&msg)?; + return Ok(()); + } + + Ok(()) + } +} + +impl<'cfg> Registry for PackageRegistry<'cfg> { + fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { + assert!(self.patches_locked); + let (override_summary, n, to_warn) = { + // Look for an override and get ready to query the real source. + let override_summary = self.query_overrides(dep)?; + + // Next up on our list of candidates is to check the `[patch]` + // section of the manifest. Here we look through all patches + // relevant to the source that `dep` points to, and then we match + // name/version. Note that we don't use `dep.matches(..)` because + // the patches, by definition, come from a different source. + // This means that `dep.matches(..)` will always return false, when + // what we really care about is the name/version match. + let mut patches = Vec::::new(); + if let Some(extra) = self.patches.get(dep.source_id().url()) { + patches.extend( + extra + .iter() + .filter(|s| dep.matches_ignoring_source(s.package_id())) + .cloned(), + ); + } + + // A crucial feature of the `[patch]` feature is that we *don't* + // query the actual registry if we have a "locked" dependency. A + // locked dep basically just means a version constraint of `=a.b.c`, + // and because patches take priority over the actual source then if + // we have a candidate we're done. + if patches.len() == 1 && dep.is_locked() { + let patch = patches.remove(0); + match override_summary { + Some(summary) => (summary, 1, Some(patch)), + None => { + f(patch); + return Ok(()); + } + } + } else { + if !patches.is_empty() { + debug!( + "found {} patches with an unlocked dep on `{}` at {} \ + with `{}`, \ + looking at sources", + patches.len(), + dep.name(), + dep.source_id(), + dep.version_req() + ); + } + + // Ensure the requested source_id is loaded + self.ensure_loaded(dep.source_id(), Kind::Normal) + .chain_err(|| { + format_err!( + "failed to load source for a dependency \ + on `{}`", + dep.name() + ) + })?; + + let source = self.sources.get_mut(dep.source_id()); + match (override_summary, source) { + (Some(_), None) => bail!("override found but no real ones"), + (None, None) => return Ok(()), + + // If we don't have an override then we just ship + // everything upstairs after locking the summary + (None, Some(source)) => { + for patch in patches.iter() { + f(patch.clone()); + } + + // Our sources shouldn't ever come back to us with two + // summaries that have the same version. We could, + // however, have an `[patch]` section which is in use + // to override a version in the registry. 
This means + // that if our `summary` in this loop has the same + // version as something in `patches` that we've + // already selected, then we skip this `summary`. + let locked = &self.locked; + let all_patches = &self.patches_available; + return source.query(dep, &mut |summary| { + for patch in patches.iter() { + let patch = patch.package_id().version(); + if summary.package_id().version() == patch { + return; + } + } + f(lock(locked, all_patches, summary)) + }); + } + + // If we have an override summary then we query the source + // to sanity check its results. We don't actually use any of + // the summaries it gives us though. + (Some(override_summary), Some(source)) => { + if !patches.is_empty() { + bail!("found patches and a path override") + } + let mut n = 0; + let mut to_warn = None; + source.query(dep, &mut |summary| { + n += 1; + to_warn = Some(summary); + })?; + (override_summary, n, to_warn) + } + } + } + }; + + if n > 1 { + bail!("found an override with a non-locked list"); + } else if let Some(summary) = to_warn { + self.warn_bad_override(&override_summary, &summary)?; + } + f(self.lock(override_summary)); + Ok(()) + } + + fn supports_checksums(&self) -> bool { + false + } + + fn requires_precise(&self) -> bool { + false + } +} + +fn lock(locked: &LockedMap, patches: &HashMap>, summary: Summary) -> Summary { + let pair = locked + .get(summary.source_id()) + .and_then(|map| map.get(&*summary.name())) + .and_then(|vec| vec.iter().find(|&&(ref id, _)| id == summary.package_id())); + + trace!("locking summary of {}", summary.package_id()); + + // Lock the summary's id if possible + let summary = match pair { + Some(&(ref precise, _)) => summary.override_id(precise.clone()), + None => summary, + }; + summary.map_dependencies(|dep| { + trace!("\t{}/{}/{}", dep.name(), dep.version_req(), dep.source_id()); + + // If we've got a known set of overrides for this summary, then + // one of a few cases can arise: + // + // 1. We have a lock entry for this dependency from the same + // source as it's listed as coming from. In this case we make + // sure to lock to precisely the given package id. + // + // 2. We have a lock entry for this dependency, but it's from a + // different source than what's listed, or the version + // requirement has changed. In this case we must discard the + // locked version because the dependency needs to be + // re-resolved. + // + // 3. We don't have a lock entry for this dependency, in which + // case it was likely an optional dependency which wasn't + // included previously so we just pass it through anyway. + // + // Cases 1/2 are handled by `matches_id` and case 3 is handled by + // falling through to the logic below. + if let Some(&(_, ref locked_deps)) = pair { + let locked = locked_deps.iter().find(|id| dep.matches_id(id)); + if let Some(locked) = locked { + trace!("\tfirst hit on {}", locked); + let mut dep = dep.clone(); + dep.lock_to(locked); + return dep; + } + } + + // If this dependency did not have a locked version, then we query + // all known locked packages to see if they match this dependency. + // If anything does then we lock it to that and move on. + let v = locked + .get(dep.source_id()) + .and_then(|map| map.get(&*dep.name())) + .and_then(|vec| vec.iter().find(|&&(ref id, _)| dep.matches_id(id))); + if let Some(&(ref id, _)) = v { + trace!("\tsecond hit on {}", id); + let mut dep = dep.clone(); + dep.lock_to(id); + return dep; + } + + // Finally we check to see if any registered patches correspond to + // this dependency. 
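+ // In other words, `lock` tries three things in order: the dependency's
+ // own lock entry, any other locked package the dependency matches, and
+ // finally a registered `[patch]` entry. For a locked patch the version
+ // requirement gets pinned exactly, e.g. (illustrative):
+ //
+ //     // dep `foo = "^1.0"` + locked patch `foo 1.0.7`  =>  `foo = "=1.0.7"`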
+ let v = patches.get(dep.source_id().url()).map(|vec| { + let dep2 = dep.clone(); + let mut iter = vec.iter().filter(move |p| { + dep2.name() == p.name() && dep2.version_req().matches(p.version()) + }); + (iter.next(), iter) + }); + if let Some((Some(patch_id), mut remaining)) = v { + assert!(remaining.next().is_none()); + let patch_source = patch_id.source_id(); + let patch_locked = locked + .get(patch_source) + .and_then(|m| m.get(&*patch_id.name())) + .map(|list| list.iter().any(|&(ref id, _)| id == patch_id)) + .unwrap_or(false); + + if patch_locked { + trace!("\tthird hit on {}", patch_id); + let req = VersionReq::exact(patch_id.version()); + let mut dep = dep.clone(); + dep.set_version_req(req); + return dep; + } + } + + trace!("\tnope, unlocked"); + dep + }) +} + +#[cfg(test)] +pub mod test { + use core::{Dependency, Registry, Summary}; + use util::CargoResult; + + pub struct RegistryBuilder { + summaries: Vec, + overrides: Vec, + } + + impl RegistryBuilder { + pub fn new() -> RegistryBuilder { + RegistryBuilder { + summaries: vec![], + overrides: vec![], + } + } + + pub fn summary(mut self, summary: Summary) -> RegistryBuilder { + self.summaries.push(summary); + self + } + + pub fn summaries(mut self, summaries: Vec) -> RegistryBuilder { + self.summaries.extend(summaries.into_iter()); + self + } + + pub fn add_override(mut self, summary: Summary) -> RegistryBuilder { + self.overrides.push(summary); + self + } + + pub fn overrides(mut self, summaries: Vec) -> RegistryBuilder { + self.overrides.extend(summaries.into_iter()); + self + } + + fn query_overrides(&self, dep: &Dependency) -> Vec { + self.overrides + .iter() + .filter(|s| s.name() == dep.name()) + .map(|s| s.clone()) + .collect() + } + } + + impl Registry for RegistryBuilder { + fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { + debug!("querying; dep={:?}", dep); + + let overrides = self.query_overrides(dep); + + if overrides.is_empty() { + for s in self.summaries.iter() { + if dep.matches(s) { + f(s.clone()); + } + } + Ok(()) + } else { + for s in overrides { + f(s); + } + Ok(()) + } + } + + fn supports_checksums(&self) -> bool { + false + } + + fn requires_precise(&self) -> bool { + false + } + } +} diff --git a/src/cargo/core/resolver/conflict_cache.rs b/src/cargo/core/resolver/conflict_cache.rs new file mode 100644 index 000000000..91b094acc --- /dev/null +++ b/src/cargo/core/resolver/conflict_cache.rs @@ -0,0 +1,96 @@ +use std::collections::{HashMap, HashSet}; + +use core::{Dependency, PackageId}; +use core::resolver::{ConflictReason, Context}; + +pub(super) struct ConflictCache { + // `con_from_dep` is a cache of the reasons for each time we + // backtrack. For example after several backtracks we may have: + // + // con_from_dep[`foo = "^1.0.2"`] = vec![ + // map!{`foo=1.0.1`: Semver}, + // map!{`foo=1.0.0`: Semver}, + // ]; + // + // This can be read as "we cannot find a candidate for dep `foo = "^1.0.2"` + // if either `foo=1.0.1` OR `foo=1.0.0` are activated". + // + // Another example after several backtracks we may have: + // + // con_from_dep[`foo = ">=0.8.2, <=0.9.3"`] = vec![ + // map!{`foo=0.8.1`: Semver, `foo=0.9.4`: Semver}, + // ]; + // + // This can be read as "we cannot find a candidate for dep `foo = ">=0.8.2, + // <=0.9.3"` if both `foo=0.8.1` AND `foo=0.9.4` are activated". + // + // This is used to make sure we don't queue work we know will fail. 
See the + // discussion in https://github.com/rust-lang/cargo/pull/5168 for why this + // is so important, and there can probably be a better data structure here + // but for now this works well enough! + // + // Also, as a final note, this map is *not* ever removed from. This remains + // as a global cache which we never delete from. Any entry in this map is + // unconditionally true regardless of our resolution history of how we got + // here. + con_from_dep: HashMap>>, + // `past_conflict_triggers` is an + // of `past_conflicting_activations`. + // For every `PackageId` this lists the `Dependency`s that mention it in `past_conflicting_activations`. + dep_from_pid: HashMap>, +} + +impl ConflictCache { + pub fn new() -> ConflictCache { + ConflictCache { + con_from_dep: HashMap::new(), + dep_from_pid: HashMap::new(), + } + } + /// Finds any known set of conflicts, if any, + /// which are activated in `cx` and pass the `filter` specified? + pub fn find_conflicting( + &self, + cx: &Context, + dep: &Dependency, + filter: F, + ) -> Option<&HashMap> + where + for<'r> F: FnMut(&'r &HashMap) -> bool, + { + self.con_from_dep + .get(dep)? + .iter() + .filter(filter) + .find(|conflicting| cx.is_conflicting(None, conflicting)) + } + pub fn conflicting( + &self, + cx: &Context, + dep: &Dependency, + ) -> Option<&HashMap> { + self.find_conflicting(cx, dep, |_| true) + } + + /// Add to the cache a conflict of the form: + /// `dep` is known to be unresolvable if + /// all the `PackageId` entries are activated + pub fn insert(&mut self, dep: &Dependency, con: &HashMap) { + let past = self.con_from_dep + .entry(dep.clone()) + .or_insert_with(Vec::new); + if !past.contains(con) { + trace!("{} adding a skip {:?}", dep.name(), con); + past.push(con.clone()); + for c in con.keys() { + self.dep_from_pid + .entry(c.clone()) + .or_insert_with(HashSet::new) + .insert(dep.clone()); + } + } + } + pub fn dependencies_conflicting_with(&self, pid: &PackageId) -> Option<&HashSet> { + self.dep_from_pid.get(pid) + } +} diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs new file mode 100644 index 000000000..3bb318c17 --- /dev/null +++ b/src/cargo/core/resolver/encode.rs @@ -0,0 +1,434 @@ +use std::collections::{BTreeMap, HashMap, HashSet}; +use std::fmt; +use std::str::FromStr; + +use serde::ser; +use serde::de; + +use core::{Dependency, Package, PackageId, SourceId, Workspace}; +use util::{internal, Graph}; +use util::errors::{CargoError, CargoResult, CargoResultExt}; + +use super::Resolve; + +#[derive(Serialize, Deserialize, Debug)] +pub struct EncodableResolve { + package: Option>, + /// `root` is optional to allow backward compatibility. + root: Option, + metadata: Option, + + #[serde(default, skip_serializing_if = "Patch::is_empty")] patch: Patch, +} + +#[derive(Serialize, Deserialize, Debug, Default)] +struct Patch { + unused: Vec, +} + +pub type Metadata = BTreeMap; + +impl EncodableResolve { + pub fn into_resolve(self, ws: &Workspace) -> CargoResult { + let path_deps = build_path_deps(ws); + + let packages = { + let mut packages = self.package.unwrap_or_default(); + if let Some(root) = self.root { + packages.insert(0, root); + } + packages + }; + + // `PackageId`s in the lock file don't include the `source` part + // for workspace members, so we reconstruct proper ids. 
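+ // For example, a workspace member's entry carries no `source` key
+ // (illustrative lockfile excerpt; the name is hypothetical):
+ //
+ //     [[package]]
+ //     name = "my-member"
+ //     version = "0.1.0"
+ //     # no `source = ...` line for path dependencies
+ //
+ // so its `PackageId` is rebuilt below from `path_deps`, which maps a
+ // package name to the path `SourceId` discovered in the workspace.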
+ let (live_pkgs, all_pkgs) = { + let mut live_pkgs = HashMap::new(); + let mut all_pkgs = HashSet::new(); + for pkg in packages.iter() { + let enc_id = EncodablePackageId { + name: pkg.name.clone(), + version: pkg.version.clone(), + source: pkg.source.clone(), + }; + + if !all_pkgs.insert(enc_id.clone()) { + return Err(internal(format!( + "package `{}` is specified twice in the lockfile", + pkg.name + ))); + } + let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) { + // We failed to find a local package in the workspace. + // It must have been removed and should be ignored. + None => { + debug!("path dependency now missing {} v{}", pkg.name, pkg.version); + continue; + } + Some(source) => PackageId::new(&pkg.name, &pkg.version, source)?, + }; + + assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none()) + } + (live_pkgs, all_pkgs) + }; + + let lookup_id = |enc_id: &EncodablePackageId| -> CargoResult> { + match live_pkgs.get(enc_id) { + Some(&(ref id, _)) => Ok(Some(id.clone())), + None => if all_pkgs.contains(enc_id) { + // Package is found in the lockfile, but it is + // no longer a member of the workspace. + Ok(None) + } else { + Err(internal(format!( + "package `{}` is specified as a dependency, \ + but is missing from the package list", + enc_id + ))) + }, + } + }; + + let g = { + let mut g = Graph::new(); + + for &(ref id, _) in live_pkgs.values() { + g.add(id.clone(), &[]); + } + + for &(ref id, pkg) in live_pkgs.values() { + let deps = match pkg.dependencies { + Some(ref deps) => deps, + None => continue, + }; + + for edge in deps.iter() { + if let Some(to_depend_on) = lookup_id(edge)? { + g.link(id.clone(), to_depend_on); + } + } + } + g + }; + + let replacements = { + let mut replacements = HashMap::new(); + for &(ref id, pkg) in live_pkgs.values() { + if let Some(ref replace) = pkg.replace { + assert!(pkg.dependencies.is_none()); + if let Some(replace_id) = lookup_id(replace)? { + replacements.insert(id.clone(), replace_id); + } + } + } + replacements + }; + + let mut metadata = self.metadata.unwrap_or_default(); + + // Parse out all package checksums. After we do this we can be in a few + // situations: + // + // * We parsed no checksums. In this situation we're dealing with an old + // lock file and we're gonna fill them all in. + // * We parsed some checksums, but not one for all packages listed. It + // could have been the case that some were listed, then an older Cargo + // client added more dependencies, and now we're going to fill in the + // missing ones. + // * There are too many checksums listed, indicative of an older Cargo + // client removing a package but not updating the checksums listed. + // + // In all of these situations they're part of normal usage, so we don't + // really worry about it. We just try to slurp up as many checksums as + // possible. 
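+ // Illustrative shape of the `[metadata]` entries handled below (the
+ // hash value is hypothetical):
+ //
+ //     [metadata]
+ //     "checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d6f0e58..."
+ //
+ // Keys are `"checksum "` followed by an `EncodablePackageId`; an empty
+ // value means "no checksum recorded" and is mapped to `None`.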
+ let mut checksums = HashMap::new(); + let prefix = "checksum "; + let mut to_remove = Vec::new(); + for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) { + to_remove.push(k.to_string()); + let k = &k[prefix.len()..]; + let enc_id: EncodablePackageId = k.parse() + .chain_err(|| internal("invalid encoding of checksum in lockfile"))?; + let id = match lookup_id(&enc_id) { + Ok(Some(id)) => id, + _ => continue, + }; + + let v = if v == "" { + None + } else { + Some(v.to_string()) + }; + checksums.insert(id, v); + } + + for k in to_remove { + metadata.remove(&k); + } + + let mut unused_patches = Vec::new(); + for pkg in self.patch.unused { + let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) { + Some(src) => PackageId::new(&pkg.name, &pkg.version, src)?, + None => continue, + }; + unused_patches.push(id); + } + + Ok(Resolve { + graph: g, + empty_features: HashSet::new(), + features: HashMap::new(), + replacements, + checksums, + metadata, + unused_patches, + }) + } +} + +fn build_path_deps(ws: &Workspace) -> HashMap { + // If a crate is *not* a path source, then we're probably in a situation + // such as `cargo install` with a lock file from a remote dependency. In + // that case we don't need to fixup any path dependencies (as they're not + // actually path dependencies any more), so we ignore them. + let members = ws.members() + .filter(|p| p.package_id().source_id().is_path()) + .collect::>(); + + let mut ret = HashMap::new(); + let mut visited = HashSet::new(); + for member in members.iter() { + ret.insert( + member.package_id().name().to_string(), + member.package_id().source_id().clone(), + ); + visited.insert(member.package_id().source_id().clone()); + } + for member in members.iter() { + build_pkg(member, ws, &mut ret, &mut visited); + } + for deps in ws.root_patch().values() { + for dep in deps { + build_dep(dep, ws, &mut ret, &mut visited); + } + } + for &(_, ref dep) in ws.root_replace() { + build_dep(dep, ws, &mut ret, &mut visited); + } + + return ret; + + fn build_pkg( + pkg: &Package, + ws: &Workspace, + ret: &mut HashMap, + visited: &mut HashSet, + ) { + for dep in pkg.dependencies() { + build_dep(dep, ws, ret, visited); + } + } + + fn build_dep( + dep: &Dependency, + ws: &Workspace, + ret: &mut HashMap, + visited: &mut HashSet, + ) { + let id = dep.source_id(); + if visited.contains(id) || !id.is_path() { + return; + } + let path = match id.url().to_file_path() { + Ok(p) => p.join("Cargo.toml"), + Err(_) => return, + }; + let pkg = match ws.load(&path) { + Ok(p) => p, + Err(_) => return, + }; + ret.insert(pkg.name().to_string(), pkg.package_id().source_id().clone()); + visited.insert(pkg.package_id().source_id().clone()); + build_pkg(&pkg, ws, ret, visited); + } +} + +impl Patch { + fn is_empty(&self) -> bool { + self.unused.is_empty() + } +} + +#[derive(Serialize, Deserialize, Debug, PartialOrd, Ord, PartialEq, Eq)] +pub struct EncodableDependency { + name: String, + version: String, + source: Option, + dependencies: Option>, + replace: Option, +} + +#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)] +pub struct EncodablePackageId { + name: String, + version: String, + source: Option, +} + +impl fmt::Display for EncodablePackageId { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} {}", self.name, self.version)?; + if let Some(ref s) = self.source { + write!(f, " ({})", s.to_url())?; + } + Ok(()) + } +} + +impl FromStr for EncodablePackageId { + type Err = CargoError; + + fn from_str(s: &str) -> CargoResult 
{ + let mut s = s.splitn(3, ' '); + let name = s.next().unwrap(); + let version = s.next() + .ok_or_else(|| internal("invalid serialized PackageId"))?; + let source_id = match s.next() { + Some(s) => { + if s.starts_with('(') && s.ends_with(')') { + Some(SourceId::from_url(&s[1..s.len() - 1])?) + } else { + bail!("invalid serialized PackageId") + } + } + None => None, + }; + + Ok(EncodablePackageId { + name: name.to_string(), + version: version.to_string(), + source: source_id, + }) + } +} + +impl ser::Serialize for EncodablePackageId { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + s.collect_str(self) + } +} + +impl<'de> de::Deserialize<'de> for EncodablePackageId { + fn deserialize(d: D) -> Result + where + D: de::Deserializer<'de>, + { + String::deserialize(d).and_then(|string| { + string + .parse::() + .map_err(de::Error::custom) + }) + } +} + +pub struct WorkspaceResolve<'a, 'cfg: 'a> { + pub ws: &'a Workspace<'cfg>, + pub resolve: &'a Resolve, +} + +impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + let mut ids: Vec<&PackageId> = self.resolve.graph.iter().collect(); + ids.sort(); + + let encodable = ids.iter() + .filter_map(|&id| Some(encodable_resolve_node(id, self.resolve))) + .collect::>(); + + let mut metadata = self.resolve.metadata.clone(); + + for id in ids.iter().filter(|id| !id.source_id().is_path()) { + let checksum = match self.resolve.checksums[*id] { + Some(ref s) => &s[..], + None => "", + }; + let id = encodable_package_id(id); + metadata.insert(format!("checksum {}", id.to_string()), checksum.to_string()); + } + + let metadata = if metadata.is_empty() { + None + } else { + Some(metadata) + }; + + let patch = Patch { + unused: self.resolve + .unused_patches() + .iter() + .map(|id| EncodableDependency { + name: id.name().to_string(), + version: id.version().to_string(), + source: encode_source(id.source_id()), + dependencies: None, + replace: None, + }) + .collect(), + }; + EncodableResolve { + package: Some(encodable), + root: None, + metadata, + patch, + }.serialize(s) + } +} + +fn encodable_resolve_node(id: &PackageId, resolve: &Resolve) -> EncodableDependency { + let (replace, deps) = match resolve.replacement(id) { + Some(id) => (Some(encodable_package_id(id)), None), + None => { + let mut deps = resolve + .graph + .edges(id) + .into_iter() + .flat_map(|a| a) + .map(encodable_package_id) + .collect::>(); + deps.sort(); + (None, Some(deps)) + } + }; + + EncodableDependency { + name: id.name().to_string(), + version: id.version().to_string(), + source: encode_source(id.source_id()), + dependencies: deps, + replace, + } +} + +fn encodable_package_id(id: &PackageId) -> EncodablePackageId { + EncodablePackageId { + name: id.name().to_string(), + version: id.version().to_string(), + source: encode_source(id.source_id()).map(|s| s.with_precise(None)), + } +} + +fn encode_source(id: &SourceId) -> Option { + if id.is_path() { + None + } else { + Some(id.clone()) + } +} diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs new file mode 100644 index 000000000..78a9afea2 --- /dev/null +++ b/src/cargo/core/resolver/mod.rs @@ -0,0 +1,2074 @@ +//! Resolution of the entire dependency graph for a crate +//! +//! This module implements the core logic in taking the world of crates and +//! constraints and creating a resolved graph with locked versions for all +//! crates and their dependencies. This is separate from the registry module +//! 
which is more concerned with discovering crates from various sources; this
+//! module just uses the `Registry` trait as a source from which to learn about crates.
+//!
+//! Actually solving a constraint graph is an NP-hard problem. This algorithm
+//! is basically a nice heuristic to make sure we get roughly the best answer
+//! most of the time. The constraints that we're working with are:
+//!
+//! 1. Each crate can have any number of dependencies. Each dependency can
+//! declare a version range that it is compatible with.
+//! 2. Crates can be activated with multiple versions (e.g. show up in the
+//! dependency graph twice) so long as each pairwise instance has
+//! semver-incompatible versions.
+//!
+//! The algorithm employed here is fairly simple: we do a DFS, activating
+//! the "newest crate" (highest version) first and then going to the next
+//! option. The heuristics we employ are:
+//!
+//! * Never try to activate a crate version which is incompatible. This means we
+//! only try crates which will actually satisfy a dependency and we won't ever
+//! try to activate a crate that's semver compatible with something else
+//! activated (as we're only allowed to have one) nor try to activate a crate
+//! that has the same links attribute as something else
+//! activated.
+//! * Always try to activate the highest version crate first. The default
+//! dependency in Cargo (e.g. when you write `foo = "0.1.2"`) is
+//! semver-compatible, so selecting the highest version possible will allow us
+//! to hopefully satisfy as many dependencies at once as possible.
+//!
+//! Beyond that, what's implemented below is just a naive backtracking version
+//! which should in theory try all possible combinations of dependencies and
+//! versions to see if one works. The first resolution that works causes
+//! everything to bail out immediately and return success, and only if *nothing*
+//! works do we actually return an error up the stack.
+//!
+//! ## Performance
+//!
+//! Note that this is a relatively performance-critical portion of Cargo. The
+//! data that we're processing is proportional to the size of the dependency
+//! graph, which can often be quite large (e.g. take a look at Servo). To make
+//! matters worse the DFS algorithm we've implemented is inherently quite
+//! inefficient. When we add the requirement of backtracking on top it means
+//! that we're implementing something that probably shouldn't be allocating all
+//! over the place.
+
+use std::cmp::Ordering;
+use std::collections::{BTreeMap, BinaryHeap, HashMap, HashSet};
+use std::fmt;
+use std::iter::FromIterator;
+use std::mem;
+use std::ops::Range;
+use std::rc::Rc;
+use std::time::{Duration, Instant};
+
+use semver;
+use url::Url;
+
+use core::{Dependency, PackageId, Registry, SourceId, Summary};
+use core::PackageIdSpec;
+use core::interning::InternedString;
+use util::config::Config;
+use util::Graph;
+use util::errors::{CargoError, CargoResult};
+use util::profile;
+use util::graph::{Edges, Nodes};
+
+pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
+pub use self::encode::{Metadata, WorkspaceResolve};
+
+mod encode;
+mod conflict_cache;
+
+/// Represents a fully resolved package dependency graph. Each node in the graph
+/// is a package and edges represent dependencies between packages.
+///
+/// Each instance of `Resolve` also understands the full set of features used
+/// for each package.
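+// Illustrative contents for a tiny graph where a workspace member `app
+// 0.1.0` depends on one registry crate (names, features and hash are
+// hypothetical):
+//
+//     graph:        app 0.1.0 -> log 0.4.1
+//     features:     { log 0.4.1: {"std"} }
+//     checksums:    { log 0.4.1: Some("ab12..."), app 0.1.0: None }
+//     replacements: {}   // populated from `[replace]`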
+#[derive(PartialEq)] +pub struct Resolve { + graph: Graph, + replacements: HashMap, + empty_features: HashSet, + features: HashMap>, + checksums: HashMap>, + metadata: Metadata, + unused_patches: Vec, +} + +pub struct Deps<'a> { + edges: Option>, + resolve: &'a Resolve, +} + +pub struct DepsNotReplaced<'a> { + edges: Option>, +} + +#[derive(Clone, Copy)] +pub enum Method<'a> { + Everything, // equivalent to Required { dev_deps: true, all_features: true, .. } + Required { + dev_deps: bool, + features: &'a [String], + all_features: bool, + uses_default_features: bool, + }, +} + +impl<'r> Method<'r> { + pub fn split_features(features: &[String]) -> Vec { + features + .iter() + .flat_map(|s| s.split_whitespace()) + .flat_map(|s| s.split(',')) + .filter(|s| !s.is_empty()) + .map(|s| s.to_string()) + .collect::>() + } +} + +// Information about the dependencies for a crate, a tuple of: +// +// (dependency info, candidates, features activated) +type DepInfo = (Dependency, Rc>, Rc>); + +#[derive(Clone)] +struct Candidate { + summary: Summary, + replace: Option, +} + +impl Resolve { + /// Resolves one of the paths from the given dependent package up to + /// the root. + pub fn path_to_top<'a>(&'a self, pkg: &'a PackageId) -> Vec<&'a PackageId> { + self.graph.path_to_top(pkg) + } + pub fn register_used_patches(&mut self, patches: &HashMap>) { + for summary in patches.values().flat_map(|v| v) { + if self.iter().any(|id| id == summary.package_id()) { + continue; + } + self.unused_patches.push(summary.package_id().clone()); + } + } + + pub fn merge_from(&mut self, previous: &Resolve) -> CargoResult<()> { + // Given a previous instance of resolve, it should be forbidden to ever + // have a checksums which *differ*. If the same package id has differing + // checksums, then something has gone wrong such as: + // + // * Something got seriously corrupted + // * A "mirror" isn't actually a mirror as some changes were made + // * A replacement source wasn't actually a replacment, some changes + // were made + // + // In all of these cases, we want to report an error to indicate that + // something is awry. Normal execution (esp just using crates.io) should + // never run into this. + for (id, cksum) in previous.checksums.iter() { + if let Some(mine) = self.checksums.get(id) { + if mine == cksum { + continue; + } + + // If the previous checksum wasn't calculated, the current + // checksum is `Some`. This may indicate that a source was + // erroneously replaced or was replaced with something that + // desires stronger checksum guarantees than can be afforded + // elsewhere. + if cksum.is_none() { + bail!( + "\ +checksum for `{}` was not previously calculated, but a checksum could now \ +be calculated + +this could be indicative of a few possible situations: + + * the source `{}` did not previously support checksums, + but was replaced with one that does + * newer Cargo implementations know how to checksum this source, but this + older implementation does not + * the lock file is corrupt +", + id, + id.source_id() + ) + + // If our checksum hasn't been calculated, then it could mean + // that future Cargo figured out how to checksum something or + // more realistically we were overridden with a source that does + // not have checksums. 
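+
+ // Summarizing the three failure modes checked here, given the
+ // `(previous, current)` checksums for one package id (sketch):
+ //
+ //     (None,    Some(_))           => bail!  (this branch)
+ //     (Some(_), None)              => bail!  (next branch)
+ //     (Some(a), Some(b)) if a != b => bail!  (final branch)
+ //
+ // Equal checksums were already skipped by the `continue` above.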
+ } else if mine.is_none() { + bail!( + "\ +checksum for `{}` could not be calculated, but a checksum is listed in \ +the existing lock file + +this could be indicative of a few possible situations: + + * the source `{}` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `{0}` is the same as when the lockfile was generated +", + id, + id.source_id() + ) + + // If the checksums aren't equal, and neither is None, then they + // must both be Some, in which case the checksum now differs. + // That's quite bad! + } else { + bail!( + "\ +checksum for `{}` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `{0}` is the same as when the lockfile was generated +", + id + ); + } + } + } + + // Be sure to just copy over any unknown metadata. + self.metadata = previous.metadata.clone(); + Ok(()) + } + + pub fn iter(&self) -> Nodes { + self.graph.iter() + } + + pub fn deps(&self, pkg: &PackageId) -> Deps { + Deps { + edges: self.graph.edges(pkg), + resolve: self, + } + } + + pub fn deps_not_replaced(&self, pkg: &PackageId) -> DepsNotReplaced { + DepsNotReplaced { + edges: self.graph.edges(pkg), + } + } + + pub fn replacement(&self, pkg: &PackageId) -> Option<&PackageId> { + self.replacements.get(pkg) + } + + pub fn replacements(&self) -> &HashMap { + &self.replacements + } + + pub fn features(&self, pkg: &PackageId) -> &HashSet { + self.features.get(pkg).unwrap_or(&self.empty_features) + } + + pub fn features_sorted(&self, pkg: &PackageId) -> Vec<&str> { + let mut v = Vec::from_iter(self.features(pkg).iter().map(|s| s.as_ref())); + v.sort(); + v + } + + pub fn query(&self, spec: &str) -> CargoResult<&PackageId> { + PackageIdSpec::query_str(spec, self.iter()) + } + + pub fn unused_patches(&self) -> &[PackageId] { + &self.unused_patches + } +} + +impl fmt::Debug for Resolve { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "graph: {:?}\n", self.graph)?; + write!(fmt, "\nfeatures: {{\n")?; + for (pkg, features) in &self.features { + write!(fmt, " {}: {:?}\n", pkg, features)?; + } + write!(fmt, "}}") + } +} + +impl<'a> Iterator for Deps<'a> { + type Item = &'a PackageId; + + fn next(&mut self) -> Option<&'a PackageId> { + self.edges + .as_mut() + .and_then(|e| e.next()) + .map(|id| self.resolve.replacement(id).unwrap_or(id)) + } +} + +impl<'a> Iterator for DepsNotReplaced<'a> { + type Item = &'a PackageId; + + fn next(&mut self) -> Option<&'a PackageId> { + self.edges.as_mut().and_then(|e| e.next()) + } +} + +struct RcList { + head: Option)>>, +} + +impl RcList { + fn new() -> RcList { + RcList { head: None } + } + + fn push(&mut self, data: T) { + let node = Rc::new(( + data, + RcList { + head: self.head.take(), + }, + )); + self.head = Some(node); + } +} + +// Not derived to avoid `T: Clone` +impl Clone for RcList { + fn clone(&self) -> RcList { + RcList { + head: self.head.clone(), + } + } +} + +// Avoid stack overflows on drop by turning recursion into a loop +impl Drop for RcList { + fn drop(&mut self) { + let mut cur = self.head.take(); + while let Some(head) = cur { + match Rc::try_unwrap(head) { + Ok((_data, mut next)) => cur = next.head.take(), + Err(_) => break, + } + } + } +} + +enum GraphNode { + Add(PackageId), + Link(PackageId, PackageId), +} + +// A `Context` is basically a bunch of 
local resolution information which is +// kept around for all `BacktrackFrame` instances. As a result, this runs the +// risk of being cloned *a lot* so we want to make this as cheap to clone as +// possible. +#[derive(Clone)] +struct Context { + // TODO: Both this and the two maps below are super expensive to clone. We should + // switch to persistent hash maps if we can at some point or otherwise + // make these much cheaper to clone in general. + activations: Activations, + resolve_features: HashMap>, + links: HashMap, + + // These are two cheaply-cloneable lists (O(1) clone) which are effectively + // hash maps but are built up as "construction lists". We'll iterate these + // at the very end and actually construct the map that we're making. + resolve_graph: RcList, + resolve_replacements: RcList<(PackageId, PackageId)>, + + // These warnings are printed after resolution. + warnings: RcList, +} + +type Activations = HashMap<(InternedString, SourceId), Rc>>; + +/// Builds the list of all packages required to build the first argument. +/// +/// * `summaries` - the list of package summaries along with how to resolve +/// their features. This is a list of all top-level packages that are intended +/// to be part of the lock file (resolve output). These typically are a list +/// of all workspace members. +/// +/// * `replacements` - this is a list of `[replace]` directives found in the +/// root of the workspace. The list here is a `PackageIdSpec` of what to +/// replace and a `Dependency` to replace that with. In general it's not +/// recommended to use `[replace]` any more and use `[patch]` instead, which +/// is supported elsewhere. +/// +/// * `registry` - this is the source from which all package summaries are +/// loaded. It's expected that this is extensively configured ahead of time +/// and is idempotent with our requests to it (aka returns the same results +/// for the same query every time). Typically this is an instance of a +/// `PackageRegistry`. +/// +/// * `try_to_use` - this is a list of package ids which were previously found +/// in the lock file. We heuristically prefer the ids listed in `try_to_use` +/// when sorting candidates to activate, but otherwise this isn't used +/// anywhere else. 
+/// +/// * `config` - a location to print warnings and such, or `None` if no warnings +/// should be printed +/// +/// * `print_warnings` - whether or not to print backwards-compatibility +/// warnings and such +pub fn resolve( + summaries: &[(Summary, Method)], + replacements: &[(PackageIdSpec, Dependency)], + registry: &mut Registry, + try_to_use: &HashSet<&PackageId>, + config: Option<&Config>, + print_warnings: bool, +) -> CargoResult { + let cx = Context { + resolve_graph: RcList::new(), + resolve_features: HashMap::new(), + links: HashMap::new(), + resolve_replacements: RcList::new(), + activations: HashMap::new(), + warnings: RcList::new(), + }; + let _p = profile::start("resolving"); + let minimal_versions = match config { + Some(config) => config.cli_unstable().minimal_versions, + None => false, + }; + let mut registry = RegistryQueryer::new(registry, replacements, try_to_use, minimal_versions); + let cx = activate_deps_loop(cx, &mut registry, summaries, config)?; + + let mut resolve = Resolve { + graph: cx.graph(), + empty_features: HashSet::new(), + checksums: HashMap::new(), + metadata: BTreeMap::new(), + replacements: cx.resolve_replacements(), + features: cx.resolve_features + .iter() + .map(|(k, v)| (k.clone(), v.iter().map(|x| x.to_string()).collect())) + .collect(), + unused_patches: Vec::new(), + }; + + for summary in cx.activations.values().flat_map(|v| v.iter()) { + let cksum = summary.checksum().map(|s| s.to_string()); + resolve + .checksums + .insert(summary.package_id().clone(), cksum); + } + + check_cycles(&resolve, &cx.activations)?; + trace!("resolved: {:?}", resolve); + + // If we have a shell, emit warnings about required deps used as feature. + if let Some(config) = config { + if print_warnings { + let mut shell = config.shell(); + let mut warnings = &cx.warnings; + while let Some(ref head) = warnings.head { + shell.warn(&head.0)?; + warnings = &head.1; + } + } + } + + Ok(resolve) +} + +/// Attempts to activate the summary `candidate` in the context `cx`. +/// +/// This function will pull dependency summaries from the registry provided, and +/// the dependencies of the package will be determined by the `method` provided. +/// If `candidate` was activated, this function returns the dependency frame to +/// iterate through next. +fn activate( + cx: &mut Context, + registry: &mut RegistryQueryer, + parent: Option<&Summary>, + candidate: Candidate, + method: &Method, +) -> ActivateResult> { + if let Some(parent) = parent { + cx.resolve_graph.push(GraphNode::Link( + parent.package_id().clone(), + candidate.summary.package_id().clone(), + )); + } + + let activated = cx.flag_activated(&candidate.summary, method)?; + + let candidate = match candidate.replace { + Some(replace) => { + cx.resolve_replacements.push(( + candidate.summary.package_id().clone(), + replace.package_id().clone(), + )); + if cx.flag_activated(&replace, method)? 
&& activated { + return Ok(None); + } + trace!( + "activating {} (replacing {})", + replace.package_id(), + candidate.summary.package_id() + ); + replace + } + None => { + if activated { + return Ok(None); + } + trace!("activating {}", candidate.summary.package_id()); + candidate.summary + } + }; + + let now = Instant::now(); + let deps = cx.build_deps(registry, parent, &candidate, method)?; + let frame = DepsFrame { + parent: candidate, + just_for_error_messages: false, + remaining_siblings: RcVecIter::new(Rc::new(deps)), + }; + Ok(Some((frame, now.elapsed()))) +} + +struct RcVecIter { + vec: Rc>, + rest: Range, +} + +impl RcVecIter { + fn new(vec: Rc>) -> RcVecIter { + RcVecIter { + rest: 0..vec.len(), + vec, + } + } +} + +// Not derived to avoid `T: Clone` +impl Clone for RcVecIter { + fn clone(&self) -> RcVecIter { + RcVecIter { + vec: self.vec.clone(), + rest: self.rest.clone(), + } + } +} + +impl Iterator for RcVecIter +where + T: Clone, +{ + type Item = (usize, T); + + fn next(&mut self) -> Option<(usize, T)> { + self.rest + .next() + .and_then(|i| self.vec.get(i).map(|val| (i, val.clone()))) + } + + fn size_hint(&self) -> (usize, Option) { + self.rest.size_hint() + } +} + +#[derive(Clone)] +struct DepsFrame { + parent: Summary, + just_for_error_messages: bool, + remaining_siblings: RcVecIter, +} + +impl DepsFrame { + /// Returns the least number of candidates that any of this frame's siblings + /// has. + /// + /// The `remaining_siblings` array is already sorted with the smallest + /// number of candidates at the front, so we just return the number of + /// candidates in that entry. + fn min_candidates(&self) -> usize { + self.remaining_siblings + .clone() + .next() + .map(|(_, (_, candidates, _))| candidates.len()) + .unwrap_or(0) + } + + fn flatten<'s>(&'s self) -> Box + 's> { + // TODO: with impl Trait the Box can be removed + Box::new( + self.remaining_siblings + .clone() + .map(move |(_, (d, _, _))| (self.parent.package_id(), d)), + ) + } +} + +impl PartialEq for DepsFrame { + fn eq(&self, other: &DepsFrame) -> bool { + self.just_for_error_messages == other.just_for_error_messages + && self.min_candidates() == other.min_candidates() + } +} + +impl Eq for DepsFrame {} + +impl PartialOrd for DepsFrame { + fn partial_cmp(&self, other: &DepsFrame) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for DepsFrame { + fn cmp(&self, other: &DepsFrame) -> Ordering { + self.just_for_error_messages + .cmp(&other.just_for_error_messages) + .then_with(|| + // the frame with the sibling that has the least number of candidates + // needs to get bubbled up to the top of the heap we use below, so + // reverse comparison here. + self.min_candidates().cmp(&other.min_candidates()).reverse()) + } +} + +/// All possible reasons that a package might fail to activate. +/// +/// We maintain a list of conflicts for error reporting as well as backtracking +/// purposes. Each reason here is why candidates may be rejected or why we may +/// fail to resolve a dependency. +#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)] +enum ConflictReason { + /// There was a semver conflict, for example we tried to activate a package + /// 1.0.2 but 1.1.0 was already activated (aka a compatible semver version + /// is already activated) + Semver, + + /// The `links` key is being violated. For example one crate in the + /// dependency graph has `links = "foo"` but this crate also had that, and + /// we're only allowed one per dependency graph. 
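+ ///
+ /// For example (hypothetical manifests), two crates both declaring
+ ///
+ ///     [package]
+ ///     links = "git2"
+ ///
+ /// can never coexist in one graph; activating the second one records
+ /// `ConflictReason::Links("git2".to_string())` against the first one's id.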
+ Links(String), + + /// A dependency listed features that weren't actually available on the + /// candidate. For example we tried to activate feature `foo` but the + /// candidiate we're activating didn't actually have the feature `foo`. + MissingFeatures(String), +} + +enum ActivateError { + Fatal(CargoError), + Conflict(PackageId, ConflictReason), +} + +type ActivateResult = Result; + +impl From<::failure::Error> for ActivateError { + fn from(t: ::failure::Error) -> Self { + ActivateError::Fatal(t) + } +} + +impl From<(PackageId, ConflictReason)> for ActivateError { + fn from(t: (PackageId, ConflictReason)) -> Self { + ActivateError::Conflict(t.0, t.1) + } +} + +impl ConflictReason { + fn is_links(&self) -> bool { + if let ConflictReason::Links(_) = *self { + return true; + } + false + } + + fn is_missing_features(&self) -> bool { + if let ConflictReason::MissingFeatures(_) = *self { + return true; + } + false + } +} + +struct RegistryQueryer<'a> { + registry: &'a mut (Registry + 'a), + replacements: &'a [(PackageIdSpec, Dependency)], + try_to_use: &'a HashSet<&'a PackageId>, + // TODO: with nll the Rc can be removed + cache: HashMap>>, + // If set the list of dependency candidates will be sorted by minimal + // versions first. That allows `cargo update -Z minimal-versions` which will + // specify minimum depedency versions to be used. + minimal_versions: bool, +} + +impl<'a> RegistryQueryer<'a> { + fn new( + registry: &'a mut Registry, + replacements: &'a [(PackageIdSpec, Dependency)], + try_to_use: &'a HashSet<&'a PackageId>, + minimal_versions: bool, + ) -> Self { + RegistryQueryer { + registry, + replacements, + cache: HashMap::new(), + try_to_use, + minimal_versions, + } + } + + /// Queries the `registry` to return a list of candidates for `dep`. + /// + /// This method is the location where overrides are taken into account. If + /// any candidates are returned which match an override then the override is + /// applied by performing a second query for what the override should + /// return. + fn query(&mut self, dep: &Dependency) -> CargoResult>> { + if let Some(out) = self.cache.get(dep).cloned() { + return Ok(out); + } + + let mut ret = Vec::new(); + self.registry.query(dep, &mut |s| { + ret.push(Candidate { + summary: s, + replace: None, + }); + })?; + for candidate in ret.iter_mut() { + let summary = &candidate.summary; + + let mut potential_matches = self.replacements + .iter() + .filter(|&&(ref spec, _)| spec.matches(summary.package_id())); + + let &(ref spec, ref dep) = match potential_matches.next() { + None => continue, + Some(replacement) => replacement, + }; + debug!("found an override for {} {}", dep.name(), dep.version_req()); + + let mut summaries = self.registry.query_vec(dep)?.into_iter(); + let s = summaries.next().ok_or_else(|| { + format_err!( + "no matching package for override `{}` found\n\ + location searched: {}\n\ + version required: {}", + spec, + dep.source_id(), + dep.version_req() + ) + })?; + let summaries = summaries.collect::>(); + if !summaries.is_empty() { + let bullets = summaries + .iter() + .map(|s| format!(" * {}", s.package_id())) + .collect::>(); + bail!( + "the replacement specification `{}` matched \ + multiple packages:\n * {}\n{}", + spec, + s.package_id(), + bullets.join("\n") + ); + } + + // The dependency should be hard-coded to have the same name and an + // exact version requirement, so both of these assertions should + // never fail. 
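+ // For reference, a `[replace]` entry such as (hypothetical)
+ //
+ //     [replace]
+ //     "foo:1.2.3" = { git = "https://github.com/example/foo" }
+ //
+ // is parsed into a `Dependency` named `foo` with the exact requirement
+ // `=1.2.3`, which is what makes the assertions below safe.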
+ assert_eq!(s.version(), summary.version()); + assert_eq!(s.name(), summary.name()); + + let replace = if s.source_id() == summary.source_id() { + debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s); + None + } else { + Some(s) + }; + let matched_spec = spec.clone(); + + // Make sure no duplicates + if let Some(&(ref spec, _)) = potential_matches.next() { + bail!( + "overlapping replacement specifications found:\n\n \ + * {}\n * {}\n\nboth specifications match: {}", + matched_spec, + spec, + summary.package_id() + ); + } + + for dep in summary.dependencies() { + debug!("\t{} => {}", dep.name(), dep.version_req()); + } + + candidate.replace = replace; + } + + // When we attempt versions for a package we'll want to do so in a + // sorted fashion to pick the "best candidates" first. Currently we try + // prioritized summaries (those in `try_to_use`) and failing that we + // list everything from the maximum version to the lowest version. + ret.sort_unstable_by(|a, b| { + let a_in_previous = self.try_to_use.contains(a.summary.package_id()); + let b_in_previous = self.try_to_use.contains(b.summary.package_id()); + let previous_cmp = a_in_previous.cmp(&b_in_previous).reverse(); + match previous_cmp { + Ordering::Equal => { + let cmp = a.summary.version().cmp(&b.summary.version()); + if self.minimal_versions == true { + // Lower version ordered first. + cmp + } else { + // Higher version ordered first. + cmp.reverse() + } + } + _ => previous_cmp, + } + }); + + let out = Rc::new(ret); + + self.cache.insert(dep.clone(), out.clone()); + + Ok(out) + } +} + +#[derive(Clone)] +struct BacktrackFrame { + cur: usize, + context_backup: Context, + deps_backup: BinaryHeap, + remaining_candidates: RemainingCandidates, + parent: Summary, + dep: Dependency, + features: Rc>, + conflicting_activations: HashMap, +} + +/// A helper "iterator" used to extract candidates within a current `Context` of +/// a dependency graph. +/// +/// This struct doesn't literally implement the `Iterator` trait (requires a few +/// more inputs) but in general acts like one. Each `RemainingCandidates` is +/// created with a list of candidates to choose from. When attempting to iterate +/// over the list of candidates only *valid* candidates are returned. Validity +/// is defined within a `Context`. +/// +/// Candidates passed to `new` may not be returned from `next` as they could be +/// filtered out, and if iteration stops a map of all packages which caused +/// filtered out candidates to be filtered out will be returned. +#[derive(Clone)] +struct RemainingCandidates { + remaining: RcVecIter, + // note: change to RcList or something if clone is to expensive + conflicting_prev_active: HashMap, + // This is a inlined peekable generator + has_another: Option, +} + +impl RemainingCandidates { + fn new(candidates: &Rc>) -> RemainingCandidates { + RemainingCandidates { + remaining: RcVecIter::new(Rc::clone(candidates)), + conflicting_prev_active: HashMap::new(), + has_another: None, + } + } + + /// Attempts to find another candidate to check from this list. + /// + /// This method will attempt to move this iterator forward, returning a + /// candidate that's possible to activate. The `cx` argument is the current + /// context which determines validity for candidates returned, and the `dep` + /// is the dependency listing that we're activating for. + /// + /// If successful a `(Candidate, bool)` pair will be returned. 
The
+ /// `Candidate` is the candidate to attempt to activate, and the `bool` is
+ /// an indicator of whether there are remaining candidates to try or if
+ /// we've reached the end of iteration.
+ ///
+ /// If we've reached the end of the iterator here then `Err` will be
+ /// returned. The error will contain a map of package id to conflict reason,
+ /// where each package id caused a candidate to be filtered out from the
+ /// original list for the reason listed.
+ fn next(
+ &mut self,
+ cx: &Context,
+ dep: &Dependency,
+ ) -> Result<(Candidate, bool), HashMap<PackageId, ConflictReason>> {
+ let prev_active = cx.prev_active(dep);
+
+ for (_, b) in self.remaining.by_ref() {
+ // The `links` key in the manifest dictates that there's only one
+ // package in a dependency graph, globally, with that particular
+ // `links` key. If this candidate links to something that's already
+ // linked to by a different package then we've gotta skip this.
+ if let Some(link) = b.summary.links() {
+ if let Some(a) = cx.links.get(&link) {
+ if a != b.summary.package_id() {
+ self.conflicting_prev_active
+ .entry(a.clone())
+ .or_insert_with(|| ConflictReason::Links(link.to_string()));
+ continue;
+ }
+ }
+ }
+
+ // Otherwise the condition for being a valid candidate relies on
+ // semver. Cargo dictates that you can't duplicate multiple
+ // semver-compatible versions of a crate. For example we can't
+ // simultaneously activate `foo 1.0.2` and `foo 1.2.0`. We can,
+ // however, activate `1.0.2` and `2.0.0`.
+ //
+ // Here we throw out our candidate if it's *compatible* with, yet not
+ // equal to, a previously activated version.
+ if let Some(a) = prev_active
+ .iter()
+ .find(|a| compatible(a.version(), b.summary.version()))
+ {
+ if *a != b.summary {
+ self.conflicting_prev_active
+ .entry(a.package_id().clone())
+ .or_insert(ConflictReason::Semver);
+ continue;
+ }
+ }
+
+ // Well if we made it this far then we've got a valid dependency. We
+ // want this iterator to be inherently "peekable" so we don't
+ // necessarily return the item just yet. Instead we stash it away to
+ // get returned later, and if we replaced something then that was
+ // actually the candidate to try first so we return that.
+ if let Some(r) = mem::replace(&mut self.has_another, Some(b)) {
+ return Ok((r, true));
+ }
+ }
+
+ // Alright we've entirely exhausted our list of candidates. If we've got
+ // something stashed away return that here (also indicating that there's
+ // nothing else). If nothing is stashed away we return the list of all
+ // conflicting activations, if any.
+ //
+ // TODO: can the `conflicting_prev_active` clone be avoided here? should
+ // panic if this is called twice and an error is already returned
+ self.has_another
+ .take()
+ .map(|r| (r, false))
+ .ok_or_else(|| self.conflicting_prev_active.clone())
+ }
+}
+/// Recursively activates the dependencies for the given `summaries`, in
+/// depth-first order, backtracking across possible candidates for each
+/// dependency as necessary.
+///
+/// If all dependencies can be activated and resolved to a version in the
+/// dependency graph, the resulting `Context` is returned.
+fn activate_deps_loop(
+ mut cx: Context,
+ registry: &mut RegistryQueryer,
+ summaries: &[(Summary, Method)],
+ config: Option<&Config>,
+) -> CargoResult<Context> {
+ // Note that a `BinaryHeap` is used for the remaining dependencies that need
+ // activation. This heap is sorted such that the "largest value" is the most
+ // constrained dependency, or the one with the least candidates.
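+ //
+ // As an illustration of that ordering: with the `Ord for DepsFrame`
+ // impl above (and all else equal), a frame whose most constrained
+ // sibling has a single candidate compares greater than one whose
+ // siblings all have dozens, so `remaining_deps.pop()` hands back the
+ // tightest dependency first.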
+ // + // This helps us get through super constrained portions of the dependency + // graph quickly and hopefully lock down what later larger dependencies can + // use (those with more candidates). + let mut backtrack_stack = Vec::new(); + let mut remaining_deps = BinaryHeap::new(); + + // `past_conflicting_activations` is a cache of the reasons for each time we + // backtrack. + let mut past_conflicting_activations = conflict_cache::ConflictCache::new(); + + // Activate all the initial summaries to kick off some work. + for &(ref summary, ref method) in summaries { + debug!("initial activation: {}", summary.package_id()); + let candidate = Candidate { + summary: summary.clone(), + replace: None, + }; + let res = activate(&mut cx, registry, None, candidate, method); + match res { + Ok(Some((frame, _))) => remaining_deps.push(frame), + Ok(None) => (), + Err(ActivateError::Fatal(e)) => return Err(e), + Err(ActivateError::Conflict(_, _)) => panic!("bad error from activate"), + } + } + + let mut ticks = 0; + let start = Instant::now(); + let time_to_print = Duration::from_millis(500); + let mut printed = false; + let mut deps_time = Duration::new(0, 0); + + // Main resolution loop, this is the workhorse of the resolution algorithm. + // + // You'll note that a few stacks are maintained on the side, which might + // seem odd when this algorithm looks like it could be implemented + // recursively. While correct, this is implemented iteratively to avoid + // blowing the stack (the recursion depth is proportional to the size of the + // input). + // + // The general sketch of this loop is to run until there are no dependencies + // left to activate, and for each dependency to attempt to activate all of + // its own dependencies in turn. The `backtrack_stack` is a side table of + // backtracking states where if we hit an error we can return to in order to + // attempt to continue resolving. + while let Some(mut deps_frame) = remaining_deps.pop() { + // If we spend a lot of time here (we shouldn't in most cases) then give + // a bit of a visual indicator as to what we're doing. Only enable this + // when stderr is a tty (a human is likely to be watching) to ensure we + // get deterministic output otherwise when observed by tools. + // + // Also note that we hit this loop a lot, so it's fairly performance + // sensitive. As a result try to defer a possibly expensive operation + // like `Instant::now` by only checking every N iterations of this loop + // to amortize the cost of the current time lookup. + ticks += 1; + if let Some(config) = config { + if config.shell().is_err_tty() && !printed && ticks % 1000 == 0 + && start.elapsed() - deps_time > time_to_print + { + printed = true; + config.shell().status("Resolving", "dependency graph...")?; + } + } + + let just_here_for_the_error_messages = deps_frame.just_for_error_messages; + + // Figure out what our next dependency to activate is, and if nothing is + // listed then we're entirely done with this frame (yay!) and we can + // move on to the next frame. 
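+ //
+ // Note that in the `Some` arm below the frame is pushed back onto
+ // `remaining_deps` before the sibling is processed, so any siblings
+ // that remain will be revisited on a later iteration of this loop.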
+ let frame = match deps_frame.remaining_siblings.next() { + Some(sibling) => { + let parent = Summary::clone(&deps_frame.parent); + remaining_deps.push(deps_frame); + (parent, sibling) + } + None => continue, + }; + let (mut parent, (mut cur, (mut dep, candidates, mut features))) = frame; + assert!(!remaining_deps.is_empty()); + + trace!( + "{}[{}]>{} {} candidates", + parent.name(), + cur, + dep.name(), + candidates.len() + ); + trace!( + "{}[{}]>{} {} prev activations", + parent.name(), + cur, + dep.name(), + cx.prev_active(&dep).len() + ); + + let just_here_for_the_error_messages = just_here_for_the_error_messages + && past_conflicting_activations + .conflicting(&cx, &dep) + .is_some(); + + let mut remaining_candidates = RemainingCandidates::new(&candidates); + + // `conflicting_activations` stores all the reasons we were unable to + // activate candidates. One of these reasons will have to go away for + // backtracking to find a place to restart. It is also the list of + // things to explain in the error message if we fail to resolve. + // + // This is a map of package id to a reason why that packaged caused a + // conflict for us. + let mut conflicting_activations = HashMap::new(); + + // When backtracking we don't fully update `conflicting_activations` + // especially for the cases that we didn't make a backtrack frame in the + // first place. This `backtracked` var stores whether we are continuing + // from a restored backtrack frame so that we can skip caching + // `conflicting_activations` in `past_conflicting_activations` + let mut backtracked = false; + + loop { + let next = remaining_candidates.next(&cx, &dep); + + let (candidate, has_another) = next.or_else(|conflicting| { + // If we get here then our `remaining_candidates` was just + // exhausted, so `dep` failed to activate. + // + // It's our job here to backtrack, if possible, and find a + // different candidate to activate. If we can't find any + // candidates whatsoever then it's time to bail entirely. + trace!("{}[{}]>{} -- no candidates", parent.name(), cur, dep.name()); + + // Add all the reasons to our frame's list of conflicting + // activations, as we may use this to start backtracking later. + conflicting_activations.extend(conflicting); + + // Use our list of `conflicting_activations` to add to our + // global list of past conflicting activations, effectively + // globally poisoning `dep` if `conflicting_activations` ever + // shows up again. We'll use the `past_conflicting_activations` + // below to determine if a dependency is poisoned and skip as + // much work as possible. + // + // If we're only here for the error messages then there's no + // need to try this as this dependency is already known to be + // bad. + // + // As we mentioned above with the `backtracked` variable if this + // local is set to `true` then our `conflicting_activations` may + // not be right, so we can't push into our global cache. + if !just_here_for_the_error_messages && !backtracked { + past_conflicting_activations.insert(&dep, &conflicting_activations); + } + + match find_candidate(&mut backtrack_stack, &parent, &conflicting_activations) { + Some((candidate, has_another, frame)) => { + // Reset all of our local variables used with the + // contents of `frame` to complete our backtrack. 
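+ //
+ // Everything captured when this `BacktrackFrame` was created is
+ // restored into the locals below, as if the activations attempted
+ // since then had never happened.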
+ cur = frame.cur; + cx = frame.context_backup; + remaining_deps = frame.deps_backup; + remaining_candidates = frame.remaining_candidates; + parent = frame.parent; + dep = frame.dep; + features = frame.features; + conflicting_activations = frame.conflicting_activations; + backtracked = true; + Ok((candidate, has_another)) + } + None => Err(activation_error( + &cx, + registry.registry, + &parent, + &dep, + &conflicting_activations, + &candidates, + config, + )), + } + })?; + + // If we're only here for the error messages then we know that this + // activation will fail one way or another. To that end if we've got + // more candidates we want to fast-forward to the last one as + // otherwise we'll just backtrack here anyway (helping us to skip + // some work). + if just_here_for_the_error_messages && !backtracked && has_another { + continue; + } + + // We have a `candidate`. Create a `BacktrackFrame` so we can add it + // to the `backtrack_stack` later if activation succeeds. + // + // Note that if we don't actually have another candidate then there + // will be nothing to backtrack to so we skip construction of the + // frame. This is a relatively important optimization as a number of + // the `clone` calls below can be quite expensive, so we avoid them + // if we can. + let backtrack = if has_another { + Some(BacktrackFrame { + cur, + context_backup: Context::clone(&cx), + deps_backup: >::clone(&remaining_deps), + remaining_candidates: remaining_candidates.clone(), + parent: Summary::clone(&parent), + dep: Dependency::clone(&dep), + features: Rc::clone(&features), + conflicting_activations: conflicting_activations.clone(), + }) + } else { + None + }; + + let pid = candidate.summary.package_id().clone(); + let method = Method::Required { + dev_deps: false, + features: &features, + all_features: false, + uses_default_features: dep.uses_default_features(), + }; + trace!( + "{}[{}]>{} trying {}", + parent.name(), + cur, + dep.name(), + candidate.summary.version() + ); + let res = activate(&mut cx, registry, Some(&parent), candidate, &method); + + let successfully_activated = match res { + // Success! We've now activated our `candidate` in our context + // and we're almost ready to move on. We may want to scrap this + // frame in the end if it looks like it's not going to end well, + // so figure that out here. + Ok(Some((mut frame, dur))) => { + deps_time += dur; + + // Our `frame` here is a new package with its own list of + // dependencies. Do a sanity check here of all those + // dependencies by cross-referencing our global + // `past_conflicting_activations`. Recall that map is a + // global cache which lists sets of packages where, when + // activated, the dependency is unresolvable. + // + // If any our our frame's dependencies fit in that bucket, + // aka known unresolvable, then we extend our own set of + // conflicting activations with theirs. We can do this + // because the set of conflicts we found implies the + // dependency can't be activated which implies that we + // ourselves can't be activated, so we know that they + // conflict with us. + let mut has_past_conflicting_dep = just_here_for_the_error_messages; + if !has_past_conflicting_dep { + if let Some(conflicting) = frame + .remaining_siblings + .clone() + .filter_map(|(_, (ref new_dep, _, _))| { + past_conflicting_activations.conflicting(&cx, &new_dep) + }) + .next() + { + // If one of our deps is known unresolvable + // then we will not succeed. 
+ // However, if we are part of the reason that
+ // one of our deps conflicts, then
+ // we can make a stronger statement,
+ // because we will definitely be activated when
+ // we try that dep.
+ conflicting_activations.extend(
+ conflicting
+ .iter()
+ .filter(|&(p, _)| p != &pid)
+ .map(|(p, r)| (p.clone(), r.clone())),
+ );
+
+ has_past_conflicting_dep = true;
+ }
+ }
+ // If any of `remaining_deps` are known unresolvable with
+ // us activated, then we extend our own set of
+ // conflicting activations with theirs and with that dep's
+ // parent. We can do this because the set of conflicts we
+ // found implies the dependency can't be activated, which
+ // implies that we ourselves are incompatible with that dep,
+ // so we know that the dep's parent conflicts with us.
+ if !has_past_conflicting_dep {
+ if let Some(known_related_bad_deps) =
+ past_conflicting_activations.dependencies_conflicting_with(&pid)
+ {
+ if let Some((other_parent, conflict)) = remaining_deps
+ .iter()
+ .flat_map(|other| other.flatten())
+ // for deps related to us
+ .filter(|&(_, ref other_dep)|
+ known_related_bad_deps.contains(other_dep))
+ .filter_map(|(other_parent, other_dep)| {
+ past_conflicting_activations
+ .find_conflicting(
+ &cx,
+ &other_dep,
+ |con| con.contains_key(&pid)
+ )
+ .map(|con| (other_parent, con))
+ })
+ .next()
+ {
+ let rel = conflict.get(&pid).unwrap().clone();
+
+ // The conflict we found is
+ // "other dep will not succeed if we are activated."
+ // We want to add
+ // "our dep will not succeed if other dep is in remaining_deps"
+ // but that is not how the cache is set up.
+ // So we add the less general but much faster,
+ // "our dep will not succeed if other dep's parent is activated".
+ conflicting_activations.extend(
+ conflict
+ .iter()
+ .filter(|&(p, _)| p != &pid)
+ .map(|(p, r)| (p.clone(), r.clone())),
+ );
+ conflicting_activations.insert(other_parent.clone(), rel);
+ has_past_conflicting_dep = true;
+ }
+ }
+ }
+
+ // Ok, if we're in a "known failure" state for this frame we
+ // may want to skip it altogether. We don't want to
+ // skip it, though, in the case that we're displaying error
+ // messages to the user!
+ //
+ // Here we need to figure out whether the user would notice if we
+ // skipped this candidate (if it's known to fail, aka it has a
+ // conflicting dep and we're the last candidate). If we're
+ // here for the error messages, we can't skip it (but we can
+ // prune extra work). If we don't have any candidates in our
+ // backtrack stack then we're the last line of defense, so
+ // we'll want to present an error message for sure.
+ let activate_for_error_message = has_past_conflicting_dep && !has_another && {
+ just_here_for_the_error_messages || {
+ conflicting_activations
+ .extend(remaining_candidates.conflicting_prev_active.clone());
+ find_candidate(
+ &mut backtrack_stack.clone(),
+ &parent,
+ &conflicting_activations,
+ ).is_none()
+ }
+ };
+
+ // If we're only here for the error messages then we know
+ // one of our candidate deps will fail, meaning we will
+ // fail and that none of the backtrack frames will find a
+ // candidate that will help. Consequently let's clean up the
+ // no longer needed backtrack frames.
+ if activate_for_error_message {
+ backtrack_stack.clear();
+ }
+
+ // If we don't know for a fact that we'll fail, or if we're
+ // just here for the error message, then we push this frame
+ // onto our list of to-be-resolved frames, which will generate more
+ // work for us later on.
+ // + // Otherwise we're guaranteed to fail and were not here for + // error messages, so we skip work and don't push anything + // onto our stack. + frame.just_for_error_messages = has_past_conflicting_dep; + if !has_past_conflicting_dep || activate_for_error_message { + remaining_deps.push(frame); + true + } else { + trace!( + "{}[{}]>{} skipping {} ", + parent.name(), + cur, + dep.name(), + pid.version() + ); + false + } + } + + // This candidate's already activated, so there's no extra work + // for us to do. Let's keep going. + Ok(None) => true, + + // We failed with a super fatal error (like a network error), so + // bail out as quickly as possible as we can't reliably + // backtrack from errors like these + Err(ActivateError::Fatal(e)) => return Err(e), + + // We failed due to a bland conflict, bah! Record this in our + // frame's list of conflicting activations as to why this + // candidate failed, and then move on. + Err(ActivateError::Conflict(id, reason)) => { + conflicting_activations.insert(id, reason); + false + } + }; + + // If we've successfully activated then save off the backtrack frame + // if one was created, and otherwise break out of the inner + // activation loop as we're ready to move to the next dependency + if successfully_activated { + backtrack_stack.extend(backtrack); + break; + } + + // We've failed to activate this dependency, oh dear! Our call to + // `activate` above may have altered our `cx` local variable, so + // restore it back if we've got a backtrack frame. + // + // If we don't have a backtrack frame then we're just using the `cx` + // for error messages anyway so we can live with a little + // imprecision. + if let Some(b) = backtrack { + cx = b.context_backup; + } + } + + // Ok phew, that loop was a big one! If we've broken out then we've + // successfully activated a candidate. Our stacks are all in place that + // we're ready to move on to the next dependency that needs activation, + // so loop back to the top of the function here. + } + + Ok(cx) +} + +/// Looks through the states in `backtrack_stack` for dependencies with +/// remaining candidates. For each one, also checks if rolling back +/// could change the outcome of the failed resolution that caused backtracking +/// in the first place. Namely, if we've backtracked past the parent of the +/// failed dep, or any of the packages flagged as giving us trouble in +/// `conflicting_activations`. +/// +/// Read +/// For several more detailed explanations of the logic here. +fn find_candidate<'a>( + backtrack_stack: &mut Vec, + parent: &Summary, + conflicting_activations: &HashMap, +) -> Option<(Candidate, bool, BacktrackFrame)> { + while let Some(mut frame) = backtrack_stack.pop() { + let next = frame + .remaining_candidates + .next(&frame.context_backup, &frame.dep); + let (candidate, has_another) = match next { + Ok(pair) => pair, + Err(_) => continue, + }; + // When we're calling this method we know that `parent` failed to + // activate. That means that some dependency failed to get resolved for + // whatever reason, and all of those reasons (plus maybe some extras) + // are listed in `conflicting_activations`. + // + // This means that if all members of `conflicting_activations` are still + // active in this back up we know that we're guaranteed to not actually + // make any progress. As a result if we hit this condition we can + // completely skip this backtrack frame and move on to the next. 
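+ //
+ // Put differently: this frame is only worth restarting from if at
+ // least one package in `conflicting_activations` (or `parent` itself)
+ // is no longer active in the frame's `context_backup`.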
+ if frame + .context_backup + .is_conflicting(Some(parent.package_id()), conflicting_activations) + { + continue; + } + + return Some((candidate, has_another, frame)); + } + None +} + +/// Returns String representation of dependency chain for a particular `pkgid`. +fn describe_path(graph: &Graph, pkgid: &PackageId) -> String { + use std::fmt::Write; + let dep_path = graph.path_to_top(pkgid); + let mut dep_path_desc = format!("package `{}`", dep_path[0]); + for dep in dep_path.iter().skip(1) { + write!(dep_path_desc, "\n ... which is depended on by `{}`", dep).unwrap(); + } + dep_path_desc +} + +fn activation_error( + cx: &Context, + registry: &mut Registry, + parent: &Summary, + dep: &Dependency, + conflicting_activations: &HashMap, + candidates: &[Candidate], + config: Option<&Config>, +) -> CargoError { + let graph = cx.graph(); + if !candidates.is_empty() { + let mut msg = format!("failed to select a version for `{}`.", dep.name()); + msg.push_str("\n ... required by "); + msg.push_str(&describe_path(&graph, parent.package_id())); + + msg.push_str("\nversions that meet the requirements `"); + msg.push_str(&dep.version_req().to_string()); + msg.push_str("` are: "); + msg.push_str(&candidates + .iter() + .map(|v| v.summary.version()) + .map(|v| v.to_string()) + .collect::>() + .join(", ")); + + let mut conflicting_activations: Vec<_> = conflicting_activations.iter().collect(); + conflicting_activations.sort_unstable(); + let (links_errors, mut other_errors): (Vec<_>, Vec<_>) = conflicting_activations + .drain(..) + .rev() + .partition(|&(_, r)| r.is_links()); + + for &(p, r) in links_errors.iter() { + if let ConflictReason::Links(ref link) = *r { + msg.push_str("\n\nthe package `"); + msg.push_str(&*dep.name()); + msg.push_str("` links to the native library `"); + msg.push_str(link); + msg.push_str("`, but it conflicts with a previous package which links to `"); + msg.push_str(link); + msg.push_str("` as well:\n"); + } + msg.push_str(&describe_path(&graph, p)); + } + + let (features_errors, other_errors): (Vec<_>, Vec<_>) = other_errors + .drain(..) + .partition(|&(_, r)| r.is_missing_features()); + + for &(p, r) in features_errors.iter() { + if let ConflictReason::MissingFeatures(ref features) = *r { + msg.push_str("\n\nthe package `"); + msg.push_str(&*p.name()); + msg.push_str("` depends on `"); + msg.push_str(&*dep.name()); + msg.push_str("`, with features: `"); + msg.push_str(features); + msg.push_str("` but `"); + msg.push_str(&*dep.name()); + msg.push_str("` does not have these features.\n"); + } + // p == parent so the full path is redundant. + } + + if !other_errors.is_empty() { + msg.push_str( + "\n\nall possible versions conflict with \ + previously selected packages.", + ); + } + + for &(p, _) in other_errors.iter() { + msg.push_str("\n\n previously selected "); + msg.push_str(&describe_path(&graph, p)); + } + + msg.push_str("\n\nfailed to select a version for `"); + msg.push_str(&*dep.name()); + msg.push_str("` which could resolve this conflict"); + + return format_err!("{}", msg); + } + + // Once we're all the way down here, we're definitely lost in the + // weeds! We didn't actually find any candidates, so we need to + // give an error message that nothing was found. + // + // Note that we re-query the registry with a new dependency that + // allows any version so we can give some nicer error reporting + // which indicates a few versions that were actually found. 
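+ //
+ // For example (hypothetical): if the user asked for `foo = "2.0"` but
+ // only 0.x releases exist, the wildcard query below lets us report
+ // something like "versions found: 0.3.1, 0.3.0, 0.2.5 ...".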
+ let all_req = semver::VersionReq::parse("*").unwrap();
+ let mut new_dep = dep.clone();
+ new_dep.set_version_req(all_req);
+ let mut candidates = match registry.query_vec(&new_dep) {
+ Ok(candidates) => candidates,
+ Err(e) => return e,
+ };
+ candidates.sort_unstable_by(|a, b| b.version().cmp(a.version()));
+
+ let mut msg = if !candidates.is_empty() {
+ let versions = {
+ let mut versions = candidates
+ .iter()
+ .take(3)
+ .map(|cand| cand.version().to_string())
+ .collect::<Vec<_>>();
+
+ if candidates.len() > 3 {
+ versions.push("...".into());
+ }
+
+ versions.join(", ")
+ };
+
+ let mut msg = format!(
+ "no matching version `{}` found for package `{}`\n\
+ location searched: {}\n\
+ versions found: {}\n",
+ dep.version_req(),
+ dep.name(),
+ dep.source_id(),
+ versions
+ );
+ msg.push_str("required by ");
+ msg.push_str(&describe_path(&graph, parent.package_id()));
+
+ // If we have a path dependency with a locked version, then this may
+ // indicate that we updated a sub-package and forgot to run `cargo
+ // update`. In this case try to print a helpful error!
+ if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') {
+ msg.push_str(
+ "\nconsider running `cargo update` to update \
+ a path dependency's locked version",
+ );
+ }
+
+ msg
+ } else {
+ let mut msg = format!(
+ "no matching package named `{}` found\n\
+ location searched: {}\n",
+ dep.name(),
+ dep.source_id()
+ );
+ msg.push_str("required by ");
+ msg.push_str(&describe_path(&graph, parent.package_id()));
+
+ msg
+ };
+
+ if let Some(config) = config {
+ if config.cli_unstable().offline {
+ msg.push_str(
+ "\nAs a reminder, you're using offline mode (-Z offline), \
+ which can sometimes cause surprising resolution failures; \
+ if this error is too confusing you may wish to retry \
+ without the offline flag.",
+ );
+ }
+ }
+
+ format_err!("{}", msg)
+}
+
+// Returns whether `a` and `b` are compatible in the semver sense. This is a
+// commutative operation.
+//
+// Versions `a` and `b` are compatible if their left-most nonzero component is
+// the same.
+fn compatible(a: &semver::Version, b: &semver::Version) -> bool {
+ if a.major != b.major {
+ return false;
+ }
+ if a.major != 0 {
+ return true;
+ }
+ if a.minor != b.minor {
+ return false;
+ }
+ if a.minor != 0 {
+ return true;
+ }
+ a.patch == b.patch
+}
+
+struct Requirements<'a> {
+ summary: &'a Summary,
+ // The deps map is a mapping of package name to list of features enabled.
+ // Each package should be enabled, and each package should have the
+ // specified set of features enabled. The boolean indicates whether this
+ // package was specifically requested (rather than just requesting features
+ // *within* this package).
+ deps: HashMap<&'a str, (bool, Vec<String>)>,
+ // The used features set is the set of features which this local package had
+ // enabled, which is later used when compiling to instruct the code what
+ // features were enabled.
+ used: HashSet<&'a str>, + visited: HashSet<&'a str>, +} + +impl<'r> Requirements<'r> { + fn new<'a>(summary: &'a Summary) -> Requirements<'a> { + Requirements { + summary, + deps: HashMap::new(), + used: HashSet::new(), + visited: HashSet::new(), + } + } + + fn require_crate_feature(&mut self, package: &'r str, feat: &'r str) { + self.used.insert(package); + self.deps + .entry(package) + .or_insert((false, Vec::new())) + .1 + .push(feat.to_string()); + } + + fn seen(&mut self, feat: &'r str) -> bool { + if self.visited.insert(feat) { + self.used.insert(feat); + false + } else { + true + } + } + + fn require_dependency(&mut self, pkg: &'r str) { + if self.seen(pkg) { + return; + } + self.deps.entry(pkg).or_insert((false, Vec::new())).0 = true; + } + + fn require_feature(&mut self, feat: &'r str) -> CargoResult<()> { + if self.seen(feat) { + return Ok(()); + } + for f in self.summary + .features() + .get(feat) + .expect("must be a valid feature") + { + if f == feat { + bail!( + "Cyclic feature dependency: feature `{}` depends on itself", + feat + ); + } + self.add_feature(f)?; + } + Ok(()) + } + + fn add_feature(&mut self, feat: &'r str) -> CargoResult<()> { + if feat.is_empty() { + return Ok(()); + } + + // If this feature is of the form `foo/bar`, then we just lookup package + // `foo` and enable its feature `bar`. Otherwise this feature is of the + // form `foo` and we need to recurse to enable the feature `foo` for our + // own package, which may end up enabling more features or just enabling + // a dependency. + let mut parts = feat.splitn(2, '/'); + let feat_or_package = parts.next().unwrap(); + match parts.next() { + Some(feat) => { + self.require_crate_feature(feat_or_package, feat); + } + None => { + if self.summary.features().contains_key(feat_or_package) { + self.require_feature(feat_or_package)?; + } else { + self.require_dependency(feat_or_package); + } + } + } + Ok(()) + } +} + +/// Takes requested features for a single package from the input Method and +/// recurses to find all requested features, dependencies and requested +/// dependency features in a Requirements object, returning it to the resolver. +fn build_requirements<'a, 'b: 'a>( + s: &'a Summary, + method: &'b Method, +) -> CargoResult> { + let mut reqs = Requirements::new(s); + match *method { + Method::Everything + | Method::Required { + all_features: true, .. + } => { + for key in s.features().keys() { + reqs.require_feature(key)?; + } + for dep in s.dependencies().iter().filter(|d| d.is_optional()) { + reqs.require_dependency(dep.name().to_inner()); + } + } + Method::Required { + features: requested_features, + .. + } => for feat in requested_features.iter() { + reqs.add_feature(feat)?; + }, + } + match *method { + Method::Everything + | Method::Required { + uses_default_features: true, + .. + } => { + if s.features().get("default").is_some() { + reqs.require_feature("default")?; + } + } + Method::Required { + uses_default_features: false, + .. + } => {} + } + Ok(reqs) +} + +impl Context { + /// Activate this summary by inserting it into our list of known activations. + /// + /// Returns true if this summary with the given method is already activated. 
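+ ///
+ /// A minimal sketch of the intended call pattern (simplified from
+ /// `activate` above, where `cx` is a `Context`):
+ ///
+ /// ```ignore
+ /// if cx.flag_activated(&candidate.summary, &method)? {
+ ///     return Ok(None); // already activated, nothing more to do
+ /// }
+ /// ```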
+ fn flag_activated(&mut self, summary: &Summary, method: &Method) -> CargoResult { + let id = summary.package_id(); + let prev = self.activations + .entry((id.name(), id.source_id().clone())) + .or_insert_with(|| Rc::new(Vec::new())); + if !prev.iter().any(|c| c == summary) { + self.resolve_graph.push(GraphNode::Add(id.clone())); + if let Some(link) = summary.links() { + ensure!( + self.links.insert(link, id.clone()).is_none(), + "Attempting to resolve a with more then one crate with the links={}. \n\ + This will not build as is. Consider rebuilding the .lock file.", + &*link + ); + } + let mut inner: Vec<_> = (**prev).clone(); + inner.push(summary.clone()); + *prev = Rc::new(inner); + return Ok(false); + } + debug!("checking if {} is already activated", summary.package_id()); + let (features, use_default) = match *method { + Method::Everything + | Method::Required { + all_features: true, .. + } => return Ok(false), + Method::Required { + features, + uses_default_features, + .. + } => (features, uses_default_features), + }; + + let has_default_feature = summary.features().contains_key("default"); + Ok(match self.resolve_features.get(id) { + Some(prev) => { + features + .iter() + .all(|f| prev.contains(&InternedString::new(f))) + && (!use_default || prev.contains(&InternedString::new("default")) + || !has_default_feature) + } + None => features.is_empty() && (!use_default || !has_default_feature), + }) + } + + fn build_deps( + &mut self, + registry: &mut RegistryQueryer, + parent: Option<&Summary>, + candidate: &Summary, + method: &Method, + ) -> ActivateResult> { + // First, figure out our set of dependencies based on the requested set + // of features. This also calculates what features we're going to enable + // for our own dependencies. + let deps = self.resolve_features(parent, candidate, method)?; + + // Next, transform all dependencies into a list of possible candidates + // which can satisfy that dependency. + let mut deps = deps.into_iter() + .map(|(dep, features)| { + let candidates = registry.query(&dep)?; + Ok((dep, candidates, Rc::new(features))) + }) + .collect::>>()?; + + // Attempt to resolve dependencies with fewer candidates before trying + // dependencies with more candidates. This way if the dependency with + // only one candidate can't be resolved we don't have to do a bunch of + // work before we figure that out. + deps.sort_by_key(|&(_, ref a, _)| a.len()); + + Ok(deps) + } + + fn prev_active(&self, dep: &Dependency) -> &[Summary] { + self.activations + .get(&(dep.name(), dep.source_id().clone())) + .map(|v| &v[..]) + .unwrap_or(&[]) + } + + fn is_active(&self, id: &PackageId) -> bool { + self.activations + .get(&(id.name(), id.source_id().clone())) + .map(|v| v.iter().any(|s| s.package_id() == id)) + .unwrap_or(false) + } + + /// checks whether all of `parent` and the keys of `conflicting activations` + /// are still active + fn is_conflicting( + &self, + parent: Option<&PackageId>, + conflicting_activations: &HashMap, + ) -> bool { + conflicting_activations + .keys() + .chain(parent) + .all(|id| self.is_active(id)) + } + + /// Return all dependencies and the features we want from them. + fn resolve_features<'b>( + &mut self, + parent: Option<&Summary>, + s: &'b Summary, + method: &'b Method, + ) -> ActivateResult)>> { + let dev_deps = match *method { + Method::Everything => true, + Method::Required { dev_deps, .. 
} => dev_deps, + }; + + // First, filter by dev-dependencies + let deps = s.dependencies(); + let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); + + let mut reqs = build_requirements(s, method)?; + let mut ret = Vec::new(); + + // Next, collect all actually enabled dependencies and their features. + for dep in deps { + // Skip optional dependencies, but not those enabled through a feature + if dep.is_optional() && !reqs.deps.contains_key(&*dep.name()) { + continue; + } + // So we want this dependency. Move the features we want from `feature_deps` + // to `ret`. + let base = reqs.deps.remove(&*dep.name()).unwrap_or((false, vec![])); + if !dep.is_optional() && base.0 { + self.warnings.push(format!( + "Package `{}` does not have feature `{}`. It has a required dependency \ + with that name, but only optional dependencies can be used as features. \ + This is currently a warning to ease the transition, but it will become an \ + error in the future.", + s.package_id(), + dep.name() + )); + } + let mut base = base.1; + base.extend(dep.features().iter().cloned()); + for feature in base.iter() { + if feature.contains('/') { + return Err( + format_err!("feature names may not contain slashes: `{}`", feature).into(), + ); + } + } + ret.push((dep.clone(), base)); + } + + // Any remaining entries in feature_deps are bugs in that the package does not actually + // have those dependencies. We classified them as dependencies in the first place + // because there is no such feature, either. + if !reqs.deps.is_empty() { + let unknown = reqs.deps.keys().map(|s| &s[..]).collect::>(); + let features = unknown.join(", "); + return Err(match parent { + None => format_err!( + "Package `{}` does not have these features: `{}`", + s.package_id(), + features + ).into(), + Some(p) => ( + p.package_id().clone(), + ConflictReason::MissingFeatures(features), + ).into(), + }); + } + + // Record what list of features is active for this package. + if !reqs.used.is_empty() { + let pkgid = s.package_id(); + + let set = self.resolve_features + .entry(pkgid.clone()) + .or_insert_with(HashSet::new); + for feature in reqs.used { + set.insert(InternedString::new(feature)); + } + } + + Ok(ret) + } + + fn resolve_replacements(&self) -> HashMap { + let mut replacements = HashMap::new(); + let mut cur = &self.resolve_replacements; + while let Some(ref node) = cur.head { + let (k, v) = node.0.clone(); + replacements.insert(k, v); + cur = &node.1; + } + replacements + } + + fn graph(&self) -> Graph { + let mut graph = Graph::new(); + let mut cur = &self.resolve_graph; + while let Some(ref node) = cur.head { + match node.0 { + GraphNode::Add(ref p) => graph.add(p.clone(), &[]), + GraphNode::Link(ref a, ref b) => graph.link(a.clone(), b.clone()), + } + cur = &node.1; + } + graph + } +} + +fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()> { + let summaries: HashMap<&PackageId, &Summary> = activations + .values() + .flat_map(|v| v.iter()) + .map(|s| (s.package_id(), s)) + .collect(); + + // Sort packages to produce user friendly deterministic errors. + let all_packages = resolve.iter().collect::>().into_sorted_vec(); + let mut checked = HashSet::new(); + for pkg in all_packages { + if !checked.contains(pkg) { + visit(resolve, pkg, &summaries, &mut HashSet::new(), &mut checked)? 
+ } + } + return Ok(()); + + fn visit<'a>( + resolve: &'a Resolve, + id: &'a PackageId, + summaries: &HashMap<&'a PackageId, &Summary>, + visited: &mut HashSet<&'a PackageId>, + checked: &mut HashSet<&'a PackageId>, + ) -> CargoResult<()> { + // See if we visited ourselves + if !visited.insert(id) { + bail!( + "cyclic package dependency: package `{}` depends on itself. Cycle:\n{}", + id, + describe_path(&resolve.graph, id) + ); + } + + // If we've already checked this node no need to recurse again as we'll + // just conclude the same thing as last time, so we only execute the + // recursive step if we successfully insert into `checked`. + // + // Note that if we hit an intransitive dependency then we clear out the + // visitation list as we can't induce a cycle through transitive + // dependencies. + if checked.insert(id) { + let summary = summaries[id]; + for dep in resolve.deps_not_replaced(id) { + let is_transitive = summary + .dependencies() + .iter() + .any(|d| d.matches_id(dep) && d.is_transitive()); + let mut empty = HashSet::new(); + let visited = if is_transitive { + &mut *visited + } else { + &mut empty + }; + visit(resolve, dep, summaries, visited, checked)?; + + if let Some(id) = resolve.replacement(dep) { + visit(resolve, id, summaries, visited, checked)?; + } + } + } + + // Ok, we're done, no longer visiting our node any more + visited.remove(id); + Ok(()) + } +} diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs new file mode 100644 index 000000000..8599e9e6d --- /dev/null +++ b/src/cargo/core/shell.rs @@ -0,0 +1,355 @@ +use std::fmt; +use std::io::prelude::*; + +use atty; +use termcolor::Color::{Cyan, Green, Red, Yellow}; +use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor}; + +use util::errors::CargoResult; + +/// The requested verbosity of output +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum Verbosity { + Verbose, + Normal, + Quiet, +} + +/// An abstraction around a `Write`able object that remembers preferences for output verbosity and +/// color. +pub struct Shell { + /// the `Write`able object, either with or without color support (represented by different enum + /// variants) + err: ShellOut, + /// How verbose messages should be + verbosity: Verbosity, +} + +impl fmt::Debug for Shell { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.err { + ShellOut::Write(_) => f.debug_struct("Shell") + .field("verbosity", &self.verbosity) + .finish(), + ShellOut::Stream { color_choice, .. } => f.debug_struct("Shell") + .field("verbosity", &self.verbosity) + .field("color_choice", &color_choice) + .finish(), + } + } +} + +/// A `Write`able object, either with or without color support +enum ShellOut { + /// A plain write object without color support + Write(Box), + /// Color-enabled stdio, with information on whether color should be used + Stream { + stream: StandardStream, + tty: bool, + color_choice: ColorChoice, + }, +} + +/// Whether messages should use color output +#[derive(Debug, PartialEq, Clone, Copy)] +pub enum ColorChoice { + /// Force color output + Always, + /// Force disable color output + Never, + /// Intelligently guess whether to use color output + CargoAuto, +} + +impl Shell { + /// Create a new shell (color choice and verbosity), defaulting to 'auto' color and verbose + /// output. 
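+ ///
+ /// A small usage sketch (the status and package name here are
+ /// illustrative):
+ ///
+ /// ```ignore
+ /// let mut shell = Shell::new();
+ /// shell.status("Compiling", "foo v0.1.0")?; // right-aligned, green
+ /// shell.warn("something looks off")?;       // "warning:" in yellow
+ /// ```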
+ pub fn new() -> Shell { + Shell { + err: ShellOut::Stream { + stream: StandardStream::stderr(ColorChoice::CargoAuto.to_termcolor_color_choice()), + color_choice: ColorChoice::CargoAuto, + tty: atty::is(atty::Stream::Stderr), + }, + verbosity: Verbosity::Verbose, + } + } + + /// Create a shell from a plain writable object, with no color, and max verbosity. + pub fn from_write(out: Box) -> Shell { + Shell { + err: ShellOut::Write(out), + verbosity: Verbosity::Verbose, + } + } + + /// Print a message, where the status will have `color` color, and can be justified. The + /// messages follows without color. + fn print( + &mut self, + status: &fmt::Display, + message: Option<&fmt::Display>, + color: Color, + justified: bool, + ) -> CargoResult<()> { + match self.verbosity { + Verbosity::Quiet => Ok(()), + _ => self.err.print(status, message, color, justified), + } + } + + /// Returns the width of the terminal in spaces, if any + pub fn err_width(&self) -> Option { + match self.err { + ShellOut::Stream { tty: true, .. } => imp::stderr_width(), + _ => None, + } + } + + /// Returns whether stderr is a tty + pub fn is_err_tty(&self) -> bool { + match self.err { + ShellOut::Stream { tty, .. } => tty, + _ => false, + } + } + + /// Get a reference to the underlying writer + pub fn err(&mut self) -> &mut Write { + self.err.as_write() + } + + /// Shortcut to right-align and color green a status message. + pub fn status(&mut self, status: T, message: U) -> CargoResult<()> + where + T: fmt::Display, + U: fmt::Display, + { + self.print(&status, Some(&message), Green, true) + } + + pub fn status_header(&mut self, status: T) -> CargoResult<()> + where + T: fmt::Display, + { + self.print(&status, None, Cyan, true) + } + + /// Shortcut to right-align a status message. + pub fn status_with_color( + &mut self, + status: T, + message: U, + color: Color, + ) -> CargoResult<()> + where + T: fmt::Display, + U: fmt::Display, + { + self.print(&status, Some(&message), color, true) + } + + /// Run the callback only if we are in verbose mode + pub fn verbose(&mut self, mut callback: F) -> CargoResult<()> + where + F: FnMut(&mut Shell) -> CargoResult<()>, + { + match self.verbosity { + Verbosity::Verbose => callback(self), + _ => Ok(()), + } + } + + /// Run the callback if we are not in verbose mode. + pub fn concise(&mut self, mut callback: F) -> CargoResult<()> + where + F: FnMut(&mut Shell) -> CargoResult<()>, + { + match self.verbosity { + Verbosity::Verbose => Ok(()), + _ => callback(self), + } + } + + /// Print a red 'error' message + pub fn error(&mut self, message: T) -> CargoResult<()> { + self.print(&"error:", Some(&message), Red, false) + } + + /// Print an amber 'warning' message + pub fn warn(&mut self, message: T) -> CargoResult<()> { + match self.verbosity { + Verbosity::Quiet => Ok(()), + _ => self.print(&"warning:", Some(&message), Yellow, false), + } + } + + /// Update the verbosity of the shell + pub fn set_verbosity(&mut self, verbosity: Verbosity) { + self.verbosity = verbosity; + } + + /// Get the verbosity of the shell + pub fn verbosity(&self) -> Verbosity { + self.verbosity + } + + /// Update the color choice (always, never, or auto) from a string. + pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> { + if let ShellOut::Stream { + ref mut stream, + ref mut color_choice, + .. 
+ } = self.err + { + let cfg = match color { + Some("always") => ColorChoice::Always, + Some("never") => ColorChoice::Never, + + Some("auto") | None => ColorChoice::CargoAuto, + + Some(arg) => bail!( + "argument for --color must be auto, always, or \ + never, but found `{}`", + arg + ), + }; + *color_choice = cfg; + *stream = StandardStream::stderr(cfg.to_termcolor_color_choice()); + } + Ok(()) + } + + /// Get the current color choice + /// + /// If we are not using a color stream, this will always return Never, even if the color choice + /// has been set to something else. + pub fn color_choice(&self) -> ColorChoice { + match self.err { + ShellOut::Stream { color_choice, .. } => color_choice, + ShellOut::Write(_) => ColorChoice::Never, + } + } +} + +impl Default for Shell { + fn default() -> Self { + Self::new() + } +} + +impl ShellOut { + /// Print out a message with a status. The status comes first and is bold + the given color. + /// The status can be justified, in which case the max width that will right align is 12 chars. + fn print( + &mut self, + status: &fmt::Display, + message: Option<&fmt::Display>, + color: Color, + justified: bool, + ) -> CargoResult<()> { + match *self { + ShellOut::Stream { ref mut stream, .. } => { + stream.reset()?; + stream.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?; + if justified { + write!(stream, "{:>12}", status)?; + } else { + write!(stream, "{}", status)?; + } + stream.reset()?; + match message { + Some(message) => write!(stream, " {}\n", message)?, + None => write!(stream, " ")?, + } + } + ShellOut::Write(ref mut w) => { + if justified { + write!(w, "{:>12}", status)?; + } else { + write!(w, "{}", status)?; + } + match message { + Some(message) => write!(w, " {}\n", message)?, + None => write!(w, " ")?, + } + } + } + Ok(()) + } + + /// Get this object as a `io::Write`. + fn as_write(&mut self) -> &mut Write { + match *self { + ShellOut::Stream { ref mut stream, .. 
} => stream, + ShellOut::Write(ref mut w) => w, + } + } +} + +impl ColorChoice { + /// Convert our color choice to termcolor's version + fn to_termcolor_color_choice(&self) -> termcolor::ColorChoice { + match *self { + ColorChoice::Always => termcolor::ColorChoice::Always, + ColorChoice::Never => termcolor::ColorChoice::Never, + ColorChoice::CargoAuto => { + if atty::is(atty::Stream::Stderr) { + termcolor::ColorChoice::Auto + } else { + termcolor::ColorChoice::Never + } + } + } + } +} + +#[cfg(any(target_os = "linux", target_os = "macos"))] +mod imp { + use std::mem; + + use libc; + + pub fn stderr_width() -> Option { + unsafe { + let mut winsize: libc::winsize = mem::zeroed(); + if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) < 0 { + return None; + } + if winsize.ws_col > 0 { + Some(winsize.ws_col as usize) + } else { + None + } + } + } +} + +#[cfg(all(unix, not(any(target_os = "linux", target_os = "macos"))))] +mod imp { + pub fn stderr_width() -> Option { + None + } +} + +#[cfg(windows)] +mod imp { + extern crate winapi; + + use std::mem; + use self::winapi::um::processenv::*; + use self::winapi::um::winbase::*; + use self::winapi::um::wincon::*; + + pub fn stderr_width() -> Option { + unsafe { + let stdout = GetStdHandle(STD_ERROR_HANDLE); + let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed(); + if GetConsoleScreenBufferInfo(stdout, &mut csbi) == 0 { + return None; + } + Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize) + } + } +} diff --git a/src/cargo/core/source/mod.rs b/src/cargo/core/source/mod.rs new file mode 100644 index 000000000..65380e207 --- /dev/null +++ b/src/cargo/core/source/mod.rs @@ -0,0 +1,160 @@ +use std::collections::hash_map::{HashMap, IterMut, Values}; + +use core::{Package, PackageId, Registry}; +use util::CargoResult; + +mod source_id; + +pub use self::source_id::{GitReference, SourceId}; + +/// A Source finds and downloads remote packages based on names and +/// versions. +pub trait Source: Registry { + /// Returns the `SourceId` corresponding to this source + fn source_id(&self) -> &SourceId; + + /// The update method performs any network operations required to + /// get the entire list of all names, versions and dependencies of + /// packages managed by the Source. + fn update(&mut self) -> CargoResult<()>; + + /// The download method fetches the full package for each name and + /// version specified. + fn download(&mut self, package: &PackageId) -> CargoResult; + + /// Generates a unique string which represents the fingerprint of the + /// current state of the source. + /// + /// This fingerprint is used to determine the "fresheness" of the source + /// later on. It must be guaranteed that the fingerprint of a source is + /// constant if and only if the output product will remain constant. + /// + /// The `pkg` argument is the package which this fingerprint should only be + /// interested in for when this source may contain multiple packages. + fn fingerprint(&self, pkg: &Package) -> CargoResult; + + /// If this source supports it, verifies the source of the package + /// specified. + /// + /// Note that the source may also have performed other checksum-based + /// verification during the `download` step, but this is intended to be run + /// just before a crate is compiled so it may perform more expensive checks + /// which may not be cacheable. 
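+ ///
+ /// The default implementation below is a no-op, since most sources do
+ /// not need any verification beyond what `download` already performs.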
+ fn verify(&self, _pkg: &PackageId) -> CargoResult<()> { + Ok(()) + } +} + +impl<'a, T: Source + ?Sized + 'a> Source for Box { + /// Forwards to `Source::source_id` + fn source_id(&self) -> &SourceId { + (**self).source_id() + } + + /// Forwards to `Source::update` + fn update(&mut self) -> CargoResult<()> { + (**self).update() + } + + /// Forwards to `Source::download` + fn download(&mut self, id: &PackageId) -> CargoResult { + (**self).download(id) + } + + /// Forwards to `Source::fingerprint` + fn fingerprint(&self, pkg: &Package) -> CargoResult { + (**self).fingerprint(pkg) + } + + /// Forwards to `Source::verify` + fn verify(&self, pkg: &PackageId) -> CargoResult<()> { + (**self).verify(pkg) + } +} + +/// A `HashMap` of `SourceId` -> `Box` +#[derive(Default)] +pub struct SourceMap<'src> { + map: HashMap>, +} + +/// A `std::collection::hash_map::Values` for `SourceMap` +pub type Sources<'a, 'src> = Values<'a, SourceId, Box>; + +/// A `std::collection::hash_map::IterMut` for `SourceMap` +pub struct SourcesMut<'a, 'src: 'a> { + inner: IterMut<'a, SourceId, Box>, +} + +impl<'src> SourceMap<'src> { + /// Create an empty map + pub fn new() -> SourceMap<'src> { + SourceMap { + map: HashMap::new(), + } + } + + /// Like `HashMap::contains_key` + pub fn contains(&self, id: &SourceId) -> bool { + self.map.contains_key(id) + } + + /// Like `HashMap::get` + pub fn get(&self, id: &SourceId) -> Option<&(Source + 'src)> { + let source = self.map.get(id); + + source.map(|s| { + let s: &(Source + 'src) = &**s; + s + }) + } + + /// Like `HashMap::get_mut` + pub fn get_mut(&mut self, id: &SourceId) -> Option<&mut (Source + 'src)> { + self.map.get_mut(id).map(|s| { + let s: &mut (Source + 'src) = &mut **s; + s + }) + } + + /// Like `HashMap::get`, but first calculates the `SourceId` from a + /// `PackageId` + pub fn get_by_package_id(&self, pkg_id: &PackageId) -> Option<&(Source + 'src)> { + self.get(pkg_id.source_id()) + } + + /// Like `HashMap::insert`, but derives the SourceId key from the Source + pub fn insert(&mut self, source: Box) { + let id = source.source_id().clone(); + self.map.insert(id, source); + } + + /// Like `HashMap::is_empty` + pub fn is_empty(&self) -> bool { + self.map.is_empty() + } + + /// Like `HashMap::len` + pub fn len(&self) -> usize { + self.map.len() + } + + /// Like `HashMap::values` + pub fn sources<'a>(&'a self) -> Sources<'a, 'src> { + self.map.values() + } + + /// Like `HashMap::iter_mut` + pub fn sources_mut<'a>(&'a mut self) -> SourcesMut<'a, 'src> { + SourcesMut { + inner: self.map.iter_mut(), + } + } +} + +impl<'a, 'src> Iterator for SourcesMut<'a, 'src> { + type Item = (&'a SourceId, &'a mut (Source + 'src)); + fn next(&mut self) -> Option<(&'a SourceId, &'a mut (Source + 'src))> { + self.inner.next().map(|(a, b)| (a, &mut **b)) + } +} diff --git a/src/cargo/core/source/source_id.rs b/src/cargo/core/source/source_id.rs new file mode 100644 index 000000000..c956867b3 --- /dev/null +++ b/src/cargo/core/source/source_id.rs @@ -0,0 +1,569 @@ +use std::cmp::{self, Ordering}; +use std::fmt::{self, Formatter}; +use std::hash::{self, Hash}; +use std::path::Path; +use std::sync::Arc; +use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT}; +use std::sync::atomic::Ordering::SeqCst; + +use serde::ser; +use serde::de; +use url::Url; + +use ops; +use sources::git; +use sources::{GitSource, PathSource, RegistrySource, CRATES_IO}; +use sources::DirectorySource; +use util::{CargoResult, Config, ToUrl}; + +/// Unique identifier for a source of packages. 
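+///
+/// In URL form a `SourceId` looks like, for example,
+/// `registry+https://github.com/rust-lang/crates.io-index` or
+/// `git+https://github.com/foo/bar?branch=baz#<revision>` (the repository
+/// and branch here are hypothetical); `SourceId::from_url` below parses the
+/// `path+`, `git+` and `registry+` forms.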
+#[derive(Clone, Eq, Debug)] +pub struct SourceId { + inner: Arc, +} + +#[derive(Eq, Clone, Debug)] +struct SourceIdInner { + /// The source URL + url: Url, + /// `git::canonicalize_url(url)` for the url field + canonical_url: Url, + /// The source kind + kind: Kind, + // e.g. the exact git revision of the specified branch for a Git Source + precise: Option, + /// Name of the registry source for alternative registries + name: Option, +} + +/// The possible kinds of code source. Along with a URL, this fully defines the +/// source +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +enum Kind { + /// Kind::Git() represents a git repository + Git(GitReference), + /// represents a local path + Path, + /// represents a remote registry + Registry, + /// represents a local filesystem-based registry + LocalRegistry, + /// represents a directory-based registry + Directory, +} + +/// Information to find a specific commit in a git repository +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum GitReference { + /// from a tag + Tag(String), + /// from the HEAD of a branch + Branch(String), + /// from a specific revision + Rev(String), +} + +impl SourceId { + /// Create a SourceId object from the kind and url. + /// + /// The canonical url will be calculated, but the precise field will not + fn new(kind: Kind, url: Url) -> CargoResult { + let source_id = SourceId { + inner: Arc::new(SourceIdInner { + kind, + canonical_url: git::canonicalize_url(&url)?, + url, + precise: None, + name: None, + }), + }; + Ok(source_id) + } + + /// Parses a source URL and returns the corresponding ID. + /// + /// ## Example + /// + /// ``` + /// use cargo::core::SourceId; + /// SourceId::from_url("git+https://github.com/alexcrichton/\ + /// libssh2-static-sys#80e71a3021618eb05\ + /// 656c58fb7c5ef5f12bc747f"); + /// ``` + pub fn from_url(string: &str) -> CargoResult { + let mut parts = string.splitn(2, '+'); + let kind = parts.next().unwrap(); + let url = parts + .next() + .ok_or_else(|| format_err!("invalid source `{}`", string))?; + + match kind { + "git" => { + let mut url = url.to_url()?; + let mut reference = GitReference::Branch("master".to_string()); + for (k, v) in url.query_pairs() { + match &k[..] { + // map older 'ref' to branch + "branch" | "ref" => reference = GitReference::Branch(v.into_owned()), + + "rev" => reference = GitReference::Rev(v.into_owned()), + "tag" => reference = GitReference::Tag(v.into_owned()), + _ => {} + } + } + let precise = url.fragment().map(|s| s.to_owned()); + url.set_fragment(None); + url.set_query(None); + Ok(SourceId::for_git(&url, reference)?.with_precise(precise)) + } + "registry" => { + let url = url.to_url()?; + Ok(SourceId::new(Kind::Registry, url)?.with_precise(Some("locked".to_string()))) + } + "path" => { + let url = url.to_url()?; + SourceId::new(Kind::Path, url) + } + kind => Err(format_err!("unsupported source protocol: {}", kind)), + } + } + + /// A view of the `SourceId` that can be `Display`ed as a URL + pub fn to_url(&self) -> SourceIdToUrl { + SourceIdToUrl { + inner: &*self.inner, + } + } + + /// Create a SourceId from a filesystem path. 
+ /// + /// Pass absolute path + pub fn for_path(path: &Path) -> CargoResult { + let url = path.to_url()?; + SourceId::new(Kind::Path, url) + } + + /// Crate a SourceId from a git reference + pub fn for_git(url: &Url, reference: GitReference) -> CargoResult { + SourceId::new(Kind::Git(reference), url.clone()) + } + + /// Create a SourceId from a registry url + pub fn for_registry(url: &Url) -> CargoResult { + SourceId::new(Kind::Registry, url.clone()) + } + + /// Create a SourceId from a local registry path + pub fn for_local_registry(path: &Path) -> CargoResult { + let url = path.to_url()?; + SourceId::new(Kind::LocalRegistry, url) + } + + /// Create a SourceId from a directory path + pub fn for_directory(path: &Path) -> CargoResult { + let url = path.to_url()?; + SourceId::new(Kind::Directory, url) + } + + /// Returns the `SourceId` corresponding to the main repository. + /// + /// This is the main cargo registry by default, but it can be overridden in + /// a `.cargo/config`. + pub fn crates_io(config: &Config) -> CargoResult { + config.crates_io_source_id(|| { + let cfg = ops::registry_configuration(config, None)?; + let url = if let Some(ref index) = cfg.index { + static WARNED: AtomicBool = ATOMIC_BOOL_INIT; + if !WARNED.swap(true, SeqCst) { + config.shell().warn( + "custom registry support via \ + the `registry.index` configuration is \ + being removed, this functionality \ + will not work in the future", + )?; + } + &index[..] + } else { + CRATES_IO + }; + let url = url.to_url()?; + SourceId::for_registry(&url) + }) + } + + pub fn alt_registry(config: &Config, key: &str) -> CargoResult { + let url = config.get_registry_index(key)?; + Ok(SourceId { + inner: Arc::new(SourceIdInner { + kind: Kind::Registry, + canonical_url: git::canonicalize_url(&url)?, + url, + precise: None, + name: Some(key.to_string()), + }), + }) + } + + /// Get this source URL + pub fn url(&self) -> &Url { + &self.inner.url + } + + pub fn display_registry(&self) -> String { + format!("registry `{}`", self.url()) + } + + /// Is this source from a filesystem path + pub fn is_path(&self) -> bool { + self.inner.kind == Kind::Path + } + + /// Is this source from a registry (either local or not) + pub fn is_registry(&self) -> bool { + match self.inner.kind { + Kind::Registry | Kind::LocalRegistry => true, + _ => false, + } + } + + /// Is this source from an alternative registry + pub fn is_alt_registry(&self) -> bool { + self.is_registry() && self.inner.name.is_some() + } + + /// Is this source from a git repository + pub fn is_git(&self) -> bool { + match self.inner.kind { + Kind::Git(_) => true, + _ => false, + } + } + + /// Creates an implementation of `Source` corresponding to this ID. + pub fn load<'a>(&self, config: &'a Config) -> CargoResult> { + trace!("loading SourceId; {}", self); + match self.inner.kind { + Kind::Git(..) 
=> Ok(Box::new(GitSource::new(self, config)?)), + Kind::Path => { + let path = match self.inner.url.to_file_path() { + Ok(p) => p, + Err(()) => panic!("path sources cannot be remote"), + }; + Ok(Box::new(PathSource::new(&path, self, config))) + } + Kind::Registry => Ok(Box::new(RegistrySource::remote(self, config))), + Kind::LocalRegistry => { + let path = match self.inner.url.to_file_path() { + Ok(p) => p, + Err(()) => panic!("path sources cannot be remote"), + }; + Ok(Box::new(RegistrySource::local(self, &path, config))) + } + Kind::Directory => { + let path = match self.inner.url.to_file_path() { + Ok(p) => p, + Err(()) => panic!("path sources cannot be remote"), + }; + Ok(Box::new(DirectorySource::new(&path, self, config))) + } + } + } + + /// Get the value of the precise field + pub fn precise(&self) -> Option<&str> { + self.inner.precise.as_ref().map(|s| &s[..]) + } + + /// Get the git reference if this is a git source, otherwise None. + pub fn git_reference(&self) -> Option<&GitReference> { + match self.inner.kind { + Kind::Git(ref s) => Some(s), + _ => None, + } + } + + /// Create a new SourceId from this source with the given `precise` + pub fn with_precise(&self, v: Option) -> SourceId { + SourceId { + inner: Arc::new(SourceIdInner { + precise: v, + ..(*self.inner).clone() + }), + } + } + + /// Whether the remote registry is the standard https://crates.io + pub fn is_default_registry(&self) -> bool { + match self.inner.kind { + Kind::Registry => {} + _ => return false, + } + self.inner.url.to_string() == CRATES_IO + } + + /// Hash `self` + /// + /// For paths, remove the workspace prefix so the same source will give the + /// same hash in different locations. + pub fn stable_hash(&self, workspace: &Path, into: &mut S) { + if self.is_path() { + if let Ok(p) = self.inner + .url + .to_file_path() + .unwrap() + .strip_prefix(workspace) + { + self.inner.kind.hash(into); + p.to_str().unwrap().hash(into); + return; + } + } + self.hash(into) + } +} + +impl PartialEq for SourceId { + fn eq(&self, other: &SourceId) -> bool { + (*self.inner).eq(&*other.inner) + } +} + +impl PartialOrd for SourceId { + fn partial_cmp(&self, other: &SourceId) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for SourceId { + fn cmp(&self, other: &SourceId) -> Ordering { + self.inner.cmp(&other.inner) + } +} + +impl ser::Serialize for SourceId { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + if self.is_path() { + None::.serialize(s) + } else { + s.collect_str(&self.to_url()) + } + } +} + +impl<'de> de::Deserialize<'de> for SourceId { + fn deserialize(d: D) -> Result + where + D: de::Deserializer<'de>, + { + let string = String::deserialize(d)?; + SourceId::from_url(&string).map_err(de::Error::custom) + } +} + +impl fmt::Display for SourceId { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match *self.inner { + SourceIdInner { + kind: Kind::Path, + ref url, + .. + } => fmt::Display::fmt(url, f), + SourceIdInner { + kind: Kind::Git(ref reference), + ref url, + ref precise, + .. + } => { + write!(f, "{}", url)?; + if let Some(pretty) = reference.pretty_ref() { + write!(f, "?{}", pretty)?; + } + + if let Some(ref s) = *precise { + let len = cmp::min(s.len(), 8); + write!(f, "#{}", &s[..len])?; + } + Ok(()) + } + SourceIdInner { + kind: Kind::Registry, + ref url, + .. + } + | SourceIdInner { + kind: Kind::LocalRegistry, + ref url, + .. + } => write!(f, "registry `{}`", url), + SourceIdInner { + kind: Kind::Directory, + ref url, + .. 
+ } => write!(f, "dir {}", url), + } + } +} + +// This custom implementation handles situations such as when two git sources +// point at *almost* the same URL, but not quite, even when they actually point +// to the same repository. +/// This method tests for self and other values to be equal, and is used by ==. +/// +/// For git repositories, the canonical url is checked. +impl PartialEq for SourceIdInner { + fn eq(&self, other: &SourceIdInner) -> bool { + if self.kind != other.kind { + return false; + } + if self.url == other.url { + return true; + } + + match (&self.kind, &other.kind) { + (&Kind::Git(ref ref1), &Kind::Git(ref ref2)) => { + ref1 == ref2 && self.canonical_url == other.canonical_url + } + _ => false, + } + } +} + +impl PartialOrd for SourceIdInner { + fn partial_cmp(&self, other: &SourceIdInner) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for SourceIdInner { + fn cmp(&self, other: &SourceIdInner) -> Ordering { + match self.kind.cmp(&other.kind) { + Ordering::Equal => {} + ord => return ord, + } + match self.url.cmp(&other.url) { + Ordering::Equal => {} + ord => return ord, + } + match (&self.kind, &other.kind) { + (&Kind::Git(ref ref1), &Kind::Git(ref ref2)) => { + (ref1, &self.canonical_url).cmp(&(ref2, &other.canonical_url)) + } + _ => self.kind.cmp(&other.kind), + } + } +} + +// The hash of SourceId is used in the name of some Cargo folders, so shouldn't +// vary. `as_str` gives the serialisation of a url (which has a spec) and so +// insulates against possible changes in how the url crate does hashing. +impl Hash for SourceId { + fn hash(&self, into: &mut S) { + self.inner.kind.hash(into); + match *self.inner { + SourceIdInner { + kind: Kind::Git(..), + ref canonical_url, + .. + } => canonical_url.as_str().hash(into), + _ => self.inner.url.as_str().hash(into), + } + } +} + +/// A `Display`able view into a `SourceId` that will write it as a url +pub struct SourceIdToUrl<'a> { + inner: &'a SourceIdInner, +} + +impl<'a> fmt::Display for SourceIdToUrl<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self.inner { + SourceIdInner { + kind: Kind::Path, + ref url, + .. + } => write!(f, "path+{}", url), + SourceIdInner { + kind: Kind::Git(ref reference), + ref url, + ref precise, + .. + } => { + write!(f, "git+{}", url)?; + if let Some(pretty) = reference.pretty_ref() { + write!(f, "?{}", pretty)?; + } + if let Some(precise) = precise.as_ref() { + write!(f, "#{}", precise)?; + } + Ok(()) + } + SourceIdInner { + kind: Kind::Registry, + ref url, + .. + } => write!(f, "registry+{}", url), + SourceIdInner { + kind: Kind::LocalRegistry, + ref url, + .. + } => write!(f, "local-registry+{}", url), + SourceIdInner { + kind: Kind::Directory, + ref url, + .. 
+ } => write!(f, "directory+{}", url), + } + } +} + +impl GitReference { + /// Returns a `Display`able view of this git reference, or None if using + /// the head of the "master" branch + pub fn pretty_ref(&self) -> Option { + match *self { + GitReference::Branch(ref s) if *s == "master" => None, + _ => Some(PrettyRef { inner: self }), + } + } +} + +/// A git reference that can be `Display`ed +pub struct PrettyRef<'a> { + inner: &'a GitReference, +} + +impl<'a> fmt::Display for PrettyRef<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self.inner { + GitReference::Branch(ref b) => write!(f, "branch={}", b), + GitReference::Tag(ref s) => write!(f, "tag={}", s), + GitReference::Rev(ref s) => write!(f, "rev={}", s), + } + } +} + +#[cfg(test)] +mod tests { + use super::{GitReference, Kind, SourceId}; + use util::ToUrl; + + #[test] + fn github_sources_equal() { + let loc = "https://github.com/foo/bar".to_url().unwrap(); + let master = Kind::Git(GitReference::Branch("master".to_string())); + let s1 = SourceId::new(master.clone(), loc).unwrap(); + + let loc = "git://github.com/foo/bar".to_url().unwrap(); + let s2 = SourceId::new(master, loc.clone()).unwrap(); + + assert_eq!(s1, s2); + + let foo = Kind::Git(GitReference::Branch("foo".to_string())); + let s3 = SourceId::new(foo, loc).unwrap(); + assert_ne!(s1, s3); + } +} diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs new file mode 100644 index 000000000..db2545d44 --- /dev/null +++ b/src/cargo/core/summary.rs @@ -0,0 +1,160 @@ +use std::collections::BTreeMap; +use std::mem; +use std::rc::Rc; + +use semver::Version; +use core::{Dependency, PackageId, SourceId}; +use core::interning::InternedString; + +use util::CargoResult; + +/// Subset of a `Manifest`. Contains only the most important information about +/// a package. 
+/// Subset of a `Manifest`. Contains only the most important information about
+/// a package.
+///
+/// Summaries are cloned, and should not be mutated after creation
+#[derive(Debug, Clone)]
+pub struct Summary {
+    inner: Rc<Inner>,
+}
+
+#[derive(Debug, Clone)]
+struct Inner {
+    package_id: PackageId,
+    dependencies: Vec<Dependency>,
+    features: BTreeMap<String, Vec<String>>,
+    checksum: Option<String>,
+    links: Option<InternedString>,
+}
+
+impl Summary {
+    pub fn new(
+        pkg_id: PackageId,
+        dependencies: Vec<Dependency>,
+        features: BTreeMap<String, Vec<String>>,
+        links: Option<String>,
+    ) -> CargoResult<Summary> {
+        for dep in dependencies.iter() {
+            if features.get(&*dep.name()).is_some() {
+                bail!(
+                    "Features and dependencies cannot have the \
+                     same name: `{}`",
+                    dep.name()
+                )
+            }
+            if dep.is_optional() && !dep.is_transitive() {
+                bail!(
+                    "Dev-dependencies are not allowed to be optional: `{}`",
+                    dep.name()
+                )
+            }
+        }
+        for (feature, list) in features.iter() {
+            for dep in list.iter() {
+                let mut parts = dep.splitn(2, '/');
+                let dep = parts.next().unwrap();
+                let is_reexport = parts.next().is_some();
+                if !is_reexport && features.get(dep).is_some() {
+                    continue;
+                }
+                match dependencies.iter().find(|d| &*d.name() == dep) {
+                    Some(d) => {
+                        if d.is_optional() || is_reexport {
+                            continue;
+                        }
+                        bail!(
+                            "Feature `{}` depends on `{}` which is not an \
+                             optional dependency.\nConsider adding \
+                             `optional = true` to the dependency",
+                            feature,
+                            dep
+                        )
+                    }
+                    None if is_reexport => bail!(
+                        "Feature `{}` requires a feature of `{}` which is not a \
+                         dependency",
+                        feature,
+                        dep
+                    ),
+                    None => bail!(
+                        "Feature `{}` includes `{}` which is neither \
+                         a dependency nor another feature",
+                        feature,
+                        dep
+                    ),
+                }
+            }
+        }
+        Ok(Summary {
+            inner: Rc::new(Inner {
+                package_id: pkg_id,
+                dependencies,
+                features,
+                checksum: None,
+                links: links.map(|l| InternedString::new(&l)),
+            }),
+        })
+    }
+
+    pub fn package_id(&self) -> &PackageId {
+        &self.inner.package_id
+    }
+    pub fn name(&self) -> InternedString {
+        self.package_id().name()
+    }
+    pub fn version(&self) -> &Version {
+        self.package_id().version()
+    }
+    pub fn source_id(&self) -> &SourceId {
+        self.package_id().source_id()
+    }
+    pub fn dependencies(&self) -> &[Dependency] {
+        &self.inner.dependencies
+    }
+    pub fn features(&self) -> &BTreeMap<String, Vec<String>> {
+        &self.inner.features
+    }
+    pub fn checksum(&self) -> Option<&str> {
+        self.inner.checksum.as_ref().map(|s| &s[..])
+    }
+    pub fn links(&self) -> Option<InternedString> {
+        self.inner.links
+    }
+
+    pub fn override_id(mut self, id: PackageId) -> Summary {
+        Rc::make_mut(&mut self.inner).package_id = id;
+        self
+    }
+
+    pub fn set_checksum(mut self, cksum: String) -> Summary {
+        Rc::make_mut(&mut self.inner).checksum = Some(cksum);
+        self
+    }
+
+    pub fn map_dependencies<F>(mut self, f: F) -> Summary
+    where
+        F: FnMut(Dependency) -> Dependency,
+    {
+        {
+            let slot = &mut Rc::make_mut(&mut self.inner).dependencies;
+            let deps = mem::replace(slot, Vec::new());
+            *slot = deps.into_iter().map(f).collect();
+        }
+        self
+    }
+
+    pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Summary {
+        let me = if self.package_id().source_id() == to_replace {
+            let new_id = self.package_id().with_source_id(replace_with);
+            self.override_id(new_id)
+        } else {
+            self
+        };
+        me.map_dependencies(|dep| dep.map_source(to_replace, replace_with))
+    }
+}
+
+impl PartialEq for Summary {
+    fn eq(&self, other: &Summary) -> bool {
+        self.inner.package_id == other.inner.package_id
+    }
+}
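+
+// A minimal sketch of how the builder-style methods above chain (`summary`,
+// `crates_io`, and `mirror` are hypothetical values obtained elsewhere):
+//
+//     let summary = summary
+//         .set_checksum("d41d8cd9...".to_string())
+//         .map_source(&crates_io, &mirror);
+//
+// Because `Inner` lives behind an `Rc`, `Rc::make_mut` only copies the data
+// when the summary is shared, so these transformations stay cheap.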
diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs
new file mode 100644
index 000000000..9377b185c
--- /dev/null
+++ b/src/cargo/core/workspace.rs
@@ -0,0 +1,858 @@
+use std::cell::RefCell;
+use std::collections::BTreeMap;
+use std::collections::hash_map::{Entry, HashMap};
+use std::path::{Path, PathBuf};
+use std::slice;
+
+use glob::glob;
+use url::Url;
+
+use core::registry::PackageRegistry;
+use core::{EitherManifest, Package, SourceId, VirtualManifest};
+use core::{Dependency, PackageIdSpec, Profile, Profiles};
+use ops;
+use sources::PathSource;
+use util::errors::{CargoResult, CargoResultExt};
+use util::paths;
+use util::toml::read_manifest;
+use util::{Config, Filesystem};
+
+/// The core abstraction in Cargo for working with a workspace of crates.
+///
+/// A workspace is often created very early on and then threaded through all
+/// other functions. It's typically through this object that the current
+/// package is loaded and/or learned about.
+#[derive(Debug)]
+pub struct Workspace<'cfg> {
+    config: &'cfg Config,
+
+    // This path is a path to where the current cargo subcommand was invoked
+    // from. That is, this is the `--manifest-path` argument to Cargo, and
+    // points to the "main crate" that we're going to worry about.
+    current_manifest: PathBuf,
+
+    // A list of packages found in this workspace. Always includes at least the
+    // package mentioned by `current_manifest`.
+    packages: Packages<'cfg>,
+
+    // If this workspace includes more than one crate, this points to the root
+    // of the workspace. This is `None` in the case that `[workspace]` is
+    // missing, `package.workspace` is missing, and no `Cargo.toml` above
+    // `current_manifest` was found on the filesystem with `[workspace]`.
+    root_manifest: Option<PathBuf>,
+
+    // Shared target directory for all the packages of this workspace.
+    // `None` if the default path of `root/target` should be used.
+    target_dir: Option<Filesystem>,
+
+    // List of members in this workspace with a listing of all their manifest
+    // paths. The packages themselves can be looked up through the `packages`
+    // set above.
+    members: Vec<PathBuf>,
+
+    // The subset of `members` that are used by the
+    // `build`, `check`, `test`, and `bench` subcommands
+    // when no package is selected with `--package` / `-p` and `--all`
+    // is not used.
+    //
+    // This is set by the `default-members` config
+    // in the `[workspace]` section.
+    // When unset, this is the same as `members` for virtual workspaces
+    // (`--all` is implied)
+    // or only the root package for non-virtual workspaces.
+    default_members: Vec<PathBuf>,
+
+    // True if this is a temporary workspace created for the purposes of
+    // cargo install or cargo package.
+    is_ephemeral: bool,
+
+    // True if this workspace should enforce optional dependencies even when
+    // not needed; false if this workspace should only enforce dependencies
+    // needed by the current configuration (such as in cargo install). In some
+    // cases `false` also results in the non-enforcement of dev-dependencies.
+    require_optional_deps: bool,
+
+    // A cache of loaded packages for particular paths which is disjoint from
+    // `packages` up above, used in the `load` method down below.
+    loaded_packages: RefCell<HashMap<PathBuf, Package>>,
+}
+
+// Separate structure for tracking loaded packages (to avoid loading anything
+// twice), and this is separate to help appease the borrow checker.
+#[derive(Debug)]
+struct Packages<'cfg> {
+    config: &'cfg Config,
+    packages: HashMap<PathBuf, MaybePackage>,
+}
+
+#[derive(Debug)]
+enum MaybePackage {
+    Package(Package),
+    Virtual(VirtualManifest),
+}
+
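+// A minimal usage sketch (the manifest path is hypothetical; error handling
+// is elided to `?`):
+//
+//     let config = Config::default()?;
+//     let ws = Workspace::new(Path::new("/work/repo/Cargo.toml"), &config)?;
+//     for pkg in ws.members() {
+//         println!("member: {}", pkg.name());
+//     }
+//
+// `new` walks up the filesystem looking for a `[workspace]` root, collects
+// the member manifests, and validates the result before returning.
+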
+/// Configuration of a workspace in a manifest.
+#[derive(Debug, Clone)]
+pub enum WorkspaceConfig {
+    /// Indicates that `[workspace]` was present and the members were
+    /// optionally specified as well.
+    Root(WorkspaceRootConfig),
+
+    /// Indicates that `[workspace]` was present and the `root` field is the
+    /// optional value of `package.workspace`, if present.
+    Member { root: Option<String> },
+}
+
+/// Intermediate configuration of a workspace root in a manifest.
+///
+/// Knows the Workspace Root path, as well as `members` and `exclude` lists of path patterns, which
+/// together tell if some path is recognized as a member by this root or not.
+#[derive(Debug, Clone)]
+pub struct WorkspaceRootConfig {
+    root_dir: PathBuf,
+    members: Option<Vec<String>>,
+    default_members: Option<Vec<String>>,
+    exclude: Vec<String>,
+}
+
+/// An iterator over the member packages of a workspace, returned by
+/// `Workspace::members`
+pub struct Members<'a, 'cfg: 'a> {
+    ws: &'a Workspace<'cfg>,
+    iter: slice::Iter<'a, PathBuf>,
+}
+
+impl<'cfg> Workspace<'cfg> {
+    /// Creates a new workspace given the target manifest pointed to by
+    /// `manifest_path`.
+    ///
+    /// This function will construct the entire workspace by determining the
+    /// root and all member packages. It will then validate the workspace
+    /// before returning it, so `Ok` is only returned for valid workspaces.
+    pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult<Workspace<'cfg>> {
+        let target_dir = config.target_dir()?;
+
+        let mut ws = Workspace {
+            config,
+            current_manifest: manifest_path.to_path_buf(),
+            packages: Packages {
+                config,
+                packages: HashMap::new(),
+            },
+            root_manifest: None,
+            target_dir,
+            members: Vec::new(),
+            default_members: Vec::new(),
+            is_ephemeral: false,
+            require_optional_deps: true,
+            loaded_packages: RefCell::new(HashMap::new()),
+        };
+        ws.root_manifest = ws.find_root(manifest_path)?;
+        ws.find_members()?;
+        ws.validate()?;
+        Ok(ws)
+    }
+
+    /// Creates a "temporary workspace" from one package which only contains
+    /// that package.
+    ///
+    /// This constructor will not touch the filesystem and only creates an
+    /// in-memory workspace. That is, all configuration is ignored, it's just
+    /// intended for that one package.
+    ///
+    /// This is currently only used in niche situations like `cargo install` or
+    /// `cargo package`.
+    pub fn ephemeral(
+        package: Package,
+        config: &'cfg Config,
+        target_dir: Option<Filesystem>,
+        require_optional_deps: bool,
+    ) -> CargoResult<Workspace<'cfg>> {
+        let mut ws = Workspace {
+            config,
+            current_manifest: package.manifest_path().to_path_buf(),
+            packages: Packages {
+                config,
+                packages: HashMap::new(),
+            },
+            root_manifest: None,
+            target_dir: None,
+            members: Vec::new(),
+            default_members: Vec::new(),
+            is_ephemeral: true,
+            require_optional_deps,
+            loaded_packages: RefCell::new(HashMap::new()),
+        };
+        {
+            let key = ws.current_manifest.parent().unwrap();
+            let package = MaybePackage::Package(package);
+            ws.packages.packages.insert(key.to_path_buf(), package);
+            ws.target_dir = if let Some(dir) = target_dir {
+                Some(dir)
+            } else {
+                ws.config.target_dir()?
+            };
+            ws.members.push(ws.current_manifest.clone());
+            ws.default_members.push(ws.current_manifest.clone());
+        }
+        Ok(ws)
+    }
+
+    /// Returns the current package of this workspace.
+    ///
+    /// Note that this can return an error if the current manifest is
+    /// actually a "virtual Cargo.toml", in which case an error is returned
+    /// indicating that something else should be passed.
+ pub fn current(&self) -> CargoResult<&Package> { + let pkg = self.current_opt().ok_or_else(|| { + format_err!( + "manifest path `{}` is a virtual manifest, but this \ + command requires running against an actual package in \ + this workspace", + self.current_manifest.display() + ) + })?; + Ok(pkg) + } + + pub fn current_opt(&self) -> Option<&Package> { + match *self.packages.get(&self.current_manifest) { + MaybePackage::Package(ref p) => Some(p), + MaybePackage::Virtual(..) => None, + } + } + + pub fn is_virtual(&self) -> bool { + match *self.packages.get(&self.current_manifest) { + MaybePackage::Package(..) => false, + MaybePackage::Virtual(..) => true, + } + } + + /// Returns the `Config` this workspace is associated with. + pub fn config(&self) -> &'cfg Config { + self.config + } + + pub fn profiles(&self) -> &Profiles { + let root = self.root_manifest + .as_ref() + .unwrap_or(&self.current_manifest); + match *self.packages.get(root) { + MaybePackage::Package(ref p) => p.manifest().profiles(), + MaybePackage::Virtual(ref vm) => vm.profiles(), + } + } + + /// Returns the root path of this workspace. + /// + /// That is, this returns the path of the directory containing the + /// `Cargo.toml` which is the root of this workspace. + pub fn root(&self) -> &Path { + match self.root_manifest { + Some(ref p) => p, + None => &self.current_manifest, + }.parent() + .unwrap() + } + + pub fn target_dir(&self) -> Filesystem { + self.target_dir + .clone() + .unwrap_or_else(|| Filesystem::new(self.root().join("target"))) + } + + /// Returns the root [replace] section of this workspace. + /// + /// This may be from a virtual crate or an actual crate. + pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] { + let path = match self.root_manifest { + Some(ref p) => p, + None => &self.current_manifest, + }; + match *self.packages.get(path) { + MaybePackage::Package(ref p) => p.manifest().replace(), + MaybePackage::Virtual(ref vm) => vm.replace(), + } + } + + /// Returns the root [patch] section of this workspace. + /// + /// This may be from a virtual crate or an actual crate. + pub fn root_patch(&self) -> &HashMap> { + let path = match self.root_manifest { + Some(ref p) => p, + None => &self.current_manifest, + }; + match *self.packages.get(path) { + MaybePackage::Package(ref p) => p.manifest().patch(), + MaybePackage::Virtual(ref vm) => vm.patch(), + } + } + + /// Returns an iterator over all packages in this workspace + pub fn members<'a>(&'a self) -> Members<'a, 'cfg> { + Members { + ws: self, + iter: self.members.iter(), + } + } + + /// Returns an iterator over default packages in this workspace + pub fn default_members<'a>(&'a self) -> Members<'a, 'cfg> { + Members { + ws: self, + iter: self.default_members.iter(), + } + } + + pub fn is_ephemeral(&self) -> bool { + self.is_ephemeral + } + + pub fn require_optional_deps(&self) -> bool { + self.require_optional_deps + } + + pub fn set_require_optional_deps<'a>( + &'a mut self, + require_optional_deps: bool, + ) -> &mut Workspace<'cfg> { + self.require_optional_deps = require_optional_deps; + self + } + + /// Finds the root of a workspace for the crate whose manifest is located + /// at `manifest_path`. + /// + /// This will parse the `Cargo.toml` at `manifest_path` and then interpret + /// the workspace configuration, optionally walking up the filesystem + /// looking for other workspace roots. + /// + /// Returns an error if `manifest_path` isn't actually a valid manifest or + /// if some other transient error happens. 
+ fn find_root(&mut self, manifest_path: &Path) -> CargoResult> { + fn read_root_pointer(member_manifest: &Path, root_link: &str) -> CargoResult { + let path = member_manifest + .parent() + .unwrap() + .join(root_link) + .join("Cargo.toml"); + debug!("find_root - pointer {}", path.display()); + Ok(paths::normalize_path(&path)) + }; + + { + let current = self.packages.load(manifest_path)?; + match *current.workspace_config() { + WorkspaceConfig::Root(_) => { + debug!("find_root - is root {}", manifest_path.display()); + return Ok(Some(manifest_path.to_path_buf())); + } + WorkspaceConfig::Member { + root: Some(ref path_to_root), + } => return Ok(Some(read_root_pointer(manifest_path, path_to_root)?)), + WorkspaceConfig::Member { root: None } => {} + } + } + + for path in paths::ancestors(manifest_path).skip(2) { + let ances_manifest_path = path.join("Cargo.toml"); + debug!("find_root - trying {}", ances_manifest_path.display()); + if ances_manifest_path.exists() { + match *self.packages.load(&ances_manifest_path)?.workspace_config() { + WorkspaceConfig::Root(ref ances_root_config) => { + debug!("find_root - found a root checking exclusion"); + if !ances_root_config.is_excluded(manifest_path) { + debug!("find_root - found!"); + return Ok(Some(ances_manifest_path)); + } + } + WorkspaceConfig::Member { + root: Some(ref path_to_root), + } => { + debug!("find_root - found pointer"); + return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)?)); + } + WorkspaceConfig::Member { .. } => {} + } + } + + // Don't walk across `CARGO_HOME` when we're looking for the + // workspace root. Sometimes a project will be organized with + // `CARGO_HOME` pointing inside of the workspace root or in the + // current project, but we don't want to mistakenly try to put + // crates.io crates into the workspace by accident. + if self.config.home() == path { + break; + } + } + + Ok(None) + } + + /// After the root of a workspace has been located, probes for all members + /// of a workspace. + /// + /// If the `workspace.members` configuration is present, then this just + /// verifies that those are all valid packages to point to. Otherwise, this + /// will transitively follow all `path` dependencies looking for members of + /// the workspace. + fn find_members(&mut self) -> CargoResult<()> { + let root_manifest_path = match self.root_manifest { + Some(ref path) => path.clone(), + None => { + debug!("find_members - only me as a member"); + self.members.push(self.current_manifest.clone()); + self.default_members.push(self.current_manifest.clone()); + return Ok(()); + } + }; + + let members_paths; + let default_members_paths; + { + let root_package = self.packages.load(&root_manifest_path)?; + match *root_package.workspace_config() { + WorkspaceConfig::Root(ref root_config) => { + members_paths = + root_config.members_paths(root_config.members.as_ref().unwrap_or(&vec![]))?; + default_members_paths = if let Some(ref default) = root_config.default_members { + Some(root_config.members_paths(default)?) 
+ } else { + None + } + } + _ => bail!( + "root of a workspace inferred but wasn't a root: {}", + root_manifest_path.display() + ), + } + } + + for path in members_paths { + self.find_path_deps(&path.join("Cargo.toml"), &root_manifest_path, false)?; + } + + if let Some(default) = default_members_paths { + for path in default { + let manifest_path = paths::normalize_path(&path.join("Cargo.toml")); + if !self.members.contains(&manifest_path) { + bail!( + "package `{}` is listed in workspace’s default-members \ + but is not a member.", + path.display() + ) + } + self.default_members.push(manifest_path) + } + } else if self.is_virtual() { + self.default_members = self.members.clone() + } else { + self.default_members.push(self.current_manifest.clone()) + } + + self.find_path_deps(&root_manifest_path, &root_manifest_path, false) + } + + fn find_path_deps( + &mut self, + manifest_path: &Path, + root_manifest: &Path, + is_path_dep: bool, + ) -> CargoResult<()> { + let manifest_path = paths::normalize_path(manifest_path); + if self.members.contains(&manifest_path) { + return Ok(()); + } + if is_path_dep && !manifest_path.parent().unwrap().starts_with(self.root()) + && self.find_root(&manifest_path)? != self.root_manifest + { + // If `manifest_path` is a path dependency outside of the workspace, + // don't add it, or any of its dependencies, as a members. + return Ok(()); + } + + if let WorkspaceConfig::Root(ref root_config) = + *self.packages.load(root_manifest)?.workspace_config() + { + if root_config.is_excluded(&manifest_path) { + return Ok(()); + } + } + + debug!("find_members - {}", manifest_path.display()); + self.members.push(manifest_path.clone()); + + let candidates = { + let pkg = match *self.packages.load(&manifest_path)? { + MaybePackage::Package(ref p) => p, + MaybePackage::Virtual(_) => return Ok(()), + }; + pkg.dependencies() + .iter() + .map(|d| d.source_id()) + .filter(|d| d.is_path()) + .filter_map(|d| d.url().to_file_path().ok()) + .map(|p| p.join("Cargo.toml")) + .collect::>() + }; + for candidate in candidates { + self.find_path_deps(&candidate, root_manifest, true)?; + } + Ok(()) + } + + /// Validates a workspace, ensuring that a number of invariants are upheld: + /// + /// 1. A workspace only has one root. + /// 2. All workspace members agree on this one root as the root. + /// 3. The current crate is a member of this workspace. + fn validate(&mut self) -> CargoResult<()> { + if self.root_manifest.is_none() { + return Ok(()); + } + + let mut roots = Vec::new(); + { + let mut names = BTreeMap::new(); + for member in self.members.iter() { + let package = self.packages.get(member); + match *package.workspace_config() { + WorkspaceConfig::Root(_) => { + roots.push(member.parent().unwrap().to_path_buf()); + } + WorkspaceConfig::Member { .. 
} => {} + } + let name = match *package { + MaybePackage::Package(ref p) => p.name(), + MaybePackage::Virtual(_) => continue, + }; + if let Some(prev) = names.insert(name, member) { + bail!( + "two packages named `{}` in this workspace:\n\ + - {}\n\ + - {}", + name, + prev.display(), + member.display() + ); + } + } + } + + match roots.len() { + 0 => bail!( + "`package.workspace` configuration points to a crate \ + which is not configured with [workspace]: \n\ + configuration at: {}\n\ + points to: {}", + self.current_manifest.display(), + self.root_manifest.as_ref().unwrap().display() + ), + 1 => {} + _ => { + bail!( + "multiple workspace roots found in the same workspace:\n{}", + roots + .iter() + .map(|r| format!(" {}", r.display())) + .collect::>() + .join("\n") + ); + } + } + + for member in self.members.clone() { + let root = self.find_root(&member)?; + if root == self.root_manifest { + continue; + } + + match root { + Some(root) => { + bail!( + "package `{}` is a member of the wrong workspace\n\ + expected: {}\n\ + actual: {}", + member.display(), + self.root_manifest.as_ref().unwrap().display(), + root.display() + ); + } + None => { + bail!( + "workspace member `{}` is not hierarchically below \ + the workspace root `{}`", + member.display(), + self.root_manifest.as_ref().unwrap().display() + ); + } + } + } + + if !self.members.contains(&self.current_manifest) { + let root = self.root_manifest.as_ref().unwrap(); + let root_dir = root.parent().unwrap(); + let current_dir = self.current_manifest.parent().unwrap(); + let root_pkg = self.packages.get(root); + + // FIXME: Make this more generic by using a relative path resolver between member and + // root. + let members_msg = match current_dir.strip_prefix(root_dir) { + Ok(rel) => format!( + "this may be fixable by adding `{}` to the \ + `workspace.members` array of the manifest \ + located at: {}", + rel.display(), + root.display() + ), + Err(_) => format!( + "this may be fixable by adding a member to \ + the `workspace.members` array of the \ + manifest located at: {}", + root.display() + ), + }; + let extra = match *root_pkg { + MaybePackage::Virtual(_) => members_msg, + MaybePackage::Package(ref p) => { + let has_members_list = match *p.manifest().workspace_config() { + WorkspaceConfig::Root(ref root_config) => root_config.has_members_list(), + WorkspaceConfig::Member { .. 
} => unreachable!(), + }; + if !has_members_list { + format!( + "this may be fixable by ensuring that this \ + crate is depended on by the workspace \ + root: {}", + root.display() + ) + } else { + members_msg + } + } + }; + bail!( + "current package believes it's in a workspace when it's not:\n\ + current: {}\n\ + workspace: {}\n\n{}", + self.current_manifest.display(), + root.display(), + extra + ); + } + + if let Some(ref root_manifest) = self.root_manifest { + let default_profiles = Profiles { + release: Profile::default_release(), + dev: Profile::default_dev(), + test: Profile::default_test(), + test_deps: Profile::default_dev(), + bench: Profile::default_bench(), + bench_deps: Profile::default_release(), + doc: Profile::default_doc(), + custom_build: Profile::default_custom_build(), + check: Profile::default_check(), + check_test: Profile::default_check_test(), + doctest: Profile::default_doctest(), + }; + + for pkg in self.members() + .filter(|p| p.manifest_path() != root_manifest) + { + if pkg.manifest().profiles() != &default_profiles { + let message = &format!( + "profiles for the non root package will be ignored, \ + specify profiles at the workspace root:\n\ + package: {}\n\ + workspace: {}", + pkg.manifest_path().display(), + root_manifest.display() + ); + + //TODO: remove `Eq` bound from `Profiles` when the warning is removed. + self.config.shell().warn(&message)?; + } + } + } + + Ok(()) + } + + pub fn load(&self, manifest_path: &Path) -> CargoResult { + match self.packages.maybe_get(manifest_path) { + Some(&MaybePackage::Package(ref p)) => return Ok(p.clone()), + Some(&MaybePackage::Virtual(_)) => bail!("cannot load workspace root"), + None => {} + } + + let mut loaded = self.loaded_packages.borrow_mut(); + if let Some(p) = loaded.get(manifest_path).cloned() { + return Ok(p); + } + let source_id = SourceId::for_path(manifest_path.parent().unwrap())?; + let (package, _nested_paths) = ops::read_package(manifest_path, &source_id, self.config)?; + loaded.insert(manifest_path.to_path_buf(), package.clone()); + Ok(package) + } + + /// Preload the provided registry with already loaded packages. + /// + /// A workspace may load packages during construction/parsing/early phases + /// for various operations, and this preload step avoids doubly-loading and + /// parsing crates on the filesystem by inserting them all into the registry + /// with their in-memory formats. + pub fn preload(&self, registry: &mut PackageRegistry<'cfg>) { + // These can get weird as this generally represents a workspace during + // `cargo install`. Things like git repositories will actually have a + // `PathSource` with multiple entries in it, so the logic below is + // mostly just an optimization for normal `cargo build` in workspaces + // during development. 
+ if self.is_ephemeral { + return; + } + + for pkg in self.packages.packages.values() { + let pkg = match *pkg { + MaybePackage::Package(ref p) => p.clone(), + MaybePackage::Virtual(_) => continue, + }; + let mut src = PathSource::new( + pkg.manifest_path(), + pkg.package_id().source_id(), + self.config, + ); + src.preload_with(pkg); + registry.add_preloaded(Box::new(src)); + } + } +} + +impl<'cfg> Packages<'cfg> { + fn get(&self, manifest_path: &Path) -> &MaybePackage { + self.maybe_get(manifest_path).unwrap() + } + + fn maybe_get(&self, manifest_path: &Path) -> Option<&MaybePackage> { + self.packages.get(manifest_path.parent().unwrap()) + } + + fn load(&mut self, manifest_path: &Path) -> CargoResult<&MaybePackage> { + let key = manifest_path.parent().unwrap(); + match self.packages.entry(key.to_path_buf()) { + Entry::Occupied(e) => Ok(e.into_mut()), + Entry::Vacant(v) => { + let source_id = SourceId::for_path(key)?; + let (manifest, _nested_paths) = + read_manifest(manifest_path, &source_id, self.config)?; + Ok(v.insert(match manifest { + EitherManifest::Real(manifest) => { + MaybePackage::Package(Package::new(manifest, manifest_path)) + } + EitherManifest::Virtual(vm) => MaybePackage::Virtual(vm), + })) + } + } + } +} + +impl<'a, 'cfg> Members<'a, 'cfg> { + pub fn is_empty(self) -> bool { + self.count() == 0 + } +} + +impl<'a, 'cfg> Iterator for Members<'a, 'cfg> { + type Item = &'a Package; + + fn next(&mut self) -> Option<&'a Package> { + loop { + let next = self.iter.next().map(|path| self.ws.packages.get(path)); + match next { + Some(&MaybePackage::Package(ref p)) => return Some(p), + Some(&MaybePackage::Virtual(_)) => {} + None => return None, + } + } + } +} + +impl MaybePackage { + fn workspace_config(&self) -> &WorkspaceConfig { + match *self { + MaybePackage::Package(ref p) => p.manifest().workspace_config(), + MaybePackage::Virtual(ref vm) => vm.workspace_config(), + } + } +} + +impl WorkspaceRootConfig { + /// Create a new Intermediate Workspace Root configuration. + pub fn new( + root_dir: &Path, + members: &Option>, + default_members: &Option>, + exclude: &Option>, + ) -> WorkspaceRootConfig { + WorkspaceRootConfig { + root_dir: root_dir.to_path_buf(), + members: members.clone(), + default_members: default_members.clone(), + exclude: exclude.clone().unwrap_or_default(), + } + } + + /// Checks the path against the `excluded` list. + /// + /// This method does NOT consider the `members` list. + fn is_excluded(&self, manifest_path: &Path) -> bool { + let excluded = self.exclude + .iter() + .any(|ex| manifest_path.starts_with(self.root_dir.join(ex))); + + let explicit_member = match self.members { + Some(ref members) => members + .iter() + .any(|mem| manifest_path.starts_with(self.root_dir.join(mem))), + None => false, + }; + + !explicit_member && excluded + } + + fn has_members_list(&self) -> bool { + self.members.is_some() + } + + fn members_paths(&self, globs: &[String]) -> CargoResult> { + let mut expanded_list = Vec::new(); + + for glob in globs { + let pathbuf = self.root_dir.join(glob); + let expanded_paths = Self::expand_member_path(&pathbuf)?; + + // If glob does not find any valid paths, then put the original + // path in the expanded list to maintain backwards compatibility. 
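+            //
+            // For example, a hypothetical `members = ["crates/*"]` with
+            // crates at `crates/a` and `crates/b` expands to both paths,
+            // while a literal entry such as `members = ["tools"]` that
+            // matches nothing on disk is kept verbatim.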
+ if expanded_paths.is_empty() { + expanded_list.push(pathbuf); + } else { + expanded_list.extend(expanded_paths); + } + } + + Ok(expanded_list) + } + + fn expand_member_path(path: &Path) -> CargoResult> { + let path = match path.to_str() { + Some(p) => p, + None => return Ok(Vec::new()), + }; + let res = glob(path).chain_err(|| format_err!("could not parse pattern `{}`", &path))?; + let res = res.map(|p| { + p.chain_err(|| format_err!("unable to match path to pattern `{}`", &path)) + }).collect::, _>>()?; + Ok(res) + } +} diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs new file mode 100644 index 000000000..07b2cd408 --- /dev/null +++ b/src/cargo/lib.rs @@ -0,0 +1,240 @@ +#![cfg_attr(test, deny(warnings))] +// Currently, Cargo does not use clippy for its source code. +// But if someone runs it they should know that +// @alexcrichton disagree with clippy on some style things +#![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))] + +extern crate atty; +extern crate clap; +#[cfg(target_os = "macos")] +extern crate core_foundation; +extern crate crates_io as registry; +extern crate crossbeam; +extern crate curl; +#[macro_use] +extern crate failure; +extern crate filetime; +extern crate flate2; +extern crate fs2; +extern crate git2; +extern crate glob; +extern crate hex; +extern crate home; +extern crate ignore; +extern crate jobserver; +#[macro_use] +extern crate lazy_static; +extern crate lazycell; +extern crate libc; +extern crate libgit2_sys; +#[macro_use] +extern crate log; +extern crate num_cpus; +extern crate same_file; +extern crate semver; +extern crate serde; +#[macro_use] +extern crate serde_derive; +extern crate serde_ignored; +#[macro_use] +extern crate serde_json; +extern crate shell_escape; +extern crate tar; +extern crate tempfile; +extern crate termcolor; +extern crate toml; +extern crate url; + +use std::fmt; + +use serde::ser; +use failure::Error; + +use core::Shell; +use core::shell::Verbosity::Verbose; + +pub use util::{CargoError, CargoResult, CliError, CliResult, Config}; +pub use util::errors::Internal; + +pub const CARGO_ENV: &str = "CARGO"; + +pub mod core; +pub mod ops; +pub mod sources; +pub mod util; + +pub struct CommitInfo { + pub short_commit_hash: String, + pub commit_hash: String, + pub commit_date: String, +} + +pub struct CfgInfo { + // Information about the git repository we may have been built from. + pub commit_info: Option, + // The release channel we were built for. + pub release_channel: String, +} + +pub struct VersionInfo { + pub major: u8, + pub minor: u8, + pub patch: u8, + pub pre_release: Option, + // Information that's only available when we were built with + // configure/make, rather than cargo itself. + pub cfg_info: Option, +} + +impl fmt::Display for VersionInfo { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "cargo {}.{}.{}", self.major, self.minor, self.patch)?; + if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) { + if channel != "stable" { + write!(f, "-{}", channel)?; + let empty = String::from(""); + write!(f, "{}", self.pre_release.as_ref().unwrap_or(&empty))?; + } + }; + + if let Some(ref cfg) = self.cfg_info { + if let Some(ref ci) = cfg.commit_info { + write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?; + } + }; + Ok(()) + } +} + +pub fn print_json(obj: &T) { + let encoded = serde_json::to_string(&obj).unwrap(); + println!("{}", encoded); +} + +pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! 
{ + debug!("exit_with_error; err={:?}", err); + if let Some(ref err) = err.error { + if let Some(clap_err) = err.downcast_ref::() { + clap_err.exit() + } + } + + let CliError { + error, + exit_code, + unknown, + } = err; + // exit_code == 0 is non-fatal error, e.g. docopt version info + let fatal = exit_code != 0; + + let hide = unknown && shell.verbosity() != Verbose; + + if let Some(error) = error { + if hide { + drop(shell.error("An unknown error occurred")) + } else if fatal { + drop(shell.error(&error)) + } else { + println!("{}", error); + } + + if !handle_cause(&error, shell) || hide { + drop(writeln!( + shell.err(), + "\nTo learn more, run the command again \ + with --verbose." + )); + } + } + + std::process::exit(exit_code) +} + +pub fn handle_error(err: CargoError, shell: &mut Shell) { + debug!("handle_error; err={:?}", &err); + + let _ignored_result = shell.error(&err); + handle_cause(&err, shell); +} + +fn handle_cause(cargo_err: &Error, shell: &mut Shell) -> bool { + fn print(error: String, shell: &mut Shell) { + drop(writeln!(shell.err(), "\nCaused by:")); + drop(writeln!(shell.err(), " {}", error)); + } + + let verbose = shell.verbosity(); + + if verbose == Verbose { + // The first error has already been printed to the shell + // Print all remaining errors + for err in cargo_err.causes().skip(1) { + print(err.to_string(), shell); + } + } else { + // The first error has already been printed to the shell + // Print remaining errors until one marked as Internal appears + for err in cargo_err.causes().skip(1) { + if err.downcast_ref::().is_some() { + return false; + } + + print(err.to_string(), shell); + } + } + + true +} + +pub fn version() -> VersionInfo { + macro_rules! option_env_str { + ($name:expr) => { option_env!($name).map(|s| s.to_string()) } + } + + // So this is pretty horrible... + // There are two versions at play here: + // - version of cargo-the-binary, which you see when you type `cargo --version` + // - version of cargo-the-library, which you download from crates.io for use + // in your projects. + // + // We want to make the `binary` version the same as the corresponding Rust/rustc release. + // At the same time, we want to keep the library version at `0.x`, because Cargo as + // a library is (and probably will always be) unstable. + // + // Historically, Cargo used the same version number for both the binary and the library. + // Specifically, rustc 1.x.z was paired with cargo 0.x+1.w. + // We continue to use this scheme for the library, but transform it to 1.x.w for the purposes + // of `cargo --version`. + let major = 1; + let minor = env!("CARGO_PKG_VERSION_MINOR").parse::().unwrap() - 1; + let patch = env!("CARGO_PKG_VERSION_PATCH").parse::().unwrap(); + + match option_env!("CFG_RELEASE_CHANNEL") { + // We have environment variables set up from configure/make. + Some(_) => { + let commit_info = option_env!("CFG_COMMIT_HASH").map(|s| CommitInfo { + commit_hash: s.to_string(), + short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(), + commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(), + }); + VersionInfo { + major, + minor, + patch, + pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"), + cfg_info: Some(CfgInfo { + release_channel: option_env_str!("CFG_RELEASE_CHANNEL").unwrap(), + commit_info, + }), + } + } + // We are being compiled by Cargo itself. 
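+        //
+        // Worked example of the scheme above: a library version of `0.28.0`
+        // (CARGO_PKG_VERSION_MINOR = 28) is reported by the binary as
+        // `cargo 1.27.0`, matching the rustc 1.27 release it shipped with.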
+ None => VersionInfo { + major, + minor, + patch, + pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"), + cfg_info: None, + }, + } +} diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs new file mode 100644 index 000000000..bc0bc51f8 --- /dev/null +++ b/src/cargo/ops/cargo_clean.rs @@ -0,0 +1,140 @@ +use std::default::Default; +use std::fs; +use std::path::Path; + +use core::{Profiles, Workspace}; +use util::Config; +use util::errors::{CargoResult, CargoResultExt}; +use util::paths; +use ops::{self, BuildConfig, Context, Kind, Unit}; + +pub struct CleanOptions<'a> { + pub config: &'a Config, + /// A list of packages to clean. If empty, everything is cleaned. + pub spec: Vec, + /// The target arch triple to clean, or None for the host arch + pub target: Option, + /// Whether to clean the release directory + pub release: bool, +} + +/// Cleans the project from build artifacts. +pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> { + let target_dir = ws.target_dir(); + let config = ws.config(); + + // If we have a spec, then we need to delete some packages, otherwise, just + // remove the whole target directory and be done with it! + // + // Note that we don't bother grabbing a lock here as we're just going to + // blow it all away anyway. + if opts.spec.is_empty() { + let target_dir = target_dir.into_path_unlocked(); + return rm_rf(&target_dir, config); + } + + let (packages, resolve) = ops::resolve_ws(ws)?; + + let profiles = ws.profiles(); + let host_triple = opts.config.rustc()?.host.clone(); + let mut units = Vec::new(); + + for spec in opts.spec.iter() { + // Translate the spec to a Package + let pkgid = resolve.query(spec)?; + let pkg = packages.get(pkgid)?; + + // Generate all relevant `Unit` targets for this package + for target in pkg.targets() { + for kind in [Kind::Host, Kind::Target].iter() { + let Profiles { + ref release, + ref dev, + ref test, + ref bench, + ref doc, + ref custom_build, + ref test_deps, + ref bench_deps, + ref check, + ref check_test, + ref doctest, + } = *profiles; + let profiles = [ + release, + dev, + test, + bench, + doc, + custom_build, + test_deps, + bench_deps, + check, + check_test, + doctest, + ]; + for profile in profiles.iter() { + units.push(Unit { + pkg, + target, + profile, + kind: *kind, + }); + } + } + } + } + + let mut cx = Context::new( + ws, + &resolve, + &packages, + opts.config, + BuildConfig { + host_triple, + requested_target: opts.target.clone(), + release: opts.release, + jobs: 1, + ..BuildConfig::default() + }, + profiles, + &units, + )?; + + for unit in units.iter() { + rm_rf(&cx.fingerprint_dir(unit), config)?; + if unit.target.is_custom_build() { + if unit.profile.run_custom_build { + rm_rf(&cx.build_script_out_dir(unit), config)?; + } else { + rm_rf(&cx.build_script_dir(unit), config)?; + } + continue; + } + + for &(ref src, ref link_dst, _) in cx.target_filenames(unit)?.iter() { + rm_rf(src, config)?; + if let Some(ref dst) = *link_dst { + rm_rf(dst, config)?; + } + } + } + + Ok(()) +} + +fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> { + let m = fs::metadata(path); + if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) { + config + .shell() + .verbose(|shell| shell.status("Removing", path.display()))?; + paths::remove_dir_all(path).chain_err(|| format_err!("could not remove build directory"))?; + } else if m.is_ok() { + config + .shell() + .verbose(|shell| shell.status("Removing", path.display()))?; + paths::remove_file(path).chain_err(|| format_err!("failed to remove build 
artifact"))?; + } + Ok(()) +} diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs new file mode 100644 index 000000000..74cc8949a --- /dev/null +++ b/src/cargo/ops/cargo_compile.rs @@ -0,0 +1,943 @@ +//! +//! Cargo compile currently does the following steps: +//! +//! All configurations are already injected as environment variables via the +//! main cargo command +//! +//! 1. Read the manifest +//! 2. Shell out to `cargo-resolve` with a list of dependencies and sources as +//! stdin +//! +//! a. Shell out to `--do update` and `--do list` for each source +//! b. Resolve dependencies and return a list of name/version/source +//! +//! 3. Shell out to `--do download` for each source +//! 4. Shell out to `--do get` for each source, and build up the list of paths +//! to pass to rustc -L +//! 5. Call `cargo-rustc` with the results of the resolver zipped together with +//! the results of the `get` +//! +//! a. Topologically sort the dependencies +//! b. Compile each dependency in order, passing in the -L's pointing at each +//! previously compiled dependency +//! + +use std::collections::{HashMap, HashSet}; +use std::default::Default; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use core::{Package, Source, Target}; +use core::{PackageId, PackageIdSpec, Profile, Profiles, TargetKind, Workspace}; +use core::resolver::{Method, Resolve}; +use ops::{self, BuildOutput, DefaultExecutor, Executor}; +use util::config::Config; +use util::{profile, CargoResult, CargoResultExt}; + +/// Contains information about how a package should be compiled. +#[derive(Debug)] +pub struct CompileOptions<'a> { + pub config: &'a Config, + /// Number of concurrent jobs to use. + pub jobs: Option, + /// The target platform to compile for (example: `i686-unknown-linux-gnu`). + pub target: Option, + /// Extra features to build for the root package + pub features: Vec, + /// Flag whether all available features should be built for the root package + pub all_features: bool, + /// Flag if the default feature should be built for the root package + pub no_default_features: bool, + /// A set of packages to build. + pub spec: Packages, + /// Filter to apply to the root package to select which targets will be + /// built. + pub filter: CompileFilter, + /// Whether this is a release build or not + pub release: bool, + /// Mode for this compile. + pub mode: CompileMode, + /// `--error_format` flag for the compiler. 
+ pub message_format: MessageFormat, + /// Extra arguments to be passed to rustdoc (for main crate and dependencies) + pub target_rustdoc_args: Option>, + /// The specified target will be compiled with all the available arguments, + /// note that this only accounts for the *final* invocation of rustc + pub target_rustc_args: Option>, +} + +impl<'a> CompileOptions<'a> { + pub fn default(config: &'a Config, mode: CompileMode) -> CompileOptions<'a> { + CompileOptions { + config, + jobs: None, + target: None, + features: Vec::new(), + all_features: false, + no_default_features: false, + spec: ops::Packages::Packages(Vec::new()), + mode, + release: false, + filter: CompileFilter::Default { + required_features_filterable: false, + }, + message_format: MessageFormat::Human, + target_rustdoc_args: None, + target_rustc_args: None, + } + } +} + +#[derive(Clone, Copy, PartialEq, Debug)] +pub enum CompileMode { + Test, + Build, + Check { test: bool }, + Bench, + Doc { deps: bool }, + Doctest, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum MessageFormat { + Human, + Json, +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum Packages { + Default, + All, + OptOut(Vec), + Packages(Vec), +} + +impl Packages { + pub fn from_flags(all: bool, exclude: Vec, package: Vec) -> CargoResult { + Ok(match (all, exclude.len(), package.len()) { + (false, 0, 0) => Packages::Default, + (false, 0, _) => Packages::Packages(package), + (false, _, _) => bail!("--exclude can only be used together with --all"), + (true, 0, _) => Packages::All, + (true, _, _) => Packages::OptOut(exclude), + }) + } + + pub fn into_package_id_specs(&self, ws: &Workspace) -> CargoResult> { + let specs = match *self { + Packages::All => ws.members() + .map(Package::package_id) + .map(PackageIdSpec::from_package_id) + .collect(), + Packages::OptOut(ref opt_out) => ws.members() + .map(Package::package_id) + .map(PackageIdSpec::from_package_id) + .filter(|p| opt_out.iter().position(|x| *x == p.name()).is_none()) + .collect(), + Packages::Packages(ref packages) if packages.is_empty() => ws.current_opt() + .map(Package::package_id) + .map(PackageIdSpec::from_package_id) + .into_iter() + .collect(), + Packages::Packages(ref packages) => packages + .iter() + .map(|p| PackageIdSpec::parse(p)) + .collect::>>()?, + Packages::Default => ws.default_members() + .map(Package::package_id) + .map(PackageIdSpec::from_package_id) + .collect(), + }; + if specs.is_empty() { + if ws.is_virtual() { + bail!( + "manifest path `{}` contains no package: The manifest is virtual, \ + and the workspace has no members.", + ws.root().display() + ) + } + bail!("no packages to compile") + } + Ok(specs) + } +} + +#[derive(Debug)] +pub enum FilterRule { + All, + Just(Vec), +} + +#[derive(Debug)] +pub enum CompileFilter { + Default { + /// Flag whether targets can be safely skipped when required-features are not satisfied. 
+ required_features_filterable: bool, + }, + Only { + all_targets: bool, + lib: bool, + bins: FilterRule, + examples: FilterRule, + tests: FilterRule, + benches: FilterRule, + }, +} + +pub fn compile<'a>( + ws: &Workspace<'a>, + options: &CompileOptions<'a>, +) -> CargoResult> { + compile_with_exec(ws, options, Arc::new(DefaultExecutor)) +} + +pub fn compile_with_exec<'a>( + ws: &Workspace<'a>, + options: &CompileOptions<'a>, + exec: Arc, +) -> CargoResult> { + for member in ws.members() { + for warning in member.manifest().warnings().iter() { + if warning.is_critical { + let err = format_err!("{}", warning.message); + let cx = format_err!( + "failed to parse manifest at `{}`", + member.manifest_path().display() + ); + return Err(err.context(cx).into()); + } else { + options.config.shell().warn(&warning.message)? + } + } + } + compile_ws(ws, None, options, exec) +} + +pub fn compile_ws<'a>( + ws: &Workspace<'a>, + source: Option>, + options: &CompileOptions<'a>, + exec: Arc, +) -> CargoResult> { + let CompileOptions { + config, + jobs, + ref target, + ref spec, + ref features, + all_features, + no_default_features, + release, + mode, + message_format, + ref filter, + ref target_rustdoc_args, + ref target_rustc_args, + } = *options; + + let target = match target { + &Some(ref target) if target.ends_with(".json") => { + let path = Path::new(target) + .canonicalize() + .chain_err(|| format_err!("Target path {:?} is not a valid file", target))?; + Some(path.into_os_string() + .into_string() + .map_err(|_| format_err!("Target path is not valid unicode"))?) + } + other => other.clone(), + }; + + if jobs == Some(0) { + bail!("jobs must be at least 1") + } + + let profiles = ws.profiles(); + + let specs = spec.into_package_id_specs(ws)?; + let features = Method::split_features(features); + let method = Method::Required { + dev_deps: ws.require_optional_deps() || filter.need_dev_deps(mode), + features: &features, + all_features, + uses_default_features: !no_default_features, + }; + let resolve = ops::resolve_ws_with_method(ws, source, method, &specs)?; + let (packages, resolve_with_overrides) = resolve; + + let to_builds = specs + .iter() + .map(|p| { + let pkgid = p.query(resolve_with_overrides.iter())?; + let p = packages.get(pkgid)?; + p.manifest().print_teapot(ws.config()); + Ok(p) + }) + .collect::>>()?; + + let mut general_targets = Vec::new(); + let mut package_targets = Vec::new(); + + match (target_rustc_args, target_rustdoc_args) { + (&Some(..), _) | (_, &Some(..)) if to_builds.len() != 1 => { + panic!("`rustc` and `rustdoc` should not accept multiple `-p` flags") + } + (&Some(ref args), _) => { + let all_features = + resolve_all_features(&resolve_with_overrides, to_builds[0].package_id()); + let targets = + generate_targets(to_builds[0], profiles, mode, filter, &all_features, release)?; + if targets.len() == 1 { + let (target, profile) = targets[0]; + let mut profile = profile.clone(); + profile.rustc_args = Some(args.to_vec()); + general_targets.push((target, profile)); + } else { + bail!( + "extra arguments to `rustc` can only be passed to one \ + target, consider filtering\nthe package by passing \ + e.g. 
`--lib` or `--bin NAME` to specify a single target" + ) + } + } + (&None, &Some(ref args)) => { + let all_features = + resolve_all_features(&resolve_with_overrides, to_builds[0].package_id()); + let targets = + generate_targets(to_builds[0], profiles, mode, filter, &all_features, release)?; + if targets.len() == 1 { + let (target, profile) = targets[0]; + let mut profile = profile.clone(); + profile.rustdoc_args = Some(args.to_vec()); + general_targets.push((target, profile)); + } else { + bail!( + "extra arguments to `rustdoc` can only be passed to one \ + target, consider filtering\nthe package by passing e.g. \ + `--lib` or `--bin NAME` to specify a single target" + ) + } + } + (&None, &None) => for &to_build in to_builds.iter() { + let all_features = resolve_all_features(&resolve_with_overrides, to_build.package_id()); + let targets = + generate_targets(to_build, profiles, mode, filter, &all_features, release)?; + package_targets.push((to_build, targets)); + }, + }; + + for &(target, ref profile) in &general_targets { + for &to_build in to_builds.iter() { + package_targets.push((to_build, vec![(target, profile)])); + } + } + + let mut ret = { + let _p = profile::start("compiling"); + let mut build_config = scrape_build_config(config, jobs, target)?; + build_config.release = release; + build_config.test = mode == CompileMode::Test || mode == CompileMode::Bench; + build_config.json_messages = message_format == MessageFormat::Json; + if let CompileMode::Doc { deps } = mode { + build_config.doc_all = deps; + } + + ops::compile_targets( + ws, + &package_targets, + &packages, + &resolve_with_overrides, + config, + build_config, + profiles, + &exec, + )? + }; + + ret.to_doc_test = to_builds.into_iter().cloned().collect(); + + return Ok(ret); + + fn resolve_all_features( + resolve_with_overrides: &Resolve, + package_id: &PackageId, + ) -> HashSet { + let mut features = resolve_with_overrides.features(package_id).clone(); + + // Include features enabled for use by dependencies so targets can also use them with the + // required-features field when deciding whether to be built or skipped. 
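+        // For example (crate names illustrative only): if this package depends
+        // on `serde` with its `derive` feature enabled, the set gains the entry
+        // "serde/derive", the same syntax that `required-features` uses in
+        // Cargo.toml.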
+    let deps = resolve_with_overrides.deps(package_id);
+    for dep in deps {
+        for feature in resolve_with_overrides.features(dep) {
+            features.insert(dep.name().to_string() + "/" + feature);
+        }
+    }
+
+    features
+}
+
+impl FilterRule {
+    pub fn new(targets: Vec<String>, all: bool) -> FilterRule {
+        if all {
+            FilterRule::All
+        } else {
+            FilterRule::Just(targets)
+        }
+    }
+
+    fn matches(&self, target: &Target) -> bool {
+        match *self {
+            FilterRule::All => true,
+            FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()),
+        }
+    }
+
+    fn is_specific(&self) -> bool {
+        match *self {
+            FilterRule::All => true,
+            FilterRule::Just(ref targets) => !targets.is_empty(),
+        }
+    }
+
+    pub fn try_collect(&self) -> Option<Vec<String>> {
+        match *self {
+            FilterRule::All => None,
+            FilterRule::Just(ref targets) => Some(targets.clone()),
+        }
+    }
+}
+
+impl CompileFilter {
+    pub fn new(
+        lib_only: bool,
+        bins: Vec<String>,
+        all_bins: bool,
+        tsts: Vec<String>,
+        all_tsts: bool,
+        exms: Vec<String>,
+        all_exms: bool,
+        bens: Vec<String>,
+        all_bens: bool,
+        all_targets: bool,
+    ) -> CompileFilter {
+        let rule_bins = FilterRule::new(bins, all_bins);
+        let rule_tsts = FilterRule::new(tsts, all_tsts);
+        let rule_exms = FilterRule::new(exms, all_exms);
+        let rule_bens = FilterRule::new(bens, all_bens);
+
+        if all_targets {
+            CompileFilter::Only {
+                all_targets: true,
+                lib: true,
+                bins: FilterRule::All,
+                examples: FilterRule::All,
+                benches: FilterRule::All,
+                tests: FilterRule::All,
+            }
+        } else if lib_only || rule_bins.is_specific() || rule_tsts.is_specific()
+            || rule_exms.is_specific() || rule_bens.is_specific()
+        {
+            CompileFilter::Only {
+                all_targets: false,
+                lib: lib_only,
+                bins: rule_bins,
+                examples: rule_exms,
+                benches: rule_bens,
+                tests: rule_tsts,
+            }
+        } else {
+            CompileFilter::Default {
+                required_features_filterable: true,
+            }
+        }
+    }
+
+    pub fn need_dev_deps(&self, mode: CompileMode) -> bool {
+        match mode {
+            CompileMode::Test | CompileMode::Doctest | CompileMode::Bench => true,
+            CompileMode::Build | CompileMode::Doc { .. } | CompileMode::Check { .. } => match *self
+            {
+                CompileFilter::Default { .. } => false,
+                CompileFilter::Only {
+                    ref examples,
+                    ref tests,
+                    ref benches,
+                    ..
+                } => examples.is_specific() || tests.is_specific() || benches.is_specific(),
+            },
+        }
+    }
+
+    // This selects targets for `cargo run`. For the logic that selects targets
+    // for other subcommands, see `generate_targets` and `generate_default_targets`.
+    pub fn target_run(&self, target: &Target) -> bool {
+        match *self {
+            CompileFilter::Default { .. } => true,
+            CompileFilter::Only {
+                lib,
+                ref bins,
+                ref examples,
+                ref tests,
+                ref benches,
+                ..
+            } => {
+                let rule = match *target.kind() {
+                    TargetKind::Bin => bins,
+                    TargetKind::Test => tests,
+                    TargetKind::Bench => benches,
+                    TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples,
+                    TargetKind::Lib(..) => return lib,
+                    TargetKind::CustomBuild => return false,
+                };
+                rule.matches(target)
+            }
+        }
+    }
+
+    pub fn is_specific(&self) -> bool {
+        match *self {
+            CompileFilter::Default { .. } => false,
+            CompileFilter::Only { ..
} => true, + } + } +} + +#[derive(Clone, Copy, Debug)] +struct BuildProposal<'a> { + target: &'a Target, + profile: &'a Profile, + required: bool, +} + +fn generate_default_targets<'a>( + mode: CompileMode, + targets: &'a [Target], + profile: &'a Profile, + dep: &'a Profile, + required_features_filterable: bool, +) -> Vec> { + match mode { + CompileMode::Bench => targets + .iter() + .filter(|t| t.benched()) + .map(|t| BuildProposal { + target: t, + profile, + required: !required_features_filterable, + }) + .collect::>(), + CompileMode::Test => { + let mut base = targets + .iter() + .filter(|t| t.tested()) + .map(|t| BuildProposal { + target: t, + profile: if t.is_example() { dep } else { profile }, + required: !required_features_filterable, + }) + .collect::>(); + + // Always compile the library if we're testing everything as + // it'll be needed for doctests + if let Some(t) = targets.iter().find(|t| t.is_lib()) { + if t.doctested() { + base.push(BuildProposal { + target: t, + profile: dep, + required: !required_features_filterable, + }); + } + } + base + } + CompileMode::Build | CompileMode::Check { .. } => targets + .iter() + .filter(|t| t.is_bin() || t.is_lib()) + .map(|t| BuildProposal { + target: t, + profile, + required: !required_features_filterable, + }) + .collect(), + CompileMode::Doc { .. } => targets + .iter() + .filter(|t| { + t.documented() + && (!t.is_bin() || !targets.iter().any(|l| l.is_lib() && l.name() == t.name())) + }) + .map(|t| BuildProposal { + target: t, + profile, + required: !required_features_filterable, + }) + .collect(), + CompileMode::Doctest => { + if let Some(t) = targets.iter().find(|t| t.is_lib()) { + if t.doctested() { + return vec![ + BuildProposal { + target: t, + profile, + required: !required_features_filterable, + }, + ]; + } + } + + Vec::new() + } + } +} + +/// Given a filter rule and some context, propose a list of targets +fn propose_indicated_targets<'a>( + pkg: &'a Package, + rule: &FilterRule, + desc: &'static str, + is_expected_kind: fn(&Target) -> bool, + profile: &'a Profile, +) -> CargoResult>> { + match *rule { + FilterRule::All => { + let result = pkg.targets() + .iter() + .filter(|t| is_expected_kind(t)) + .map(|t| BuildProposal { + target: t, + profile, + required: false, + }); + Ok(result.collect()) + } + FilterRule::Just(ref names) => { + let mut targets = Vec::new(); + for name in names { + let target = pkg.targets() + .iter() + .find(|t| t.name() == *name && is_expected_kind(t)); + let t = match target { + Some(t) => t, + None => { + let suggestion = pkg.find_closest_target(name, is_expected_kind); + match suggestion { + Some(s) => { + let suggested_name = s.name(); + bail!( + "no {} target named `{}`\n\nDid you mean `{}`?", + desc, + name, + suggested_name + ) + } + None => bail!("no {} target named `{}`", desc, name), + } + } + }; + debug!("found {} `{}`", desc, name); + targets.push(BuildProposal { + target: t, + profile, + required: true, + }); + } + Ok(targets) + } + } +} + +/// Collect the targets that are libraries or have all required features available. +fn filter_compatible_targets<'a>( + mut proposals: Vec>, + features: &HashSet, +) -> CargoResult> { + let mut compatible = Vec::with_capacity(proposals.len()); + for proposal in proposals.drain(..) 
{ + let unavailable_features = match proposal.target.required_features() { + Some(rf) => rf.iter().filter(|f| !features.contains(*f)).collect(), + None => Vec::new(), + }; + if proposal.target.is_lib() || unavailable_features.is_empty() { + compatible.push((proposal.target, proposal.profile)); + } else if proposal.required { + let required_features = proposal.target.required_features().unwrap(); + let quoted_required_features: Vec = required_features + .iter() + .map(|s| format!("`{}`", s)) + .collect(); + bail!( + "target `{}` requires the features: {}\n\ + Consider enabling them by passing e.g. `--features=\"{}\"`", + proposal.target.name(), + quoted_required_features.join(", "), + required_features.join(" ") + ); + } + } + Ok(compatible) +} + +/// Given the configuration for a build, this function will generate all +/// target/profile combinations needed to be built. +fn generate_targets<'a>( + pkg: &'a Package, + profiles: &'a Profiles, + mode: CompileMode, + filter: &CompileFilter, + features: &HashSet, + release: bool, +) -> CargoResult> { + let build = if release { + &profiles.release + } else { + &profiles.dev + }; + let test = if release { + &profiles.bench + } else { + &profiles.test + }; + let profile = match mode { + CompileMode::Test => test, + CompileMode::Bench => &profiles.bench, + CompileMode::Build => build, + CompileMode::Check { test: false } => &profiles.check, + CompileMode::Check { test: true } => &profiles.check_test, + CompileMode::Doc { .. } => &profiles.doc, + CompileMode::Doctest => &profiles.doctest, + }; + + let test_profile = if profile.check { + &profiles.check_test + } else if mode == CompileMode::Build { + test + } else { + profile + }; + + let bench_profile = if profile.check { + &profiles.check_test + } else if mode == CompileMode::Build { + &profiles.bench + } else { + profile + }; + + let targets = match *filter { + CompileFilter::Default { + required_features_filterable, + } => { + let deps = if release { + &profiles.bench_deps + } else { + &profiles.test_deps + }; + generate_default_targets( + mode, + pkg.targets(), + profile, + deps, + required_features_filterable, + ) + } + CompileFilter::Only { + all_targets, + lib, + ref bins, + ref examples, + ref tests, + ref benches, + } => { + let mut targets = Vec::new(); + + if lib { + if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) { + targets.push(BuildProposal { + target: t, + profile, + required: true, + }); + } else if !all_targets { + bail!("no library targets found") + } + } + targets.append(&mut propose_indicated_targets( + pkg, + bins, + "bin", + Target::is_bin, + profile, + )?); + targets.append(&mut propose_indicated_targets( + pkg, + examples, + "example", + Target::is_example, + profile, + )?); + // If --tests was specified, add all targets that would be + // generated by `cargo test`. + let test_filter = match *tests { + FilterRule::All => Target::tested, + FilterRule::Just(_) => Target::is_test, + }; + targets.append(&mut propose_indicated_targets( + pkg, + tests, + "test", + test_filter, + test_profile, + )?); + // If --benches was specified, add all targets that would be + // generated by `cargo bench`. + let bench_filter = match *benches { + FilterRule::All => Target::benched, + FilterRule::Just(_) => Target::is_bench, + }; + targets.append(&mut propose_indicated_targets( + pkg, + benches, + "bench", + bench_filter, + bench_profile, + )?); + targets + } + }; + + filter_compatible_targets(targets, features) +} + +/// Parse all config files to learn about build configuration. 
Currently
+/// configured options are:
+///
+/// * build.jobs
+/// * build.target
+/// * target.$target.ar
+/// * target.$target.linker
+/// * target.$target.libfoo.metadata
+fn scrape_build_config(
+    config: &Config,
+    jobs: Option<u32>,
+    target: Option<String>,
+) -> CargoResult<ops::BuildConfig> {
+    if jobs.is_some() && config.jobserver_from_env().is_some() {
+        config.shell().warn(
+            "a `-j` argument was passed to Cargo but Cargo is \
+             also configured with an external jobserver in \
+             its environment, ignoring the `-j` parameter",
+        )?;
+    }
+    let cfg_jobs = match config.get_i64("build.jobs")? {
+        Some(v) => {
+            if v.val <= 0 {
+                bail!(
+                    "build.jobs must be positive, but found {} in {}",
+                    v.val,
+                    v.definition
+                )
+            } else if v.val >= i64::from(u32::max_value()) {
+                bail!(
+                    "build.jobs is too large: found {} in {}",
+                    v.val,
+                    v.definition
+                )
+            } else {
+                Some(v.val as u32)
+            }
+        }
+        None => None,
+    };
+    let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32);
+    let cfg_target = config.get_string("build.target")?.map(|s| s.val);
+    let target = target.or(cfg_target);
+    let mut base = ops::BuildConfig {
+        host_triple: config.rustc()?.host.clone(),
+        requested_target: target.clone(),
+        jobs,
+        ..Default::default()
+    };
+    base.host = scrape_target_config(config, &base.host_triple)?;
+    base.target = match target.as_ref() {
+        Some(triple) => scrape_target_config(config, triple)?,
+        None => base.host.clone(),
+    };
+    Ok(base)
+}
+
+fn scrape_target_config(config: &Config, triple: &str) -> CargoResult<ops::TargetConfig> {
+    let key = format!("target.{}", triple);
+    let mut ret = ops::TargetConfig {
+        ar: config.get_path(&format!("{}.ar", key))?.map(|v| v.val),
+        linker: config.get_path(&format!("{}.linker", key))?.map(|v| v.val),
+        overrides: HashMap::new(),
+    };
+    let table = match config.get_table(&key)? {
+        Some(table) => table.val,
+        None => return Ok(ret),
+    };
+    for (lib_name, value) in table {
+        match lib_name.as_str() {
+            "ar" | "linker" | "runner" | "rustflags" => continue,
+            _ => {}
+        }
+
+        let mut output = BuildOutput {
+            library_paths: Vec::new(),
+            library_links: Vec::new(),
+            cfgs: Vec::new(),
+            env: Vec::new(),
+            metadata: Vec::new(),
+            rerun_if_changed: Vec::new(),
+            rerun_if_env_changed: Vec::new(),
+            warnings: Vec::new(),
+        };
+        // We require deterministic order of evaluation, so we must sort the pairs by key first.
+        let mut pairs = Vec::new();
+        for (k, value) in value.table(&lib_name)?.0 {
+            pairs.push((k, value));
+        }
+        pairs.sort_by_key(|p| p.0);
+        for (k, value) in pairs {
+            let key = format!("{}.{}", key, k);
+            match &k[..]
{ + "rustc-flags" => { + let (flags, definition) = value.string(k)?; + let whence = format!("in `{}` (in {})", key, definition.display()); + let (paths, links) = BuildOutput::parse_rustc_flags(flags, &whence)?; + output.library_paths.extend(paths); + output.library_links.extend(links); + } + "rustc-link-lib" => { + let list = value.list(k)?; + output + .library_links + .extend(list.iter().map(|v| v.0.clone())); + } + "rustc-link-search" => { + let list = value.list(k)?; + output + .library_paths + .extend(list.iter().map(|v| PathBuf::from(&v.0))); + } + "rustc-cfg" => { + let list = value.list(k)?; + output.cfgs.extend(list.iter().map(|v| v.0.clone())); + } + "rustc-env" => for (name, val) in value.table(k)?.0 { + let val = val.string(name)?.0; + output.env.push((name.clone(), val.to_string())); + }, + "warning" | "rerun-if-changed" | "rerun-if-env-changed" => { + bail!("`{}` is not supported in build script overrides", k); + } + _ => { + let val = value.string(k)?.0; + output.metadata.push((k.clone(), val.to_string())); + } + } + } + ret.overrides.insert(lib_name, output); + } + + Ok(ret) +} diff --git a/src/cargo/ops/cargo_doc.rs b/src/cargo/ops/cargo_doc.rs new file mode 100644 index 000000000..45c51974b --- /dev/null +++ b/src/cargo/ops/cargo_doc.rs @@ -0,0 +1,148 @@ +use std::collections::HashMap; +use std::fs; +use std::path::Path; +use std::process::Command; + +use core::Workspace; +use ops; +use util::CargoResult; + +pub struct DocOptions<'a> { + pub open_result: bool, + pub compile_opts: ops::CompileOptions<'a>, +} + +pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> { + let specs = options.compile_opts.spec.into_package_id_specs(ws)?; + let resolve = ops::resolve_ws_precisely( + ws, + None, + &options.compile_opts.features, + options.compile_opts.all_features, + options.compile_opts.no_default_features, + &specs, + )?; + let (packages, resolve_with_overrides) = resolve; + + let pkgs = specs + .iter() + .map(|p| { + let pkgid = p.query(resolve_with_overrides.iter())?; + packages.get(pkgid) + }) + .collect::>>()?; + + let mut lib_names = HashMap::new(); + let mut bin_names = HashMap::new(); + for package in &pkgs { + for target in package.targets().iter().filter(|t| t.documented()) { + if target.is_lib() { + if let Some(prev) = lib_names.insert(target.crate_name(), package) { + bail!( + "The library `{}` is specified by packages `{}` and \ + `{}` but can only be documented once. Consider renaming \ + or marking one of the targets as `doc = false`.", + target.crate_name(), + prev, + package + ); + } + } else if let Some(prev) = bin_names.insert(target.crate_name(), package) { + bail!( + "The binary `{}` is specified by packages `{}` and \ + `{}` but can be documented only once. 
Consider renaming \ + or marking one of the targets as `doc = false`.", + target.crate_name(), + prev, + package + ); + } + } + } + + ops::compile(ws, &options.compile_opts)?; + + if options.open_result { + let name = if pkgs.len() > 1 { + bail!( + "Passing multiple packages and `open` is not supported.\n\ + Please re-run this command with `-p ` where `` \ + is one of the following:\n {}", + pkgs.iter() + .map(|p| p.name().to_inner()) + .collect::>() + .join("\n ") + ); + } else if pkgs.len() == 1 { + pkgs[0].name().replace("-", "_") + } else { + match lib_names.keys().chain(bin_names.keys()).nth(0) { + Some(s) => s.to_string(), + None => return Ok(()), + } + }; + + // Don't bother locking here as if this is getting deleted there's + // nothing we can do about it and otherwise if it's getting overwritten + // then that's also ok! + let mut target_dir = ws.target_dir(); + if let Some(ref triple) = options.compile_opts.target { + target_dir.push(Path::new(triple).file_stem().unwrap()); + } + let path = target_dir.join("doc").join(&name).join("index.html"); + let path = path.into_path_unlocked(); + if fs::metadata(&path).is_ok() { + let mut shell = options.compile_opts.config.shell(); + shell.status("Opening", path.display())?; + match open_docs(&path) { + Ok(m) => shell.status("Launching", m)?, + Err(e) => { + shell.warn("warning: could not determine a browser to open docs with, tried:")?; + for method in e { + shell.warn(format!("\t{}", method))?; + } + } + } + } + } + + Ok(()) +} + +#[cfg(not(any(target_os = "windows", target_os = "macos")))] +fn open_docs(path: &Path) -> Result<&'static str, Vec<&'static str>> { + use std::env; + let mut methods = Vec::new(); + // trying $BROWSER + if let Ok(name) = env::var("BROWSER") { + match Command::new(name).arg(path).status() { + Ok(_) => return Ok("$BROWSER"), + Err(_) => methods.push("$BROWSER"), + } + } + + for m in ["xdg-open", "gnome-open", "kde-open"].iter() { + match Command::new(m).arg(path).status() { + Ok(_) => return Ok(m), + Err(_) => methods.push(m), + } + } + + Err(methods) +} + +#[cfg(target_os = "windows")] +fn open_docs(path: &Path) -> Result<&'static str, Vec<&'static str>> { + match Command::new("cmd").arg("/C").arg(path).status() { + Ok(_) => Ok("cmd /C"), + Err(_) => Err(vec!["cmd /C"]), + } +} + +#[cfg(target_os = "macos")] +fn open_docs(path: &Path) -> Result<&'static str, Vec<&'static str>> { + match Command::new("open").arg(path).status() { + Ok(_) => Ok("open"), + Err(_) => Err(vec!["open"]), + } +} diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs new file mode 100644 index 000000000..c9ac0012b --- /dev/null +++ b/src/cargo/ops/cargo_fetch.rs @@ -0,0 +1,12 @@ +use core::{PackageSet, Resolve, Workspace}; +use ops; +use util::CargoResult; + +/// Executes `cargo fetch`. 
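+///
+/// Resolves the workspace, then forces a download of every package in the
+/// resolve graph via `PackageSet::get`, so a later build need not touch the
+/// network. A minimal sketch of a caller (assumes a `config: Config` in scope;
+/// paths hypothetical):
+///
+/// ```ignore
+/// let ws = Workspace::new(&config.cwd().join("Cargo.toml"), &config)?;
+/// let (resolve, packages) = ops::fetch(&ws)?;
+/// ```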
+pub fn fetch<'a>(ws: &Workspace<'a>) -> CargoResult<(Resolve, PackageSet<'a>)> { + let (packages, resolve) = ops::resolve_ws(ws)?; + for id in resolve.iter() { + packages.get(id)?; + } + Ok((resolve, packages)) +} diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs new file mode 100644 index 000000000..741974e3b --- /dev/null +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -0,0 +1,213 @@ +use std::collections::{BTreeMap, HashSet}; + +use termcolor::Color::{self, Cyan, Green, Red}; + +use core::PackageId; +use core::registry::PackageRegistry; +use core::{Resolve, SourceId, Workspace}; +use core::resolver::Method; +use ops; +use util::config::Config; +use util::CargoResult; + +pub struct UpdateOptions<'a> { + pub config: &'a Config, + pub to_update: Vec, + pub precise: Option<&'a str>, + pub aggressive: bool, +} + +pub fn generate_lockfile(ws: &Workspace) -> CargoResult<()> { + let mut registry = PackageRegistry::new(ws.config())?; + let resolve = ops::resolve_with_previous( + &mut registry, + ws, + Method::Everything, + None, + None, + &[], + true, + true, + )?; + ops::write_pkg_lockfile(ws, &resolve)?; + Ok(()) +} + +pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()> { + if opts.aggressive && opts.precise.is_some() { + bail!("cannot specify both aggressive and precise simultaneously") + } + + if ws.members().is_empty() { + bail!("you can't generate a lockfile for an empty workspace.") + } + + if opts.config.cli_unstable().offline { + bail!("you can't update in the offline mode"); + } + + let previous_resolve = match ops::load_pkg_lockfile(ws)? { + Some(resolve) => resolve, + None => return generate_lockfile(ws), + }; + let mut registry = PackageRegistry::new(opts.config)?; + let mut to_avoid = HashSet::new(); + + if opts.to_update.is_empty() { + to_avoid.extend(previous_resolve.iter()); + } else { + let mut sources = Vec::new(); + for name in opts.to_update.iter() { + let dep = previous_resolve.query(name)?; + if opts.aggressive { + fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new()); + } else { + to_avoid.insert(dep); + sources.push(match opts.precise { + Some(precise) => { + // TODO: see comment in `resolve.rs` as well, but this + // seems like a pretty hokey reason to single out + // the registry as well. + let precise = if dep.source_id().is_registry() { + format!("{}={}->{}", dep.name(), dep.version(), precise) + } else { + precise.to_string() + }; + dep.source_id().clone().with_precise(Some(precise)) + } + None => dep.source_id().clone().with_precise(None), + }); + } + } + registry.add_sources(&sources)?; + } + + let resolve = ops::resolve_with_previous( + &mut registry, + ws, + Method::Everything, + Some(&previous_resolve), + Some(&to_avoid), + &[], + true, + true, + )?; + + // Summarize what is changing for the user. 
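+    // Roughly: a one-for-one registry change prints "Updating foo v0.1.0 -> v0.2.0",
+    // a git dependency prints the new short revision ("foo v0.1.0 -> #abcdef12"),
+    // and anything else is reported as separate "Removing"/"Adding" lines.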
+ let print_change = |status: &str, msg: String, color: Color| { + opts.config.shell().status_with_color(status, msg, color) + }; + for (removed, added) in compare_dependency_graphs(&previous_resolve, &resolve) { + if removed.len() == 1 && added.len() == 1 { + let msg = if removed[0].source_id().is_git() { + format!( + "{} -> #{}", + removed[0], + &added[0].source_id().precise().unwrap()[..8] + ) + } else { + format!("{} -> v{}", removed[0], added[0].version()) + }; + print_change("Updating", msg, Green)?; + } else { + for package in removed.iter() { + print_change("Removing", format!("{}", package), Red)?; + } + for package in added.iter() { + print_change("Adding", format!("{}", package), Cyan)?; + } + } + } + + ops::write_pkg_lockfile(ws, &resolve)?; + return Ok(()); + + fn fill_with_deps<'a>( + resolve: &'a Resolve, + dep: &'a PackageId, + set: &mut HashSet<&'a PackageId>, + visited: &mut HashSet<&'a PackageId>, + ) { + if !visited.insert(dep) { + return; + } + set.insert(dep); + for dep in resolve.deps(dep) { + fill_with_deps(resolve, dep, set, visited); + } + } + + fn compare_dependency_graphs<'a>( + previous_resolve: &'a Resolve, + resolve: &'a Resolve, + ) -> Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> { + fn key(dep: &PackageId) -> (&str, &SourceId) { + (dep.name().to_inner(), dep.source_id()) + } + + // Removes all package ids in `b` from `a`. Note that this is somewhat + // more complicated because the equality for source ids does not take + // precise versions into account (e.g. git shas), but we want to take + // that into account here. + fn vec_subtract<'a>(a: &[&'a PackageId], b: &[&'a PackageId]) -> Vec<&'a PackageId> { + a.iter() + .filter(|a| { + // If this package id is not found in `b`, then it's definitely + // in the subtracted set + let i = match b.binary_search(a) { + Ok(i) => i, + Err(..) => return true, + }; + + // If we've found `a` in `b`, then we iterate over all instances + // (we know `b` is sorted) and see if they all have different + // precise versions. If so, then `a` isn't actually in `b` so + // we'll let it through. + // + // Note that we only check this for non-registry sources, + // however, as registries contain enough version information in + // the package id to disambiguate + if a.source_id().is_registry() { + return false; + } + b[i..] + .iter() + .take_while(|b| a == b) + .all(|b| a.source_id().precise() != b.source_id().precise()) + }) + .cloned() + .collect() + } + + // Map (package name, package source) to (removed versions, added versions). 
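+        // Every id from the previous lockfile lands in the "removed" slot and
+        // every id from the new resolve in the "added" slot; `vec_subtract`
+        // below then cancels the ids common to both, so only genuine changes
+        // survive per (name, source) key.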
+ let mut changes = BTreeMap::new(); + let empty = (Vec::new(), Vec::new()); + for dep in previous_resolve.iter() { + changes + .entry(key(dep)) + .or_insert_with(|| empty.clone()) + .0 + .push(dep); + } + for dep in resolve.iter() { + changes + .entry(key(dep)) + .or_insert_with(|| empty.clone()) + .1 + .push(dep); + } + + for v in changes.values_mut() { + let (ref mut old, ref mut new) = *v; + old.sort(); + new.sort(); + let removed = vec_subtract(old, new); + let added = vec_subtract(new, old); + *old = removed; + *new = added; + } + debug!("{:#?}", changes); + + changes.into_iter().map(|(_, v)| v).collect() + } +} diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs new file mode 100644 index 000000000..10449b95e --- /dev/null +++ b/src/cargo/ops/cargo_install.rs @@ -0,0 +1,801 @@ +use std::collections::btree_map::Entry; +use std::collections::{BTreeMap, BTreeSet}; +use std::{env, fs}; +use std::io::prelude::*; +use std::io::SeekFrom; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use semver::{Version, VersionReq}; +use tempfile::Builder as TempFileBuilder; +use toml; + +use core::{Dependency, Package, PackageIdSpec, Source, SourceId}; +use core::{PackageId, Workspace}; +use ops::{self, CompileFilter, DefaultExecutor}; +use sources::{GitSource, PathSource, SourceConfigMap}; +use util::{internal, Config}; +use util::{FileLock, Filesystem}; +use util::errors::{CargoResult, CargoResultExt}; +use util::paths; + +#[derive(Deserialize, Serialize)] +#[serde(untagged)] +enum CrateListing { + V1(CrateListingV1), + Empty(Empty), +} + +#[derive(Deserialize, Serialize)] +#[serde(deny_unknown_fields)] +struct Empty {} + +#[derive(Deserialize, Serialize)] +struct CrateListingV1 { + v1: BTreeMap>, +} + +struct Transaction { + bins: Vec, +} + +impl Transaction { + fn success(mut self) { + self.bins.clear(); + } +} + +impl Drop for Transaction { + fn drop(&mut self) { + for bin in self.bins.iter() { + let _ = paths::remove_file(bin); + } + } +} + +pub fn install( + root: Option<&str>, + krates: Vec<&str>, + source_id: &SourceId, + vers: Option<&str>, + opts: &ops::CompileOptions, + force: bool, +) -> CargoResult<()> { + let root = resolve_root(root, opts.config)?; + let map = SourceConfigMap::new(opts.config)?; + + let (installed_anything, scheduled_error) = if krates.len() <= 1 { + install_one( + &root, + &map, + krates.into_iter().next(), + source_id, + vers, + opts, + force, + true, + )?; + (true, false) + } else { + let mut succeeded = vec![]; + let mut failed = vec![]; + let mut first = true; + for krate in krates { + let root = root.clone(); + let map = map.clone(); + match install_one( + &root, + &map, + Some(krate), + source_id, + vers, + opts, + force, + first, + ) { + Ok(()) => succeeded.push(krate), + Err(e) => { + ::handle_error(e, &mut opts.config.shell()); + failed.push(krate) + } + } + first = false; + } + + let mut summary = vec![]; + if !succeeded.is_empty() { + summary.push(format!("Successfully installed {}!", succeeded.join(", "))); + } + if !failed.is_empty() { + summary.push(format!( + "Failed to install {} (see error(s) above).", + failed.join(", ") + )); + } + if !succeeded.is_empty() || !failed.is_empty() { + opts.config.shell().status("Summary", summary.join(" "))?; + } + + (!succeeded.is_empty(), !failed.is_empty()) + }; + + if installed_anything { + // Print a warning that if this directory isn't in PATH that they won't be + // able to run these commands. 
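+        // `dst` here is `<install root>/bin`, e.g. `~/.cargo/bin` with the
+        // default root; the loop below returns early when it is already on
+        // PATH, so the warning only fires when it is not.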
+ let dst = metadata(opts.config, &root)?.parent().join("bin"); + let path = env::var_os("PATH").unwrap_or_default(); + for path in env::split_paths(&path) { + if path == dst { + return Ok(()); + } + } + + opts.config.shell().warn(&format!( + "be sure to add `{}` to your PATH to be \ + able to run the installed binaries", + dst.display() + ))?; + } + + if scheduled_error { + bail!("some crates failed to install"); + } + + Ok(()) +} + +fn install_one( + root: &Filesystem, + map: &SourceConfigMap, + krate: Option<&str>, + source_id: &SourceId, + vers: Option<&str>, + opts: &ops::CompileOptions, + force: bool, + is_first_install: bool, +) -> CargoResult<()> { + let config = opts.config; + + let (pkg, source) = if source_id.is_git() { + select_pkg( + GitSource::new(source_id, config)?, + krate, + vers, + config, + is_first_install, + &mut |git| git.read_packages(), + )? + } else if source_id.is_path() { + let path = source_id + .url() + .to_file_path() + .map_err(|()| format_err!("path sources must have a valid path"))?; + let mut src = PathSource::new(&path, source_id, config); + src.update().chain_err(|| { + format_err!( + "`{}` is not a crate root; specify a crate to \ + install from crates.io, or use --path or --git to \ + specify an alternate source", + path.display() + ) + })?; + select_pkg( + PathSource::new(&path, source_id, config), + krate, + vers, + config, + is_first_install, + &mut |path| path.read_packages(), + )? + } else { + select_pkg( + map.load(source_id)?, + krate, + vers, + config, + is_first_install, + &mut |_| { + bail!( + "must specify a crate to install from \ + crates.io, or use --path or --git to \ + specify alternate source" + ) + }, + )? + }; + + let mut td_opt = None; + let mut needs_cleanup = false; + let overidden_target_dir = if source_id.is_path() { + None + } else if let Some(dir) = config.target_dir()? { + Some(dir) + } else if let Ok(td) = TempFileBuilder::new().prefix("cargo-install").tempdir() { + let p = td.path().to_owned(); + td_opt = Some(td); + Some(Filesystem::new(p)) + } else { + needs_cleanup = true; + Some(Filesystem::new(config.cwd().join("target-install"))) + }; + + let ws = match overidden_target_dir { + Some(dir) => Workspace::ephemeral(pkg, config, Some(dir), false)?, + None => { + let mut ws = Workspace::new(pkg.manifest_path(), config)?; + ws.set_require_optional_deps(false); + ws + } + }; + let pkg = ws.current()?; + + config.shell().status("Installing", pkg)?; + + // Preflight checks to check up front whether we'll overwrite something. + // We have to check this again afterwards, but may as well avoid building + // anything if we're gonna throw it away anyway. 
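+    // (The check after the build is the authoritative one; this early pass
+    // only exists so an obvious conflict fails before any compilation.)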
+ { + let metadata = metadata(config, root)?; + let list = read_crate_list(&metadata)?; + let dst = metadata.parent().join("bin"); + check_overwrites(&dst, pkg, &opts.filter, &list, force)?; + } + + let compile = + ops::compile_ws(&ws, Some(source), opts, Arc::new(DefaultExecutor)).chain_err(|| { + if let Some(td) = td_opt.take() { + // preserve the temporary directory, so the user can inspect it + td.into_path(); + } + + format_err!( + "failed to compile `{}`, intermediate artifacts can be \ + found at `{}`", + pkg, + ws.target_dir().display() + ) + })?; + let binaries: Vec<(&str, &Path)> = compile + .binaries + .iter() + .map(|bin| { + let name = bin.file_name().unwrap(); + if let Some(s) = name.to_str() { + Ok((s, bin.as_ref())) + } else { + bail!("Binary `{:?}` name can't be serialized into string", name) + } + }) + .collect::>()?; + if binaries.is_empty() { + bail!( + "no binaries are available for install using the selected \ + features" + ); + } + + let metadata = metadata(config, root)?; + let mut list = read_crate_list(&metadata)?; + let dst = metadata.parent().join("bin"); + let duplicates = check_overwrites(&dst, pkg, &opts.filter, &list, force)?; + + fs::create_dir_all(&dst)?; + + // Copy all binaries to a temporary directory under `dst` first, catching + // some failure modes (e.g. out of space) before touching the existing + // binaries. This directory will get cleaned up via RAII. + let staging_dir = TempFileBuilder::new() + .prefix("cargo-install") + .tempdir_in(&dst)?; + for &(bin, src) in binaries.iter() { + let dst = staging_dir.path().join(bin); + // Try to move if `target_dir` is transient. + if !source_id.is_path() && fs::rename(src, &dst).is_ok() { + continue; + } + fs::copy(src, &dst).chain_err(|| { + format_err!("failed to copy `{}` to `{}`", src.display(), dst.display()) + })?; + } + + let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries + .iter() + .map(|&(bin, _)| bin) + .partition(|&bin| duplicates.contains_key(bin)); + + let mut installed = Transaction { bins: Vec::new() }; + + // Move the temporary copies into `dst` starting with new binaries. + for bin in to_install.iter() { + let src = staging_dir.path().join(bin); + let dst = dst.join(bin); + config.shell().status("Installing", dst.display())?; + fs::rename(&src, &dst).chain_err(|| { + format_err!("failed to move `{}` to `{}`", src.display(), dst.display()) + })?; + installed.bins.push(dst); + } + + // Repeat for binaries which replace existing ones but don't pop the error + // up until after updating metadata. + let mut replaced_names = Vec::new(); + let result = { + let mut try_install = || -> CargoResult<()> { + for &bin in to_replace.iter() { + let src = staging_dir.path().join(bin); + let dst = dst.join(bin); + config.shell().status("Replacing", dst.display())?; + fs::rename(&src, &dst).chain_err(|| { + format_err!("failed to move `{}` to `{}`", src.display(), dst.display()) + })?; + replaced_names.push(bin); + } + Ok(()) + }; + try_install() + }; + + // Update records of replaced binaries. + for &bin in replaced_names.iter() { + if let Some(&Some(ref p)) = duplicates.get(bin) { + if let Some(set) = list.v1.get_mut(p) { + set.remove(bin); + } + } + list.v1 + .entry(pkg.package_id().clone()) + .or_insert_with(BTreeSet::new) + .insert(bin.to_string()); + } + + // Remove empty metadata lines. 
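+    // That is, packages whose recorded binary set became empty after the
+    // replacements above are dropped from the `v1` table entirely.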
+ let pkgs = list.v1 + .iter() + .filter_map(|(p, set)| { + if set.is_empty() { + Some(p.clone()) + } else { + None + } + }) + .collect::>(); + for p in pkgs.iter() { + list.v1.remove(p); + } + + // If installation was successful record newly installed binaries. + if result.is_ok() { + list.v1 + .entry(pkg.package_id().clone()) + .or_insert_with(BTreeSet::new) + .extend(to_install.iter().map(|s| s.to_string())); + } + + let write_result = write_crate_list(&metadata, list); + match write_result { + // Replacement error (if any) isn't actually caused by write error + // but this seems to be the only way to show both. + Err(err) => result.chain_err(|| err)?, + Ok(_) => result?, + } + + // Reaching here means all actions have succeeded. Clean up. + installed.success(); + if needs_cleanup { + // Don't bother grabbing a lock as we're going to blow it all away + // anyway. + let target_dir = ws.target_dir().into_path_unlocked(); + paths::remove_dir_all(&target_dir)?; + } + + Ok(()) +} + +fn select_pkg<'a, T>( + mut source: T, + name: Option<&str>, + vers: Option<&str>, + config: &Config, + needs_update: bool, + list_all: &mut FnMut(&mut T) -> CargoResult>, +) -> CargoResult<(Package, Box)> +where + T: Source + 'a, +{ + if needs_update { + source.update()?; + } + + match name { + Some(name) => { + let vers = match vers { + Some(v) => { + // If the version begins with character <, >, =, ^, ~ parse it as a + // version range, otherwise parse it as a specific version + let first = v.chars() + .nth(0) + .ok_or_else(|| format_err!("no version provided for the `--vers` flag"))?; + + match first { + '<' | '>' | '=' | '^' | '~' => match v.parse::() { + Ok(v) => Some(v.to_string()), + Err(_) => bail!( + "the `--vers` provided, `{}`, is \ + not a valid semver version requirement\n\n + Please have a look at \ + http://doc.crates.io/specifying-dependencies.html \ + for the correct format", + v + ), + }, + _ => match v.parse::() { + Ok(v) => Some(format!("={}", v)), + Err(_) => { + let mut msg = format!( + "\ + the `--vers` provided, `{}`, is \ + not a valid semver version\n\n\ + historically Cargo treated this \ + as a semver version requirement \ + accidentally\nand will continue \ + to do so, but this behavior \ + will be removed eventually", + v + ); + + // If it is not a valid version but it is a valid version + // requirement, add a note to the warning + if v.parse::().is_ok() { + msg.push_str(&format!( + "\nif you want to specify semver range, \ + add an explicit qualifier, like ^{}", + v + )); + } + config.shell().warn(&msg)?; + Some(v.to_string()) + } + }, + } + } + None => None, + }; + let vers = vers.as_ref().map(|s| &**s); + let dep = Dependency::parse_no_deprecated(name, vers, source.source_id())?; + let deps = source.query_vec(&dep)?; + match deps.iter().map(|p| p.package_id()).max() { + Some(pkgid) => { + let pkg = source.download(pkgid)?; + Ok((pkg, Box::new(source))) + } + None => { + let vers_info = vers.map(|v| format!(" with version `{}`", v)) + .unwrap_or_default(); + Err(format_err!( + "could not find `{}` in {}{}", + name, + source.source_id(), + vers_info + )) + } + } + } + None => { + let candidates = list_all(&mut source)?; + let binaries = candidates + .iter() + .filter(|cand| cand.targets().iter().filter(|t| t.is_bin()).count() > 0); + let examples = candidates + .iter() + .filter(|cand| cand.targets().iter().filter(|t| t.is_example()).count() > 0); + let pkg = match one(binaries, |v| multi_err("binaries", v))? 
{ + Some(p) => p, + None => match one(examples, |v| multi_err("examples", v))? { + Some(p) => p, + None => bail!( + "no packages found with binaries or \ + examples" + ), + }, + }; + return Ok((pkg.clone(), Box::new(source))); + + fn multi_err(kind: &str, mut pkgs: Vec<&Package>) -> String { + pkgs.sort_by(|a, b| a.name().cmp(&b.name())); + format!( + "multiple packages with {} found: {}", + kind, + pkgs.iter() + .map(|p| p.name().to_inner()) + .collect::>() + .join(", ") + ) + } + } + } +} + +fn one(mut i: I, f: F) -> CargoResult> +where + I: Iterator, + F: FnOnce(Vec) -> String, +{ + match (i.next(), i.next()) { + (Some(i1), Some(i2)) => { + let mut v = vec![i1, i2]; + v.extend(i); + Err(format_err!("{}", f(v))) + } + (Some(i), None) => Ok(Some(i)), + (None, _) => Ok(None), + } +} + +fn check_overwrites( + dst: &Path, + pkg: &Package, + filter: &ops::CompileFilter, + prev: &CrateListingV1, + force: bool, +) -> CargoResult>> { + // If explicit --bin or --example flags were passed then those'll + // get checked during cargo_compile, we only care about the "build + // everything" case here + if !filter.is_specific() && !pkg.targets().iter().any(|t| t.is_bin()) { + bail!("specified package has no binaries") + } + let duplicates = find_duplicates(dst, pkg, filter, prev); + if force || duplicates.is_empty() { + return Ok(duplicates); + } + // Format the error message. + let mut msg = String::new(); + for (bin, p) in duplicates.iter() { + msg.push_str(&format!("binary `{}` already exists in destination", bin)); + if let Some(p) = p.as_ref() { + msg.push_str(&format!(" as part of `{}`\n", p)); + } else { + msg.push_str("\n"); + } + } + msg.push_str("Add --force to overwrite"); + Err(format_err!("{}", msg)) +} + +fn find_duplicates( + dst: &Path, + pkg: &Package, + filter: &ops::CompileFilter, + prev: &CrateListingV1, +) -> BTreeMap> { + let check = |name: String| { + // Need to provide type, works around Rust Issue #93349 + let name = format!("{}{}", name, env::consts::EXE_SUFFIX); + if fs::metadata(dst.join(&name)).is_err() { + None + } else if let Some((p, _)) = prev.v1.iter().find(|&(_, v)| v.contains(&name)) { + Some((name, Some(p.clone()))) + } else { + Some((name, None)) + } + }; + match *filter { + CompileFilter::Default { .. } => pkg.targets() + .iter() + .filter(|t| t.is_bin()) + .filter_map(|t| check(t.name().to_string())) + .collect(), + CompileFilter::Only { + ref bins, + ref examples, + .. 
+ } => { + let all_bins: Vec = bins.try_collect().unwrap_or_else(|| { + pkg.targets() + .iter() + .filter(|t| t.is_bin()) + .map(|t| t.name().to_string()) + .collect() + }); + let all_examples: Vec = examples.try_collect().unwrap_or_else(|| { + pkg.targets() + .iter() + .filter(|t| t.is_bin_example()) + .map(|t| t.name().to_string()) + .collect() + }); + + all_bins + .iter() + .chain(all_examples.iter()) + .filter_map(|t| check(t.clone())) + .collect::>>() + } + } +} + +fn read_crate_list(file: &FileLock) -> CargoResult { + let listing = (|| -> CargoResult<_> { + let mut contents = String::new(); + file.file().read_to_string(&mut contents)?; + let listing = + toml::from_str(&contents).chain_err(|| internal("invalid TOML found for metadata"))?; + match listing { + CrateListing::V1(v1) => Ok(v1), + CrateListing::Empty(_) => Ok(CrateListingV1 { + v1: BTreeMap::new(), + }), + } + })() + .chain_err(|| { + format_err!( + "failed to parse crate metadata at `{}`", + file.path().to_string_lossy() + ) + })?; + Ok(listing) +} + +fn write_crate_list(file: &FileLock, listing: CrateListingV1) -> CargoResult<()> { + (|| -> CargoResult<_> { + let mut file = file.file(); + file.seek(SeekFrom::Start(0))?; + file.set_len(0)?; + let data = toml::to_string(&CrateListing::V1(listing))?; + file.write_all(data.as_bytes())?; + Ok(()) + })() + .chain_err(|| { + format_err!( + "failed to write crate metadata at `{}`", + file.path().to_string_lossy() + ) + })?; + Ok(()) +} + +pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> { + let dst = resolve_root(dst, config)?; + let dst = metadata(config, &dst)?; + let list = read_crate_list(&dst)?; + for (k, v) in list.v1.iter() { + println!("{}:", k); + for bin in v { + println!(" {}", bin); + } + } + Ok(()) +} + +pub fn uninstall( + root: Option<&str>, + specs: Vec<&str>, + bins: &[String], + config: &Config, +) -> CargoResult<()> { + if specs.len() > 1 && !bins.is_empty() { + bail!("A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant."); + } + + let root = resolve_root(root, config)?; + let scheduled_error = if specs.len() == 1 { + uninstall_one(&root, specs[0], bins, config)?; + false + } else { + let mut succeeded = vec![]; + let mut failed = vec![]; + for spec in specs { + let root = root.clone(); + match uninstall_one(&root, spec, bins, config) { + Ok(()) => succeeded.push(spec), + Err(e) => { + ::handle_error(e, &mut config.shell()); + failed.push(spec) + } + } + } + + let mut summary = vec![]; + if !succeeded.is_empty() { + summary.push(format!( + "Successfully uninstalled {}!", + succeeded.join(", ") + )); + } + if !failed.is_empty() { + summary.push(format!( + "Failed to uninstall {} (see error(s) above).", + failed.join(", ") + )); + } + + if !succeeded.is_empty() || !failed.is_empty() { + config.shell().status("Summary", summary.join(" "))?; + } + + !failed.is_empty() + }; + + if scheduled_error { + bail!("some packages failed to uninstall"); + } + + Ok(()) +} + +pub fn uninstall_one( + root: &Filesystem, + spec: &str, + bins: &[String], + config: &Config, +) -> CargoResult<()> { + let crate_metadata = metadata(config, root)?; + let mut metadata = read_crate_list(&crate_metadata)?; + let mut to_remove = Vec::new(); + { + let result = PackageIdSpec::query_str(spec, metadata.v1.keys())?.clone(); + let mut installed = match metadata.v1.entry(result.clone()) { + Entry::Occupied(e) => e, + Entry::Vacant(..) 
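+            // This arm should be unreachable: `PackageIdSpec::query_str` above
+            // only returns a spec that matched an existing key of `metadata.v1`.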
=> panic!("entry not found: {}", result), + }; + let dst = crate_metadata.parent().join("bin"); + for bin in installed.get() { + let bin = dst.join(bin); + if fs::metadata(&bin).is_err() { + bail!( + "corrupt metadata, `{}` does not exist when it should", + bin.display() + ) + } + } + + let bins = bins.iter() + .map(|s| { + if s.ends_with(env::consts::EXE_SUFFIX) { + s.to_string() + } else { + format!("{}{}", s, env::consts::EXE_SUFFIX) + } + }) + .collect::>(); + + for bin in bins.iter() { + if !installed.get().contains(bin) { + bail!("binary `{}` not installed as part of `{}`", bin, result) + } + } + + if bins.is_empty() { + to_remove.extend(installed.get().iter().map(|b| dst.join(b))); + installed.get_mut().clear(); + } else { + for bin in bins.iter() { + to_remove.push(dst.join(bin)); + installed.get_mut().remove(bin); + } + } + if installed.get().is_empty() { + installed.remove(); + } + } + write_crate_list(&crate_metadata, metadata)?; + for bin in to_remove { + config.shell().status("Removing", bin.display())?; + paths::remove_file(bin)?; + } + + Ok(()) +} + +fn metadata(config: &Config, root: &Filesystem) -> CargoResult { + root.open_rw(Path::new(".crates.toml"), config, "crate metadata") +} + +fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult { + let config_root = config.get_path("install.root")?; + Ok(flag.map(PathBuf::from) + .or_else(|| env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from)) + .or_else(move || config_root.map(|v| v.val)) + .map(Filesystem::new) + .unwrap_or_else(|| config.home().clone())) +} diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs new file mode 100644 index 000000000..4d53ede2f --- /dev/null +++ b/src/cargo/ops/cargo_new.rs @@ -0,0 +1,660 @@ +use std::collections::BTreeMap; +use std::env; +use std::fs; +use std::fmt; +use std::path::{Path, PathBuf}; + +use git2::Config as GitConfig; +use git2::Repository as GitRepository; + +use core::Workspace; +use ops::is_bad_artifact_name; +use util::{internal, FossilRepo, GitRepo, HgRepo, PijulRepo}; +use util::{paths, Config}; +use util::errors::{CargoResult, CargoResultExt}; + +use toml; + +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum VersionControl { + Git, + Hg, + Pijul, + Fossil, + NoVcs, +} + +#[derive(Debug)] +pub struct NewOptions { + pub version_control: Option, + pub kind: NewProjectKind, + /// Absolute path to the directory for the new project + pub path: PathBuf, + pub name: Option, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum NewProjectKind { + Bin, + Lib, +} + +impl NewProjectKind { + fn is_bin(&self) -> bool { + *self == NewProjectKind::Bin + } +} + +impl fmt::Display for NewProjectKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + NewProjectKind::Bin => "binary (application)", + NewProjectKind::Lib => "library", + }.fmt(f) + } +} + +struct SourceFileInformation { + relative_path: String, + target_name: String, + bin: bool, +} + +struct MkOptions<'a> { + version_control: Option, + path: &'a Path, + name: &'a str, + source_files: Vec, + bin: bool, +} + +impl NewOptions { + pub fn new( + version_control: Option, + bin: bool, + lib: bool, + path: PathBuf, + name: Option, + ) -> CargoResult { + let kind = match (bin, lib) { + (true, true) => bail!("can't specify both lib and binary outputs"), + (false, true) => NewProjectKind::Lib, + // default to bin + (_, false) => NewProjectKind::Bin, + }; + + let opts = NewOptions { + version_control, + kind, + path, + name, + }; + Ok(opts) + } +} + +struct CargoNewConfig { + 
name: Option, + email: Option, + version_control: Option, +} + +fn get_name<'a>(path: &'a Path, opts: &'a NewOptions) -> CargoResult<&'a str> { + if let Some(ref name) = opts.name { + return Ok(name); + } + + let file_name = path.file_name().ok_or_else(|| { + format_err!( + "cannot auto-detect project name from path {:?} ; use --name to override", + path.as_os_str() + ) + })?; + + file_name.to_str().ok_or_else(|| { + format_err!( + "cannot create project with a non-unicode name: {:?}", + file_name + ) + }) +} + +fn check_name(name: &str, opts: &NewOptions) -> CargoResult<()> { + // If --name is already used to override, no point in suggesting it + // again as a fix. + let name_help = match opts.name { + Some(_) => "", + None => "\nuse --name to override crate name", + }; + + // Ban keywords + test list found at + // https://doc.rust-lang.org/grammar.html#keywords + let blacklist = [ + "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do", + "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop", + "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub", + "pure", "ref", "return", "self", "sizeof", "static", "struct", "super", "test", "trait", + "true", "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield", + ]; + if blacklist.contains(&name) || (opts.kind.is_bin() && is_bad_artifact_name(name)) { + bail!( + "The name `{}` cannot be used as a crate name{}", + name, + name_help + ) + } + + if let Some(ref c) = name.chars().nth(0) { + if c.is_digit(10) { + bail!( + "Package names starting with a digit cannot be used as a crate name{}", + name_help + ) + } + } + + for c in name.chars() { + if c.is_alphanumeric() { + continue; + } + if c == '_' || c == '-' { + continue; + } + bail!( + "Invalid character `{}` in crate name: `{}`{}", + c, + name, + name_help + ) + } + Ok(()) +} + +fn detect_source_paths_and_types( + project_path: &Path, + project_name: &str, + detected_files: &mut Vec, +) -> CargoResult<()> { + let path = project_path; + let name = project_name; + + enum H { + Bin, + Lib, + Detect, + } + + struct Test { + proposed_path: String, + handling: H, + } + + let tests = vec![ + Test { + proposed_path: format!("src/main.rs"), + handling: H::Bin, + }, + Test { + proposed_path: format!("main.rs"), + handling: H::Bin, + }, + Test { + proposed_path: format!("src/{}.rs", name), + handling: H::Detect, + }, + Test { + proposed_path: format!("{}.rs", name), + handling: H::Detect, + }, + Test { + proposed_path: format!("src/lib.rs"), + handling: H::Lib, + }, + Test { + proposed_path: format!("lib.rs"), + handling: H::Lib, + }, + ]; + + for i in tests { + let pp = i.proposed_path; + + // path/pp does not exist or is not a file + if !fs::metadata(&path.join(&pp)) + .map(|x| x.is_file()) + .unwrap_or(false) + { + continue; + } + + let sfi = match i.handling { + H::Bin => SourceFileInformation { + relative_path: pp, + target_name: project_name.to_string(), + bin: true, + }, + H::Lib => SourceFileInformation { + relative_path: pp, + target_name: project_name.to_string(), + bin: false, + }, + H::Detect => { + let content = paths::read(&path.join(pp.clone()))?; + let isbin = content.contains("fn main"); + SourceFileInformation { + relative_path: pp, + target_name: project_name.to_string(), + bin: isbin, + } + } + }; + detected_files.push(sfi); + } + + // Check for duplicate lib attempt + + let mut previous_lib_relpath: Option<&str> = None; + let mut duplicates_checker: 
BTreeMap<&str, &SourceFileInformation> = BTreeMap::new(); + + for i in detected_files { + if i.bin { + if let Some(x) = BTreeMap::get::(&duplicates_checker, i.target_name.as_ref()) { + bail!( + "\ +multiple possible binary sources found: + {} + {} +cannot automatically generate Cargo.toml as the main target would be ambiguous", + &x.relative_path, + &i.relative_path + ); + } + duplicates_checker.insert(i.target_name.as_ref(), i); + } else { + if let Some(plp) = previous_lib_relpath { + bail!( + "cannot have a project with \ + multiple libraries, \ + found both `{}` and `{}`", + plp, + i.relative_path + ) + } + previous_lib_relpath = Some(&i.relative_path); + } + } + + Ok(()) +} + +fn plan_new_source_file(bin: bool, project_name: String) -> SourceFileInformation { + if bin { + SourceFileInformation { + relative_path: "src/main.rs".to_string(), + target_name: project_name, + bin: true, + } + } else { + SourceFileInformation { + relative_path: "src/lib.rs".to_string(), + target_name: project_name, + bin: false, + } + } +} + +pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> { + let path = &opts.path; + if fs::metadata(path).is_ok() { + bail!( + "destination `{}` already exists\n\n\ + Use `cargo init` to initialize the directory", + path.display() + ) + } + + let name = get_name(path, opts)?; + check_name(name, opts)?; + + let mkopts = MkOptions { + version_control: opts.version_control, + path: &path, + name, + source_files: vec![plan_new_source_file(opts.kind.is_bin(), name.to_string())], + bin: opts.kind.is_bin(), + }; + + mk(config, &mkopts).chain_err(|| { + format_err!( + "Failed to create project `{}` at `{}`", + name, + path.display() + ) + })?; + Ok(()) +} + +pub fn init(opts: &NewOptions, config: &Config) -> CargoResult<()> { + let path = &opts.path; + + if fs::metadata(&path.join("Cargo.toml")).is_ok() { + bail!("`cargo init` cannot be run on existing Cargo projects") + } + + let name = get_name(&path, opts)?; + check_name(name, opts)?; + + let mut src_paths_types = vec![]; + + detect_source_paths_and_types(&path, name, &mut src_paths_types)?; + + if src_paths_types.is_empty() { + src_paths_types.push(plan_new_source_file(opts.kind.is_bin(), name.to_string())); + } else { + // --bin option may be ignored if lib.rs or src/lib.rs present + // Maybe when doing `cargo init --bin` inside a library project stub, + // user may mean "initialize for library, but also add binary target" + } + + let mut version_control = opts.version_control; + + if version_control == None { + let mut num_detected_vsces = 0; + + if fs::metadata(&path.join(".git")).is_ok() { + version_control = Some(VersionControl::Git); + num_detected_vsces += 1; + } + + if fs::metadata(&path.join(".hg")).is_ok() { + version_control = Some(VersionControl::Hg); + num_detected_vsces += 1; + } + + if fs::metadata(&path.join(".pijul")).is_ok() { + version_control = Some(VersionControl::Pijul); + num_detected_vsces += 1; + } + + if fs::metadata(&path.join(".fossil")).is_ok() { + version_control = Some(VersionControl::Fossil); + num_detected_vsces += 1; + } + + // if none exists, maybe create git, like in `cargo new` + + if num_detected_vsces > 1 { + bail!( + "more than one of .hg, .git, .pijul, .fossil configurations \ + found and the ignore file can't be filled in as \ + a result. 
specify --vcs to override detection" + ); + } + } + + let mkopts = MkOptions { + version_control, + path, + name, + bin: src_paths_types.iter().any(|x| x.bin), + source_files: src_paths_types, + }; + + mk(config, &mkopts).chain_err(|| { + format_err!( + "Failed to create project `{}` at `{}`", + name, + path.display() + ) + })?; + Ok(()) +} + +fn existing_vcs_repo(path: &Path, cwd: &Path) -> bool { + GitRepo::discover(path, cwd).is_ok() || HgRepo::discover(path, cwd).is_ok() +} + +fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> { + let path = opts.path; + let name = opts.name; + let cfg = global_config(config)?; + // Please ensure that ignore and hgignore are in sync. + let ignore = [ + "\n", + "/target\n", + "**/*.rs.bk\n", + if !opts.bin { "Cargo.lock\n" } else { "" }, + ].concat(); + // Mercurial glob ignores can't be rooted, so just sticking a 'syntax: glob' at the top of the + // file will exclude too much. Instead, use regexp-based ignores. See 'hg help ignore' for + // more. + let hgignore = [ + "\n", + "^target/\n", + "glob:*.rs.bk\n", + if !opts.bin { "glob:Cargo.lock\n" } else { "" }, + ].concat(); + + let vcs = opts.version_control.unwrap_or_else(|| { + let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd()); + match (cfg.version_control, in_existing_vcs) { + (None, false) => VersionControl::Git, + (Some(opt), false) => opt, + (_, true) => VersionControl::NoVcs, + } + }); + + match vcs { + VersionControl::Git => { + if !fs::metadata(&path.join(".git")).is_ok() { + GitRepo::init(path, config.cwd())?; + } + paths::append(&path.join(".gitignore"), ignore.as_bytes())?; + } + VersionControl::Hg => { + if !fs::metadata(&path.join(".hg")).is_ok() { + HgRepo::init(path, config.cwd())?; + } + paths::append(&path.join(".hgignore"), hgignore.as_bytes())?; + } + VersionControl::Pijul => { + if !fs::metadata(&path.join(".pijul")).is_ok() { + PijulRepo::init(path, config.cwd())?; + } + paths::append(&path.join(".ignore"), ignore.as_bytes())?; + } + VersionControl::Fossil => { + if !fs::metadata(&path.join(".fossil")).is_ok() { + FossilRepo::init(path, config.cwd())?; + } + } + VersionControl::NoVcs => { + fs::create_dir_all(path)?; + } + }; + + let (author_name, email) = discover_author()?; + // Hoo boy, sure glad we've got exhaustiveness checking behind us. 
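+    // Precedence sketch: a name/email pair from `[cargo-new]` config wins, the
+    // discovered git/env value fills whichever half is missing, and a missing
+    // email yields a bare name with no `<...>` brackets.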
+ let author = match (cfg.name, cfg.email, author_name, email) { + (Some(name), Some(email), _, _) + | (Some(name), None, _, Some(email)) + | (None, Some(email), name, _) + | (None, None, name, Some(email)) => format!("{} <{}>", name, email), + (Some(name), None, _, None) | (None, None, name, None) => name, + }; + + let mut cargotoml_path_specifier = String::new(); + + // Calculate what [lib] and [[bin]]s do we need to append to Cargo.toml + + for i in &opts.source_files { + if i.bin { + if i.relative_path != "src/main.rs" { + cargotoml_path_specifier.push_str(&format!( + r#" +[[bin]] +name = "{}" +path = {} +"#, + i.target_name, + toml::Value::String(i.relative_path.clone()) + )); + } + } else if i.relative_path != "src/lib.rs" { + cargotoml_path_specifier.push_str(&format!( + r#" +[lib] +name = "{}" +path = {} +"#, + i.target_name, + toml::Value::String(i.relative_path.clone()) + )); + } + } + + // Create Cargo.toml file with necessary [lib] and [[bin]] sections, if needed + + paths::write( + &path.join("Cargo.toml"), + format!( + r#"[package] +name = "{}" +version = "0.1.0" +authors = [{}] + +[dependencies] +{}"#, + name, + toml::Value::String(author), + cargotoml_path_specifier + ).as_bytes(), + )?; + + // Create all specified source files + // (with respective parent directories) + // if they are don't exist + + for i in &opts.source_files { + let path_of_source_file = path.join(i.relative_path.clone()); + + if let Some(src_dir) = path_of_source_file.parent() { + fs::create_dir_all(src_dir)?; + } + + let default_file_content: &[u8] = if i.bin { + b"\ +fn main() { + println!(\"Hello, world!\"); +} +" + } else { + b"\ +#[cfg(test)] +mod tests { + #[test] + fn it_works() { + assert_eq!(2 + 2, 4); + } +} +" + }; + + if !fs::metadata(&path_of_source_file) + .map(|x| x.is_file()) + .unwrap_or(false) + { + paths::write(&path_of_source_file, default_file_content)?; + } + } + + if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) { + let msg = format!( + "compiling this new crate may not work due to invalid \ + workspace configuration\n\n{}", + e + ); + config.shell().warn(msg)?; + } + + Ok(()) +} + +fn get_environment_variable(variables: &[&str]) -> Option { + variables.iter().filter_map(|var| env::var(var).ok()).next() +} + +fn discover_author() -> CargoResult<(String, Option)> { + let cwd = env::current_dir()?; + let git_config = if let Ok(repo) = GitRepository::discover(&cwd) { + repo.config() + .ok() + .or_else(|| GitConfig::open_default().ok()) + } else { + GitConfig::open_default().ok() + }; + let git_config = git_config.as_ref(); + let name_variables = [ + "CARGO_NAME", + "GIT_AUTHOR_NAME", + "GIT_COMMITTER_NAME", + "USER", + "USERNAME", + "NAME", + ]; + let name = get_environment_variable(&name_variables[0..3]) + .or_else(|| git_config.and_then(|g| g.get_string("user.name").ok())) + .or_else(|| get_environment_variable(&name_variables[3..])); + + let name = match name { + Some(name) => name, + None => { + let username_var = if cfg!(windows) { "USERNAME" } else { "USER" }; + bail!( + "could not determine the current user, please set ${}", + username_var + ) + } + }; + let email_variables = [ + "CARGO_EMAIL", + "GIT_AUTHOR_EMAIL", + "GIT_COMMITTER_EMAIL", + "EMAIL", + ]; + let email = get_environment_variable(&email_variables[0..3]) + .or_else(|| git_config.and_then(|g| g.get_string("user.email").ok())) + .or_else(|| get_environment_variable(&email_variables[3..])); + + let name = name.trim().to_string(); + let email = email.map(|s| s.trim().to_string()); + + Ok((name, 
email)) +} + +fn global_config(config: &Config) -> CargoResult { + let name = config.get_string("cargo-new.name")?.map(|s| s.val); + let email = config.get_string("cargo-new.email")?.map(|s| s.val); + let vcs = config.get_string("cargo-new.vcs")?; + + let vcs = match vcs.as_ref().map(|p| (&p.val[..], &p.definition)) { + Some(("git", _)) => Some(VersionControl::Git), + Some(("hg", _)) => Some(VersionControl::Hg), + Some(("pijul", _)) => Some(VersionControl::Pijul), + Some(("none", _)) => Some(VersionControl::NoVcs), + Some((s, p)) => { + return Err(internal(format!( + "invalid configuration for key \ + `cargo-new.vcs`, unknown vcs `{}` \ + (found in {})", + s, p + ))) + } + None => None, + }; + Ok(CargoNewConfig { + name, + email, + version_control: vcs, + }) +} diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs new file mode 100644 index 000000000..96f52d367 --- /dev/null +++ b/src/cargo/ops/cargo_output_metadata.rs @@ -0,0 +1,116 @@ +use serde::ser::{self, Serialize}; + +use core::resolver::Resolve; +use core::{Package, PackageId, Workspace}; +use ops::{self, Packages}; +use util::CargoResult; + +const VERSION: u32 = 1; + +pub struct OutputMetadataOptions { + pub features: Vec, + pub no_default_features: bool, + pub all_features: bool, + pub no_deps: bool, + pub version: u32, +} + +/// Loads the manifest, resolves the dependencies of the project to the concrete +/// used versions - considering overrides - and writes all dependencies in a JSON +/// format to stdout. +pub fn output_metadata(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult { + if opt.version != VERSION { + bail!( + "metadata version {} not supported, only {} is currently supported", + opt.version, + VERSION + ); + } + if opt.no_deps { + metadata_no_deps(ws, opt) + } else { + metadata_full(ws, opt) + } +} + +fn metadata_no_deps(ws: &Workspace, _opt: &OutputMetadataOptions) -> CargoResult { + Ok(ExportInfo { + packages: ws.members().cloned().collect(), + workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(), + resolve: None, + target_directory: ws.target_dir().display().to_string(), + version: VERSION, + workspace_root: ws.root().display().to_string(), + }) +} + +fn metadata_full(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult { + let specs = Packages::All.into_package_id_specs(ws)?; + let deps = ops::resolve_ws_precisely( + ws, + None, + &opt.features, + opt.all_features, + opt.no_default_features, + &specs, + )?; + let (packages, resolve) = deps; + + let packages = packages + .package_ids() + .map(|i| packages.get(i).map(|p| p.clone())) + .collect::>>()?; + + Ok(ExportInfo { + packages, + workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(), + resolve: Some(MetadataResolve { + resolve, + root: ws.current_opt().map(|pkg| pkg.package_id().clone()), + }), + target_directory: ws.target_dir().display().to_string(), + version: VERSION, + workspace_root: ws.root().display().to_string(), + }) +} + +#[derive(Serialize)] +pub struct ExportInfo { + packages: Vec, + workspace_members: Vec, + resolve: Option, + target_directory: String, + version: u32, + workspace_root: String, +} + +/// Newtype wrapper to provide a custom `Serialize` implementation. 
+/// The one from lockfile does not fit because it uses a non-standard +/// format for `PackageId`s +#[derive(Serialize)] +struct MetadataResolve { + #[serde(rename = "nodes", serialize_with = "serialize_resolve")] resolve: Resolve, + root: Option, +} + +fn serialize_resolve(resolve: &Resolve, s: S) -> Result +where + S: ser::Serializer, +{ + #[derive(Serialize)] + struct Node<'a> { + id: &'a PackageId, + dependencies: Vec<&'a PackageId>, + features: Vec<&'a str>, + } + + resolve + .iter() + .map(|id| Node { + id, + dependencies: resolve.deps(id).collect(), + features: resolve.features_sorted(id), + }) + .collect::>() + .serialize(s) +} diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs new file mode 100644 index 000000000..1ae724fe7 --- /dev/null +++ b/src/cargo/ops/cargo_package.rs @@ -0,0 +1,388 @@ +use std::fs::{self, File}; +use std::io::SeekFrom; +use std::io::prelude::*; +use std::path::{self, Path}; +use std::sync::Arc; + +use flate2::read::GzDecoder; +use flate2::{Compression, GzBuilder}; +use git2; +use tar::{Archive, Builder, EntryType, Header}; + +use core::{Package, Source, SourceId, Workspace}; +use sources::PathSource; +use util::{self, internal, Config, FileLock}; +use util::paths; +use util::errors::{CargoResult, CargoResultExt}; +use ops::{self, DefaultExecutor}; + +pub struct PackageOpts<'cfg> { + pub config: &'cfg Config, + pub list: bool, + pub check_metadata: bool, + pub allow_dirty: bool, + pub verify: bool, + pub jobs: Option, + pub target: Option, + pub registry: Option, +} + +pub fn package(ws: &Workspace, opts: &PackageOpts) -> CargoResult> { + ops::resolve_ws(ws)?; + let pkg = ws.current()?; + let config = ws.config(); + + let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config); + src.update()?; + + if opts.check_metadata { + check_metadata(pkg, config)?; + } + + verify_dependencies(pkg)?; + + if opts.list { + let root = pkg.root(); + let mut list: Vec<_> = src.list_files(pkg)? + .iter() + .map(|file| util::without_prefix(file, root).unwrap().to_path_buf()) + .collect(); + if include_lockfile(&pkg) { + list.push("Cargo.lock".into()); + } + list.sort(); + for file in list.iter() { + println!("{}", file.display()); + } + return Ok(None); + } + + if !opts.allow_dirty { + check_not_dirty(pkg, &src)?; + } + + let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); + let dir = ws.target_dir().join("package"); + let mut dst = { + let tmp = format!(".{}", filename); + dir.open_rw(&tmp, config, "package scratch space")? + }; + + // Package up and test a temporary tarball and only move it to the final + // location if it actually passes all our tests. Any previously existing + // tarball can be assumed as corrupt or invalid, so we just blow it away if + // it exists. + config + .shell() + .status("Packaging", pkg.package_id().to_string())?; + dst.file().set_len(0)?; + tar(ws, &src, dst.file(), &filename) + .chain_err(|| format_err!("failed to prepare local package for uploading"))?; + if opts.verify { + dst.seek(SeekFrom::Start(0))?; + run_verify(ws, &dst, opts).chain_err(|| "failed to verify package tarball")? 
+ } + dst.seek(SeekFrom::Start(0))?; + { + let src_path = dst.path(); + let dst_path = dst.parent().join(&filename); + fs::rename(&src_path, &dst_path) + .chain_err(|| "failed to move temporary tarball into final location")?; + } + Ok(Some(dst)) +} + +fn include_lockfile(pkg: &Package) -> bool { + pkg.manifest().publish_lockfile() && pkg.targets().iter().any(|t| t.is_example() || t.is_bin()) +} + +// check that the package has some piece of metadata that a human can +// use to tell what the package is about. +fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> { + let md = pkg.manifest().metadata(); + + let mut missing = vec![]; + + macro_rules! lacking { + ($( $($field: ident)||* ),*) => {{ + $( + if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* { + $(missing.push(stringify!($field).replace("_", "-"));)* + } + )* + }} + } + lacking!( + description, + license || license_file, + documentation || homepage || repository + ); + + if !missing.is_empty() { + let mut things = missing[..missing.len() - 1].join(", "); + // things will be empty if and only if length == 1 (i.e. the only case + // to have no `or`). + if !things.is_empty() { + things.push_str(" or "); + } + things.push_str(missing.last().unwrap()); + + config.shell().warn(&format!( + "manifest has no {things}.\n\ + See http://doc.crates.io/manifest.html#package-metadata for more info.", + things = things + ))? + } + Ok(()) +} + +// check that the package dependencies are safe to deploy. +fn verify_dependencies(pkg: &Package) -> CargoResult<()> { + for dep in pkg.dependencies() { + if dep.source_id().is_path() && !dep.specified_req() { + bail!( + "all path dependencies must have a version specified \ + when packaging.\ndependency `{}` does not specify \ + a version.", + dep.name() + ) + } + } + Ok(()) +} + +fn check_not_dirty(p: &Package, src: &PathSource) -> CargoResult<()> { + if let Ok(repo) = git2::Repository::discover(p.root()) { + if let Some(workdir) = repo.workdir() { + debug!( + "found a git repo at {:?}, checking if index present", + workdir + ); + let path = p.manifest_path(); + let path = path.strip_prefix(workdir).unwrap_or(path); + if let Ok(status) = repo.status_file(path) { + if (status & git2::Status::IGNORED).is_empty() { + debug!("Cargo.toml found in repo, checking if dirty"); + return git(p, src, &repo); + } + } + } + } + + // No VCS recognized, we don't know if the directory is dirty or not, so we + // have to assume that it's clean. + return Ok(()); + + fn git(p: &Package, src: &PathSource, repo: &git2::Repository) -> CargoResult<()> { + let workdir = repo.workdir().unwrap(); + let dirty = src.list_files(p)? + .iter() + .filter(|file| { + let relative = file.strip_prefix(workdir).unwrap(); + if let Ok(status) = repo.status_file(relative) { + status != git2::Status::CURRENT + } else { + false + } + }) + .map(|path| { + path.strip_prefix(p.root()) + .unwrap_or(path) + .display() + .to_string() + }) + .collect::>(); + if dirty.is_empty() { + Ok(()) + } else { + bail!( + "{} files in the working directory contain changes that were \ + not yet committed into git:\n\n{}\n\n\ + to proceed despite this, pass the `--allow-dirty` flag", + dirty.len(), + dirty.join("\n") + ) + } + } +} + +fn tar(ws: &Workspace, src: &PathSource, dst: &File, filename: &str) -> CargoResult<()> { + // Prepare the encoder and its header + let filename = Path::new(filename); + let encoder = GzBuilder::new() + .filename(util::path2bytes(filename)?) 
+ .write(dst, Compression::best()); + + // Put all package files into a compressed archive + let mut ar = Builder::new(encoder); + let pkg = ws.current()?; + let config = ws.config(); + let root = pkg.root(); + for file in src.list_files(pkg)?.iter() { + let relative = util::without_prefix(file, root).unwrap(); + check_filename(relative)?; + let relative = relative.to_str().ok_or_else(|| { + format_err!("non-utf8 path in source directory: {}", relative.display()) + })?; + config + .shell() + .verbose(|shell| shell.status("Archiving", &relative))?; + let path = format!( + "{}-{}{}{}", + pkg.name(), + pkg.version(), + path::MAIN_SEPARATOR, + relative + ); + + // The tar::Builder type by default will build GNU archives, but + // unfortunately we force it here to use UStar archives instead. The + // UStar format has more limitations on the length of path name that it + // can encode, so it's not quite as nice to use. + // + // Older cargos, however, had a bug where GNU archives were interpreted + // as UStar archives. This bug means that if we publish a GNU archive + // which has fully filled out metadata it'll be corrupt when unpacked by + // older cargos. + // + // Hopefully in the future after enough cargos have been running around + // with the bugfixed tar-rs library we'll be able to switch this over to + // GNU archives, but for now we'll just say that you can't encode paths + // in archives that are *too* long. + // + // For an instance of this in the wild, use the tar-rs 0.3.3 library to + // unpack the selectors 0.4.0 crate on crates.io. Either that or take a + // look at rust-lang/cargo#2326 + let mut header = Header::new_ustar(); + header + .set_path(&path) + .chain_err(|| format!("failed to add to archive: `{}`", relative))?; + let mut file = File::open(file) + .chain_err(|| format!("failed to open for archiving: `{}`", file.display()))?; + let metadata = file.metadata() + .chain_err(|| format!("could not learn metadata for: `{}`", relative))?; + header.set_metadata(&metadata); + + if relative == "Cargo.toml" { + let orig = Path::new(&path).with_file_name("Cargo.toml.orig"); + header.set_path(&orig)?; + header.set_cksum(); + ar.append(&header, &mut file) + .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?; + + let mut header = Header::new_ustar(); + let toml = pkg.to_registry_toml(ws.config())?; + header.set_path(&path)?; + header.set_entry_type(EntryType::file()); + header.set_mode(0o644); + header.set_size(toml.len() as u64); + header.set_cksum(); + ar.append(&header, toml.as_bytes()) + .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?; + } else { + header.set_cksum(); + ar.append(&header, &mut file) + .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?; + } + } + + if include_lockfile(pkg) { + let toml = paths::read(&ws.root().join("Cargo.lock"))?; + let path = format!( + "{}-{}{}Cargo.lock", + pkg.name(), + pkg.version(), + path::MAIN_SEPARATOR + ); + let mut header = Header::new_ustar(); + header.set_path(&path)?; + header.set_entry_type(EntryType::file()); + header.set_mode(0o644); + header.set_size(toml.len() as u64); + header.set_cksum(); + ar.append(&header, toml.as_bytes()) + .chain_err(|| internal("could not archive source file `Cargo.lock`"))?; + } + + let encoder = ar.into_inner()?; + encoder.finish()?; + Ok(()) +} + +fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult<()> { + let config = ws.config(); + let pkg = ws.current()?; + + 
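+    // Verification unpacks the freshly written tarball next to it under
+    // target/package/, manufactures an ephemeral workspace on top of the
+    // unpacked sources, and runs an ordinary build against it; any compile
+    // error propagates and aborts packaging before the tarball is kept.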
config.shell().status("Verifying", pkg)?; + + let f = GzDecoder::new(tar.file()); + let dst = tar.parent() + .join(&format!("{}-{}", pkg.name(), pkg.version())); + if dst.exists() { + paths::remove_dir_all(&dst)?; + } + let mut archive = Archive::new(f); + archive.unpack(dst.parent().unwrap())?; + + // Manufacture an ephemeral workspace to ensure that even if the top-level + // package has a workspace we can still build our new crate. + let id = SourceId::for_path(&dst)?; + let mut src = PathSource::new(&dst, &id, ws.config()); + let new_pkg = src.root_package()?; + let ws = Workspace::ephemeral(new_pkg, config, None, true)?; + + ops::compile_ws( + &ws, + None, + &ops::CompileOptions { + config, + jobs: opts.jobs, + target: opts.target.clone(), + features: Vec::new(), + no_default_features: false, + all_features: false, + spec: ops::Packages::Packages(Vec::new()), + filter: ops::CompileFilter::Default { + required_features_filterable: true, + }, + release: false, + message_format: ops::MessageFormat::Human, + mode: ops::CompileMode::Build, + target_rustdoc_args: None, + target_rustc_args: None, + }, + Arc::new(DefaultExecutor), + )?; + + Ok(()) +} + +// It can often be the case that files of a particular name on one platform +// can't actually be created on another platform. For example files with colons +// in the name are allowed on Unix but not on Windows. +// +// To help out in situations like this, issue about weird filenames when +// packaging as a "heads up" that something may not work on other platforms. +fn check_filename(file: &Path) -> CargoResult<()> { + let name = match file.file_name() { + Some(name) => name, + None => return Ok(()), + }; + let name = match name.to_str() { + Some(name) => name, + None => bail!( + "path does not have a unicode filename which may not unpack \ + on all platforms: {}", + file.display() + ), + }; + let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*']; + if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) { + bail!( + "cannot package a filename with a special character `{}`: {}", + c, + file.display() + ) + } + Ok(()) +} diff --git a/src/cargo/ops/cargo_pkgid.rs b/src/cargo/ops/cargo_pkgid.rs new file mode 100644 index 000000000..0461bc4c8 --- /dev/null +++ b/src/cargo/ops/cargo_pkgid.rs @@ -0,0 +1,16 @@ +use ops; +use core::{PackageIdSpec, Workspace}; +use util::CargoResult; + +pub fn pkgid(ws: &Workspace, spec: Option<&str>) -> CargoResult { + let resolve = match ops::load_pkg_lockfile(ws)? 
{ + Some(resolve) => resolve, + None => bail!("a Cargo.lock must exist for this command"), + }; + + let pkgid = match spec { + Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?, + None => ws.current()?.package_id(), + }; + Ok(PackageIdSpec::from_package_id(pkgid)) +} diff --git a/src/cargo/ops/cargo_read_manifest.rs b/src/cargo/ops/cargo_read_manifest.rs new file mode 100644 index 000000000..aefe5b0ee --- /dev/null +++ b/src/cargo/ops/cargo_read_manifest.rs @@ -0,0 +1,199 @@ +use std::collections::{HashMap, HashSet}; +use std::fs; +use std::io; +use std::path::{Path, PathBuf}; + +use core::{EitherManifest, Package, PackageId, SourceId}; +use util::{self, Config}; +use util::errors::{CargoError, CargoResult}; +use util::important_paths::find_project_manifest_exact; +use util::toml::read_manifest; + +pub fn read_package( + path: &Path, + source_id: &SourceId, + config: &Config, +) -> CargoResult<(Package, Vec)> { + trace!( + "read_package; path={}; source-id={}", + path.display(), + source_id + ); + let (manifest, nested) = read_manifest(path, source_id, config)?; + let manifest = match manifest { + EitherManifest::Real(manifest) => manifest, + EitherManifest::Virtual(..) => bail!( + "found a virtual manifest at `{}` instead of a package \ + manifest", + path.display() + ), + }; + + Ok((Package::new(manifest, path), nested)) +} + +pub fn read_packages( + path: &Path, + source_id: &SourceId, + config: &Config, +) -> CargoResult> { + let mut all_packages = HashMap::new(); + let mut visited = HashSet::::new(); + let mut errors = Vec::::new(); + + trace!( + "looking for root package: {}, source_id={}", + path.display(), + source_id + ); + + walk(path, &mut |dir| { + trace!("looking for child package: {}", dir.display()); + + // Don't recurse into hidden/dot directories unless we're at the toplevel + if dir != path { + let name = dir.file_name().and_then(|s| s.to_str()); + if name.map(|s| s.starts_with('.')) == Some(true) { + return Ok(false); + } + + // Don't automatically discover packages across git submodules + if fs::metadata(&dir.join(".git")).is_ok() { + return Ok(false); + } + } + + // Don't ever look at target directories + if dir.file_name().and_then(|s| s.to_str()) == Some("target") + && has_manifest(dir.parent().unwrap()) + { + return Ok(false); + } + + if has_manifest(dir) { + read_nested_packages( + dir, + &mut all_packages, + source_id, + config, + &mut visited, + &mut errors, + )?; + } + Ok(true) + })?; + + if all_packages.is_empty() { + match errors.pop() { + Some(err) => Err(err), + None => Err(format_err!( + "Could not find Cargo.toml in `{}`", + path.display() + )), + } + } else { + Ok(all_packages.into_iter().map(|(_, v)| v).collect()) + } +} + +fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult) -> CargoResult<()> { + if !callback(path)? { + trace!("not processing {}", path.display()); + return Ok(()); + } + + // Ignore any permission denied errors because temporary directories + // can often have some weird permissions on them. 
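+    // A PermissionDenied error therefore short-circuits with Ok(()); any
+    // other I/O error is wrapped with the directory path so the caller can
+    // see exactly which read failed.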
+ let dirs = match fs::read_dir(path) { + Ok(dirs) => dirs, + Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => return Ok(()), + Err(e) => { + let cx = format!("failed to read directory `{}`", path.display()); + let e = CargoError::from(e); + return Err(e.context(cx).into()); + } + }; + for dir in dirs { + let dir = dir?; + if dir.file_type()?.is_dir() { + walk(&dir.path(), callback)?; + } + } + Ok(()) +} + +fn has_manifest(path: &Path) -> bool { + find_project_manifest_exact(path, "Cargo.toml").is_ok() +} + +fn read_nested_packages( + path: &Path, + all_packages: &mut HashMap, + source_id: &SourceId, + config: &Config, + visited: &mut HashSet, + errors: &mut Vec, +) -> CargoResult<()> { + if !visited.insert(path.to_path_buf()) { + return Ok(()); + } + + let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?; + + let (manifest, nested) = match read_manifest(&manifest_path, source_id, config) { + Err(err) => { + // Ignore malformed manifests found on git repositories + // + // git source try to find and read all manifests from the repository + // but since it's not possible to exclude folders from this search + // it's safer to ignore malformed manifests to avoid + // + // TODO: Add a way to exclude folders? + info!( + "skipping malformed package found at `{}`", + path.to_string_lossy() + ); + errors.push(err); + return Ok(()); + } + Ok(tuple) => tuple, + }; + + let manifest = match manifest { + EitherManifest::Real(manifest) => manifest, + EitherManifest::Virtual(..) => return Ok(()), + }; + let pkg = Package::new(manifest, &manifest_path); + + let pkg_id = pkg.package_id().clone(); + use std::collections::hash_map::Entry; + match all_packages.entry(pkg_id) { + Entry::Vacant(v) => { + v.insert(pkg); + } + Entry::Occupied(_) => { + info!( + "skipping nested package `{}` found at `{}`", + pkg.name(), + path.to_string_lossy() + ); + } + } + + // Registry sources are not allowed to have `path=` dependencies because + // they're all translated to actual registry dependencies. + // + // We normalize the path here ensure that we don't infinitely walk around + // looking for crates. By normalizing we ensure that we visit this crate at + // most once. + // + // TODO: filesystem/symlink implications? 
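+    // Hypothetical example of the cycle this prevents: a package at `a/`
+    // listing `path = "../a"` would otherwise recurse through `a/../a`,
+    // `a/../a/../a`, and so on; normalization collapses them all to the
+    // same PathBuf, which the `visited` set then rejects.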
+ if !source_id.is_registry() { + for p in nested.iter() { + let path = util::normalize_path(&path.join(p)); + read_nested_packages(&path, all_packages, source_id, config, visited, errors)?; + } + } + + Ok(()) +} diff --git a/src/cargo/ops/cargo_run.rs b/src/cargo/ops/cargo_run.rs new file mode 100644 index 000000000..4bd311808 --- /dev/null +++ b/src/cargo/ops/cargo_run.rs @@ -0,0 +1,89 @@ +use std::path::Path; + +use ops::{self, Packages}; +use util::{self, CargoResult, ProcessError}; +use core::Workspace; + +pub fn run( + ws: &Workspace, + options: &ops::CompileOptions, + args: &[String], +) -> CargoResult> { + let config = ws.config(); + + let pkg = match options.spec { + Packages::All | Packages::Default | Packages::OptOut(_) => { + unreachable!("cargo run supports single package only") + } + Packages::Packages(ref xs) => match xs.len() { + 0 => ws.current()?, + 1 => ws.members() + .find(|pkg| &*pkg.name() == xs[0]) + .ok_or_else(|| { + format_err!("package `{}` is not a member of the workspace", xs[0]) + })?, + _ => unreachable!("cargo run supports single package only"), + }, + }; + + let bins: Vec<_> = pkg.manifest() + .targets() + .iter() + .filter(|a| { + !a.is_lib() && !a.is_custom_build() && if !options.filter.is_specific() { + a.is_bin() + } else { + options.filter.target_run(a) + } + }) + .map(|bin| bin.name()) + .collect(); + + if bins.is_empty() { + if !options.filter.is_specific() { + bail!("a bin target must be available for `cargo run`") + } else { + // this will be verified in cargo_compile + } + } + if bins.len() > 1 { + if !options.filter.is_specific() { + bail!( + "`cargo run` requires that a project only have one \ + executable; use the `--bin` option to specify which one \ + to run\navailable binaries: {}", + bins.join(", ") + ) + } else { + bail!( + "`cargo run` can run at most one executable, but \ + multiple were specified" + ) + } + } + + let compile = ops::compile(ws, options)?; + assert_eq!(compile.binaries.len(), 1); + let exe = &compile.binaries[0]; + let exe = match util::without_prefix(exe, config.cwd()) { + Some(path) if path.file_name() == Some(path.as_os_str()) => { + Path::new(".").join(path).to_path_buf() + } + Some(path) => path.to_path_buf(), + None => exe.to_path_buf(), + }; + let mut process = compile.target_process(exe, pkg)?; + process.args(args).cwd(config.cwd()); + + config.shell().status("Running", process.to_string())?; + + let result = process.exec_replace(); + + match result { + Ok(()) => Ok(None), + Err(e) => { + let err = e.downcast::()?; + Ok(Some(err)) + } + } +} diff --git a/src/cargo/ops/cargo_rustc/compilation.rs b/src/cargo/ops/cargo_rustc/compilation.rs new file mode 100644 index 000000000..b8dcdb5ec --- /dev/null +++ b/src/cargo/ops/cargo_rustc/compilation.rs @@ -0,0 +1,214 @@ +use std::collections::{BTreeSet, HashMap, HashSet}; +use std::ffi::OsStr; +use std::path::PathBuf; + +use semver::Version; +use lazycell::LazyCell; + +use core::{Package, PackageId, Target, TargetKind}; +use util::{self, join_paths, process, CargoResult, Config, ProcessBuilder}; + +/// A structure returning the result of a compilation. +pub struct Compilation<'cfg> { + /// A mapping from a package to the list of libraries that need to be + /// linked when working with that package. + pub libraries: HashMap>, + + /// An array of all tests created during this compilation. + pub tests: Vec<(Package, TargetKind, String, PathBuf)>, + + /// An array of all binaries created. + pub binaries: Vec, + + /// All directories for the output of native build commands. 
+ /// + /// This is currently used to drive some entries which are added to the + /// LD_LIBRARY_PATH as appropriate. + /// + /// The order should be deterministic. + // TODO: deprecated, remove + pub native_dirs: BTreeSet, + + /// Root output directory (for the local package's artifacts) + pub root_output: PathBuf, + + /// Output directory for rust dependencies. + /// May be for the host or for a specific target. + pub deps_output: PathBuf, + + /// Output directory for the rust host dependencies. + pub host_deps_output: PathBuf, + + /// The path to rustc's own libstd + pub host_dylib_path: Option, + + /// The path to libstd for the target + pub target_dylib_path: Option, + + /// Extra environment variables that were passed to compilations and should + /// be passed to future invocations of programs. + pub extra_env: HashMap>, + + pub to_doc_test: Vec, + + /// Features per package enabled during this compilation. + pub cfgs: HashMap>, + + /// Flags to pass to rustdoc when invoked from cargo test, per package. + pub rustdocflags: HashMap>, + + pub target: String, + + config: &'cfg Config, + + target_runner: LazyCell)>>, +} + +impl<'cfg> Compilation<'cfg> { + pub fn new(config: &'cfg Config) -> Compilation<'cfg> { + Compilation { + libraries: HashMap::new(), + native_dirs: BTreeSet::new(), // TODO: deprecated, remove + root_output: PathBuf::from("/"), + deps_output: PathBuf::from("/"), + host_deps_output: PathBuf::from("/"), + host_dylib_path: None, + target_dylib_path: None, + tests: Vec::new(), + binaries: Vec::new(), + extra_env: HashMap::new(), + to_doc_test: Vec::new(), + cfgs: HashMap::new(), + rustdocflags: HashMap::new(), + config, + target: String::new(), + target_runner: LazyCell::new(), + } + } + + /// See `process`. + pub fn rustc_process(&self, pkg: &Package) -> CargoResult { + self.fill_env(self.config.rustc()?.process(), pkg, true) + } + + /// See `process`. + pub fn rustdoc_process(&self, pkg: &Package) -> CargoResult { + self.fill_env(process(&*self.config.rustdoc()?), pkg, false) + } + + /// See `process`. + pub fn host_process>( + &self, + cmd: T, + pkg: &Package, + ) -> CargoResult { + self.fill_env(process(cmd), pkg, true) + } + + fn target_runner(&self) -> CargoResult<&Option<(PathBuf, Vec)>> { + self.target_runner.try_borrow_with(|| { + let key = format!("target.{}.runner", self.target); + Ok(self.config.get_path_and_args(&key)?.map(|v| v.val)) + }) + } + + /// See `process`. + pub fn target_process>( + &self, + cmd: T, + pkg: &Package, + ) -> CargoResult { + let builder = if let Some((ref runner, ref args)) = *self.target_runner()? { + let mut builder = process(runner); + builder.args(args); + builder.arg(cmd); + builder + } else { + process(cmd) + }; + self.fill_env(builder, pkg, false) + } + + /// Prepares a new process with an appropriate environment to run against + /// the artifacts produced by the build process. + /// + /// The package argument is also used to configure environment variables as + /// well as the working directory of the child process. 
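+    /// The dynamic-library search path is extended rather than replaced:
+    /// host processes see the host deps directory plus rustc's own sysroot
+    /// libraries, target processes see the root output and deps directories
+    /// instead, and the invoking environment's dylib path is appended last.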
+ fn fill_env( + &self, + mut cmd: ProcessBuilder, + pkg: &Package, + is_host: bool, + ) -> CargoResult { + let mut search_path = if is_host { + let mut search_path = vec![self.host_deps_output.clone()]; + search_path.extend(self.host_dylib_path.clone()); + search_path + } else { + let mut search_path = + super::filter_dynamic_search_path(self.native_dirs.iter(), &self.root_output); + search_path.push(self.root_output.clone()); + search_path.push(self.deps_output.clone()); + search_path.extend(self.target_dylib_path.clone()); + search_path + }; + + search_path.extend(util::dylib_path().into_iter()); + let search_path = join_paths(&search_path, util::dylib_path_envvar())?; + + cmd.env(util::dylib_path_envvar(), &search_path); + if let Some(env) = self.extra_env.get(pkg.package_id()) { + for &(ref k, ref v) in env { + cmd.env(k, v); + } + } + + let metadata = pkg.manifest().metadata(); + + let cargo_exe = self.config.cargo_exe()?; + cmd.env(::CARGO_ENV, cargo_exe); + + // When adding new environment variables depending on + // crate properties which might require rebuild upon change + // consider adding the corresponding properties to the hash + // in Context::target_metadata() + cmd.env("CARGO_MANIFEST_DIR", pkg.root()) + .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string()) + .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string()) + .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string()) + .env( + "CARGO_PKG_VERSION_PRE", + &pre_version_component(pkg.version()), + ) + .env("CARGO_PKG_VERSION", &pkg.version().to_string()) + .env("CARGO_PKG_NAME", &*pkg.name()) + .env( + "CARGO_PKG_DESCRIPTION", + metadata.description.as_ref().unwrap_or(&String::new()), + ) + .env( + "CARGO_PKG_HOMEPAGE", + metadata.homepage.as_ref().unwrap_or(&String::new()), + ) + .env("CARGO_PKG_AUTHORS", &pkg.authors().join(":")) + .cwd(pkg.root()); + Ok(cmd) + } +} + +fn pre_version_component(v: &Version) -> String { + if v.pre.is_empty() { + return String::new(); + } + + let mut ret = String::new(); + + for (i, x) in v.pre.iter().enumerate() { + if i != 0 { + ret.push('.') + }; + ret.push_str(&x.to_string()); + } + + ret +} diff --git a/src/cargo/ops/cargo_rustc/context/mod.rs b/src/cargo/ops/cargo_rustc/context/mod.rs new file mode 100644 index 000000000..7bb9b088f --- /dev/null +++ b/src/cargo/ops/cargo_rustc/context/mod.rs @@ -0,0 +1,1179 @@ +#![allow(deprecated)] + +use std::collections::{HashMap, HashSet}; +use std::collections::hash_map::Entry; +use std::env; +use std::fmt; +use std::hash::{Hash, Hasher, SipHasher}; +use std::path::{Path, PathBuf}; +use std::str::{self, FromStr}; +use std::sync::Arc; +use std::cell::RefCell; + +use jobserver::Client; + +use core::{Package, PackageId, PackageSet, Profile, Resolve, Target}; +use core::{Dependency, Profiles, TargetKind, Workspace}; +use util::{self, internal, profile, Cfg, CfgExpr, Config, ProcessBuilder}; +use util::errors::{CargoResult, CargoResultExt}; + +use super::TargetConfig; +use super::custom_build::{BuildDeps, BuildScripts, BuildState}; +use super::fingerprint::Fingerprint; +use super::layout::Layout; +use super::links::Links; +use super::{BuildConfig, Compilation, Kind}; + +mod unit_dependencies; +use self::unit_dependencies::build_unit_dependencies; + +/// All information needed to define a Unit. +/// +/// A unit is an object that has enough information so that cargo knows how to build it. +/// For example, if your project has dependencies, then every dependency will be built as a library +/// unit. 
If your project is a library, then it will be built as a library unit as well, or if it +/// is a binary with `main.rs`, then a binary will be output. There are also separate unit types +/// for `test`ing and `check`ing, amongst others. +/// +/// The unit also holds information about all possible metadata about the package in `pkg`. +/// +/// A unit needs to know extra information in addition to the type and root source file. For +/// example, it needs to know the target architecture (OS, chip arch etc.) and it needs to know +/// whether you want a debug or release build. There is enough information in this struct to figure +/// all that out. +#[derive(Clone, Copy, Eq, PartialEq, Hash)] +pub struct Unit<'a> { + /// Information about available targets, which files to include/exclude, etc. Basically stuff in + /// `Cargo.toml`. + pub pkg: &'a Package, + /// Information about the specific target to build, out of the possible targets in `pkg`. Not + /// to be confused with *target-triple* (or *target architecture* ...), the target arch for a + /// build. + pub target: &'a Target, + /// The profile contains information about *how* the build should be run, including debug + /// level, extra args to pass to rustc, etc. + pub profile: &'a Profile, + /// Whether this compilation unit is for the host or target architecture. + /// + /// For example, when + /// cross compiling and using a custom build script, the build script needs to be compiled for + /// the host architecture so the host rustc can use it (when compiling to the target + /// architecture). + pub kind: Kind, +} + +/// Type of each file generated by a Unit. +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub enum TargetFileType { + /// Not a special file type. + Normal, + /// It is something you can link against (e.g. a library) + Linkable, + /// It is a piece of external debug information (e.g. *.dSYM and *.pdb) + DebugInfo, +} + +/// The build context, containing all information about a build task +pub struct Context<'a, 'cfg: 'a> { + /// The workspace the build is for + pub ws: &'a Workspace<'cfg>, + /// The cargo configuration + pub config: &'cfg Config, + /// The dependency graph for our build + pub resolve: &'a Resolve, + /// Information on the compilation output + pub compilation: Compilation<'cfg>, + pub packages: &'a PackageSet<'cfg>, + pub build_state: Arc, + pub build_script_overridden: HashSet<(PackageId, Kind)>, + pub build_explicit_deps: HashMap, BuildDeps>, + pub fingerprints: HashMap, Arc>, + pub compiled: HashSet>, + pub build_config: BuildConfig, + pub build_scripts: HashMap, Arc>, + pub links: Links<'a>, + pub used_in_plugin: HashSet>, + pub jobserver: Client, + + /// The target directory layout for the host (and target if it is the same as host) + host: Layout, + /// The target directory layout for the target (if different from then host) + target: Option, + target_info: TargetInfo, + host_info: TargetInfo, + profiles: &'a Profiles, + incremental_env: Option, + + unit_dependencies: HashMap, Vec>>, + /// For each Unit, a list all files produced as a triple of + /// + /// - File name that will be produced by the build process (in `deps`) + /// - If it should be linked into `target`, and what it should be called (e.g. without + /// metadata). 
+ /// - Type of the file (library / debug symbol / else) + target_filenames: HashMap, Arc, TargetFileType)>>>, + target_metadatas: HashMap, Option>, +} + +#[derive(Clone, Default)] +struct TargetInfo { + crate_type_process: Option, + crate_types: RefCell>>, + cfg: Option>, + sysroot_libdir: Option, +} + +impl TargetInfo { + fn discover_crate_type(&self, crate_type: &str) -> CargoResult> { + let mut process = self.crate_type_process.clone().unwrap(); + + process.arg("--crate-type").arg(crate_type); + + let output = process.exec_with_output().chain_err(|| { + format!( + "failed to run `rustc` to learn about \ + crate-type {} information", + crate_type + ) + })?; + + let error = str::from_utf8(&output.stderr).unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); + Ok(parse_crate_type(crate_type, error, &mut output.lines())?) + } +} + +#[derive(Clone, Hash, Eq, PartialEq, Ord, PartialOrd)] +pub struct Metadata(u64); + +impl<'a, 'cfg> Context<'a, 'cfg> { + pub fn new( + ws: &'a Workspace<'cfg>, + resolve: &'a Resolve, + packages: &'a PackageSet<'cfg>, + config: &'cfg Config, + build_config: BuildConfig, + profiles: &'a Profiles, + units: &[Unit<'a>], + ) -> CargoResult> { + let dest = if build_config.release { + "release" + } else { + "debug" + }; + let host_layout = Layout::new(ws, None, dest)?; + let target_layout = match build_config.requested_target.as_ref() { + Some(target) => Some(Layout::new(ws, Some(target), dest)?), + None => None, + }; + + let incremental_env = match env::var("CARGO_INCREMENTAL") { + Ok(v) => Some(v == "1"), + Err(_) => None, + }; + + // Load up the jobserver that we'll use to manage our parallelism. This + // is the same as the GNU make implementation of a jobserver, and + // intentionally so! It's hoped that we can interact with GNU make and + // all share the same jobserver. + // + // Note that if we don't have a jobserver in our environment then we + // create our own, and we create it with `n-1` tokens because one token + // is ourself, a running process. + let jobserver = match config.jobserver_from_env() { + Some(c) => c.clone(), + None => Client::new(build_config.jobs as usize - 1) + .chain_err(|| "failed to create jobserver")?, + }; + let mut cx = Context { + ws, + host: host_layout, + target: target_layout, + resolve, + packages, + config, + target_info: TargetInfo::default(), + host_info: TargetInfo::default(), + compilation: Compilation::new(config), + build_state: Arc::new(BuildState::new(&build_config)), + build_config, + fingerprints: HashMap::new(), + profiles, + compiled: HashSet::new(), + build_scripts: HashMap::new(), + build_explicit_deps: HashMap::new(), + links: Links::new(), + used_in_plugin: HashSet::new(), + incremental_env, + jobserver, + build_script_overridden: HashSet::new(), + + unit_dependencies: HashMap::new(), + // TODO: Pre-Calculate these with a topo-sort, rather than lazy-calculating + target_filenames: HashMap::new(), + target_metadatas: HashMap::new(), + }; + + cx.probe_target_info()?; + let deps = build_unit_dependencies(units, &cx)?; + cx.unit_dependencies = deps; + + Ok(cx) + } + + /// Prepare this context, ensuring that all filesystem directories are in + /// place. 
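+    /// Both the host layout and, when cross compiling, the target layout
+    /// are prepared; `root_output` and `deps_output` on the compilation are
+    /// then pointed at whichever layout the build actually targets.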
+ pub fn prepare(&mut self) -> CargoResult<()> { + let _p = profile::start("preparing layout"); + + self.host + .prepare() + .chain_err(|| internal("couldn't prepare build directories"))?; + if let Some(ref mut target) = self.target { + target + .prepare() + .chain_err(|| internal("couldn't prepare build directories"))?; + } + + self.compilation.host_deps_output = self.host.deps().to_path_buf(); + + let layout = self.target.as_ref().unwrap_or(&self.host); + self.compilation.root_output = layout.dest().to_path_buf(); + self.compilation.deps_output = layout.deps().to_path_buf(); + Ok(()) + } + + /// Ensure that we've collected all target-specific information to compile + /// all the units mentioned in `units`. + fn probe_target_info(&mut self) -> CargoResult<()> { + debug!("probe_target_info"); + let host_target_same = match self.requested_target() { + Some(s) if s != self.config.rustc()?.host => false, + _ => true, + }; + + if host_target_same { + let info = self.probe_target_info_kind(Kind::Target)?; + self.host_info = info.clone(); + self.target_info = info; + } else { + self.host_info = self.probe_target_info_kind(Kind::Host)?; + self.target_info = self.probe_target_info_kind(Kind::Target)?; + } + self.compilation.host_dylib_path = self.host_info.sysroot_libdir.clone(); + self.compilation.target_dylib_path = self.target_info.sysroot_libdir.clone(); + Ok(()) + } + + fn probe_target_info_kind(&self, kind: Kind) -> CargoResult { + let rustflags = env_args( + self.config, + &self.build_config, + self.info(&kind), + kind, + "RUSTFLAGS", + )?; + let mut process = self.config.rustc()?.process(); + process + .arg("-") + .arg("--crate-name") + .arg("___") + .arg("--print=file-names") + .args(&rustflags) + .env_remove("RUST_LOG"); + + if kind == Kind::Target { + process.arg("--target").arg(&self.target_triple()); + } + + let crate_type_process = process.clone(); + const KNOWN_CRATE_TYPES: &[&str] = + &["bin", "rlib", "dylib", "cdylib", "staticlib", "proc-macro"]; + for crate_type in KNOWN_CRATE_TYPES.iter() { + process.arg("--crate-type").arg(crate_type); + } + + let mut with_cfg = process.clone(); + with_cfg.arg("--print=sysroot"); + with_cfg.arg("--print=cfg"); + + let mut has_cfg_and_sysroot = true; + let output = with_cfg + .exec_with_output() + .or_else(|_| { + has_cfg_and_sysroot = false; + process.exec_with_output() + }) + .chain_err(|| "failed to run `rustc` to learn about target-specific information")?; + + let error = str::from_utf8(&output.stderr).unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); + let mut lines = output.lines(); + let mut map = HashMap::new(); + for crate_type in KNOWN_CRATE_TYPES { + let out = parse_crate_type(crate_type, error, &mut lines)?; + map.insert(crate_type.to_string(), out); + } + + let mut sysroot_libdir = None; + if has_cfg_and_sysroot { + let line = match lines.next() { + Some(line) => line, + None => bail!( + "output of --print=sysroot missing when learning about \ + target-specific information from rustc" + ), + }; + let mut rustlib = PathBuf::from(line); + if kind == Kind::Host { + if cfg!(windows) { + rustlib.push("bin"); + } else { + rustlib.push("lib"); + } + sysroot_libdir = Some(rustlib); + } else { + rustlib.push("lib"); + rustlib.push("rustlib"); + rustlib.push(self.target_triple()); + rustlib.push("lib"); + sysroot_libdir = Some(rustlib); + } + } + + let cfg = if has_cfg_and_sysroot { + Some(lines.map(Cfg::from_str).collect::>()?) 
+ } else { + None + }; + + Ok(TargetInfo { + crate_type_process: Some(crate_type_process), + crate_types: RefCell::new(map), + cfg, + sysroot_libdir, + }) + } + + /// Builds up the `used_in_plugin` internal to this context from the list of + /// top-level units. + /// + /// This will recursively walk `units` and all of their dependencies to + /// determine which crate are going to be used in plugins or not. + pub fn build_used_in_plugin_map(&mut self, units: &[Unit<'a>]) -> CargoResult<()> { + let mut visited = HashSet::new(); + for unit in units { + self.walk_used_in_plugin_map(unit, unit.target.for_host(), &mut visited)?; + } + Ok(()) + } + + fn walk_used_in_plugin_map( + &mut self, + unit: &Unit<'a>, + is_plugin: bool, + visited: &mut HashSet<(Unit<'a>, bool)>, + ) -> CargoResult<()> { + if !visited.insert((*unit, is_plugin)) { + return Ok(()); + } + if is_plugin { + self.used_in_plugin.insert(*unit); + } + for unit in self.dep_targets(unit) { + self.walk_used_in_plugin_map(&unit, is_plugin || unit.target.for_host(), visited)?; + } + Ok(()) + } + + /// Returns the appropriate directory layout for either a plugin or not. + fn layout(&self, kind: Kind) -> &Layout { + match kind { + Kind::Host => &self.host, + Kind::Target => self.target.as_ref().unwrap_or(&self.host), + } + } + + /// Returns the directories where Rust crate dependencies are found for the + /// specified unit. + pub fn deps_dir(&self, unit: &Unit) -> &Path { + self.layout(unit.kind).deps() + } + + /// Returns the directory for the specified unit where fingerprint + /// information is stored. + pub fn fingerprint_dir(&mut self, unit: &Unit<'a>) -> PathBuf { + let dir = self.pkg_dir(unit); + self.layout(unit.kind).fingerprint().join(dir) + } + + /// Returns the appropriate directory layout for either a plugin or not. + pub fn build_script_dir(&mut self, unit: &Unit<'a>) -> PathBuf { + assert!(unit.target.is_custom_build()); + assert!(!unit.profile.run_custom_build); + let dir = self.pkg_dir(unit); + self.layout(Kind::Host).build().join(dir) + } + + /// Returns the appropriate directory layout for either a plugin or not. + pub fn build_script_out_dir(&mut self, unit: &Unit<'a>) -> PathBuf { + assert!(unit.target.is_custom_build()); + assert!(unit.profile.run_custom_build); + let dir = self.pkg_dir(unit); + self.layout(unit.kind).build().join(dir).join("out") + } + + pub fn host_deps(&self) -> &Path { + self.host.deps() + } + + /// Return the root of the build output tree + pub fn target_root(&self) -> &Path { + self.host.dest() + } + + /// Returns the appropriate output directory for the specified package and + /// target. + pub fn out_dir(&mut self, unit: &Unit<'a>) -> PathBuf { + if unit.profile.doc { + self.layout(unit.kind).root().parent().unwrap().join("doc") + } else if unit.target.is_custom_build() { + self.build_script_dir(unit) + } else if unit.target.is_example() { + self.layout(unit.kind).examples().to_path_buf() + } else { + self.deps_dir(unit).to_path_buf() + } + } + + fn pkg_dir(&mut self, unit: &Unit<'a>) -> String { + let name = unit.pkg.package_id().name(); + match self.target_metadata(unit) { + Some(meta) => format!("{}-{}", name, meta), + None => format!("{}-{}", name, self.target_short_hash(unit)), + } + } + + /// Return the host triple for this context + pub fn host_triple(&self) -> &str { + &self.build_config.host_triple + } + + /// Return the target triple which this context is targeting. 
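+    /// Falls back to the host triple when no `--target` was requested.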
+ pub fn target_triple(&self) -> &str { + self.requested_target() + .unwrap_or_else(|| self.host_triple()) + } + + /// Requested (not actual) target for the build + pub fn requested_target(&self) -> Option<&str> { + self.build_config.requested_target.as_ref().map(|s| &s[..]) + } + + /// Get the short hash based only on the PackageId + /// Used for the metadata when target_metadata returns None + pub fn target_short_hash(&self, unit: &Unit) -> String { + let hashable = unit.pkg.package_id().stable_hash(self.ws.root()); + util::short_hash(&hashable) + } + + /// Get the metadata for a target in a specific profile + /// We build to the path: "{filename}-{target_metadata}" + /// We use a linking step to link/copy to a predictable filename + /// like `target/debug/libfoo.{a,so,rlib}` and such. + pub fn target_metadata(&mut self, unit: &Unit<'a>) -> Option { + if let Some(cache) = self.target_metadatas.get(unit) { + return cache.clone(); + } + + let metadata = self.calc_target_metadata(unit); + self.target_metadatas.insert(*unit, metadata.clone()); + metadata + } + + fn calc_target_metadata(&mut self, unit: &Unit<'a>) -> Option { + // No metadata for dylibs because of a couple issues + // - OSX encodes the dylib name in the executable + // - Windows rustc multiple files of which we can't easily link all of them + // + // No metadata for bin because of an issue + // - wasm32 rustc/emcc encodes the .wasm name in the .js (rust-lang/cargo#4535) + // + // Two exceptions + // 1) Upstream dependencies (we aren't exporting + need to resolve name conflict) + // 2) __CARGO_DEFAULT_LIB_METADATA env var + // + // Note, though, that the compiler's build system at least wants + // path dependencies (eg libstd) to have hashes in filenames. To account for + // that we have an extra hack here which reads the + // `__CARGO_DEFAULT_LIB_METADATA` environment variable and creates a + // hash in the filename if that's present. + // + // This environment variable should not be relied on! It's + // just here for rustbuild. We need a more principled method + // doing this eventually. + let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA"); + if !(unit.profile.test || unit.profile.check) + && (unit.target.is_dylib() || unit.target.is_cdylib() + || (unit.target.is_bin() && self.target_triple().starts_with("wasm32-"))) + && unit.pkg.package_id().source_id().is_path() + && !__cargo_default_lib_metadata.is_ok() + { + return None; + } + + let mut hasher = SipHasher::new_with_keys(0, 0); + + // Unique metadata per (name, source, version) triple. This'll allow us + // to pull crates from anywhere w/o worrying about conflicts + unit.pkg + .package_id() + .stable_hash(self.ws.root()) + .hash(&mut hasher); + + // Add package properties which map to environment variables + // exposed by Cargo + let manifest_metadata = unit.pkg.manifest().metadata(); + manifest_metadata.authors.hash(&mut hasher); + manifest_metadata.description.hash(&mut hasher); + manifest_metadata.homepage.hash(&mut hasher); + + // Also mix in enabled features to our metadata. This'll ensure that + // when changing feature sets each lib is separately cached. 
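+        // Hypothetical example: building `serde` once with its `derive`
+        // feature and once without yields two different hashes here, so the
+        // two artifacts can sit side by side in a shared target directory
+        // instead of clobbering each other.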
+ self.resolve + .features_sorted(unit.pkg.package_id()) + .hash(&mut hasher); + + // Mix in the target-metadata of all the dependencies of this target + { + let mut deps_metadata = self.dep_targets(unit) + .iter() + .map(|dep_unit| self.target_metadata(dep_unit)) + .collect::>(); + deps_metadata.sort(); + deps_metadata.hash(&mut hasher); + } + + // Throw in the profile we're compiling with. This helps caching + // panic=abort and panic=unwind artifacts, additionally with various + // settings like debuginfo and whatnot. + unit.profile.hash(&mut hasher); + + // Artifacts compiled for the host should have a different metadata + // piece than those compiled for the target, so make sure we throw in + // the unit's `kind` as well + unit.kind.hash(&mut hasher); + + // Finally throw in the target name/kind. This ensures that concurrent + // compiles of targets in the same crate don't collide. + unit.target.name().hash(&mut hasher); + unit.target.kind().hash(&mut hasher); + + if let Ok(rustc) = self.config.rustc() { + rustc.verbose_version.hash(&mut hasher); + } + + // Seed the contents of __CARGO_DEFAULT_LIB_METADATA to the hasher if present. + // This should be the release channel, to get a different hash for each channel. + if let Ok(ref channel) = __cargo_default_lib_metadata { + channel.hash(&mut hasher); + } + + Some(Metadata(hasher.finish())) + } + + /// Returns the file stem for a given target/profile combo (with metadata) + pub fn file_stem(&mut self, unit: &Unit<'a>) -> String { + match self.target_metadata(unit) { + Some(ref metadata) => format!("{}-{}", unit.target.crate_name(), metadata), + None => self.bin_stem(unit), + } + } + + /// Returns the bin stem for a given target (without metadata) + fn bin_stem(&self, unit: &Unit) -> String { + if unit.target.allows_underscores() { + unit.target.name().to_string() + } else { + unit.target.crate_name() + } + } + + /// Returns a tuple with the directory and name of the hard link we expect + /// our target to be copied to. Eg, file_stem may be out_dir/deps/foo-abcdef + /// and link_stem would be out_dir/foo + /// This function returns it in two parts so the caller can add prefix/suffix + /// to filename separately + /// + /// Returns an Option because in some cases we don't want to link + /// (eg a dependent lib) + pub fn link_stem(&mut self, unit: &Unit<'a>) -> Option<(PathBuf, String)> { + let src_dir = self.out_dir(unit); + let bin_stem = self.bin_stem(unit); + let file_stem = self.file_stem(unit); + + // We currently only lift files up from the `deps` directory. If + // it was compiled into something like `example/` or `doc/` then + // we don't want to link it up. + if src_dir.ends_with("deps") { + // Don't lift up library dependencies + if self.ws.members().find(|&p| p == unit.pkg).is_none() && !unit.target.is_bin() { + None + } else { + Some(( + src_dir.parent().unwrap().to_owned(), + if unit.profile.test { + file_stem + } else { + bin_stem + }, + )) + } + } else if bin_stem == file_stem { + None + } else if src_dir.ends_with("examples") || src_dir.parent().unwrap().ends_with("build") { + Some((src_dir, bin_stem)) + } else { + None + } + } + + /// Return the filenames that the given target for the given profile will + /// generate as a list of 3-tuples (filename, link_dst, linkable) + /// + /// - filename: filename rustc compiles to. (Often has metadata suffix). 
+ /// - link_dst: Optional file to link/copy the result to (without metadata suffix) + /// - linkable: Whether possible to link against file (eg it's a library) + pub fn target_filenames( + &mut self, + unit: &Unit<'a>, + ) -> CargoResult, TargetFileType)>>> { + if let Some(cache) = self.target_filenames.get(unit) { + return Ok(Arc::clone(cache)); + } + + let result = self.calc_target_filenames(unit); + if let Ok(ref ret) = result { + self.target_filenames.insert(*unit, Arc::clone(ret)); + } + result + } + + fn calc_target_filenames( + &mut self, + unit: &Unit<'a>, + ) -> CargoResult, TargetFileType)>>> { + let out_dir = self.out_dir(unit); + let stem = self.file_stem(unit); + let link_stem = self.link_stem(unit); + let info = if unit.target.for_host() { + &self.host_info + } else { + &self.target_info + }; + + let mut ret = Vec::new(); + let mut unsupported = Vec::new(); + { + if unit.profile.check { + let filename = out_dir.join(format!("lib{}.rmeta", stem)); + let link_dst = link_stem + .clone() + .map(|(ld, ls)| ld.join(format!("lib{}.rmeta", ls))); + ret.push((filename, link_dst, TargetFileType::Linkable)); + } else { + let mut add = |crate_type: &str, file_type: TargetFileType| -> CargoResult<()> { + let crate_type = if crate_type == "lib" { + "rlib" + } else { + crate_type + }; + let mut crate_types = info.crate_types.borrow_mut(); + let entry = crate_types.entry(crate_type.to_string()); + let crate_type_info = match entry { + Entry::Occupied(o) => &*o.into_mut(), + Entry::Vacant(v) => { + let value = info.discover_crate_type(v.key())?; + &*v.insert(value) + } + }; + match *crate_type_info { + Some((ref prefix, ref suffix)) => { + let suffixes = add_target_specific_suffixes( + self.target_triple(), + crate_type, + unit.target.kind(), + suffix, + file_type, + ); + for (suffix, file_type, should_replace_hyphens) in suffixes { + // wasm bin target will generate two files in deps such as + // "web-stuff.js" and "web_stuff.wasm". Note the different usages of + // "-" and "_". should_replace_hyphens is a flag to indicate that + // we need to convert the stem "web-stuff" to "web_stuff", so we + // won't miss "web_stuff.wasm". + let conv = |s: String| { + if should_replace_hyphens { + s.replace("-", "_") + } else { + s + } + }; + let filename = out_dir.join(format!( + "{}{}{}", + prefix, + conv(stem.clone()), + suffix + )); + let link_dst = link_stem.clone().map(|(ld, ls)| { + ld.join(format!("{}{}{}", prefix, conv(ls), suffix)) + }); + ret.push((filename, link_dst, file_type)); + } + Ok(()) + } + // not supported, don't worry about it + None => { + unsupported.push(crate_type.to_string()); + Ok(()) + } + } + }; + //info!("{:?}", unit); + match *unit.target.kind() { + TargetKind::Bin + | TargetKind::CustomBuild + | TargetKind::ExampleBin + | TargetKind::Bench + | TargetKind::Test => { + add("bin", TargetFileType::Normal)?; + } + TargetKind::Lib(..) | TargetKind::ExampleLib(..) 
if unit.profile.test => { + add("bin", TargetFileType::Normal)?; + } + TargetKind::ExampleLib(ref kinds) | TargetKind::Lib(ref kinds) => { + for kind in kinds { + add( + kind.crate_type(), + if kind.linkable() { + TargetFileType::Linkable + } else { + TargetFileType::Normal + }, + )?; + } + } + } + } + } + if ret.is_empty() { + if !unsupported.is_empty() { + bail!( + "cannot compile `{}` package, because target `{}` \ + does not support the `{}` crate type{}", + unit.pkg, + self.target_triple(), + unsupported.join(", "), + if unsupported.len() == 1 { "" } else { "s" } + ) + } + bail!( + "cannot compile `{}` as the target `{}` does not \ + support any of the output crate types", + unit.pkg, + self.target_triple() + ); + } + info!("Target filenames: {:?}", ret); + + Ok(Arc::new(ret)) + } + + /// For a package, return all targets which are registered as dependencies + /// for that package. + // TODO: this ideally should be `-> &[Unit<'a>]` + pub fn dep_targets(&self, unit: &Unit<'a>) -> Vec> { + // If this build script's execution has been overridden then we don't + // actually depend on anything, we've reached the end of the dependency + // chain as we've got all the info we're gonna get. + // + // Note there's a subtlety about this piece of code! The + // `build_script_overridden` map here is populated in + // `custom_build::build_map` which you need to call before inspecting + // dependencies. However, that code itself calls this method and + // gets a full pre-filtered set of dependencies. This is not super + // obvious, and clear, but it does work at the moment. + if unit.profile.run_custom_build { + let key = (unit.pkg.package_id().clone(), unit.kind); + if self.build_script_overridden.contains(&key) { + return Vec::new(); + } + } + self.unit_dependencies[unit].clone() + } + + fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool { + // If this dependency is only available for certain platforms, + // make sure we're only enabling it for that platform. + let platform = match dep.platform() { + Some(p) => p, + None => return true, + }; + let (name, info) = match kind { + Kind::Host => (self.host_triple(), &self.host_info), + Kind::Target => (self.target_triple(), &self.target_info), + }; + platform.matches(name, info.cfg.as_ref().map(|cfg| &cfg[..])) + } + + /// Gets a package for the given package id. 
+ pub fn get_package(&self, id: &PackageId) -> CargoResult<&'a Package> { + self.packages.get(id) + } + + /// Get the user-specified linker for a particular host or target + pub fn linker(&self, kind: Kind) -> Option<&Path> { + self.target_config(kind).linker.as_ref().map(|s| s.as_ref()) + } + + /// Get the user-specified `ar` program for a particular host or target + pub fn ar(&self, kind: Kind) -> Option<&Path> { + self.target_config(kind).ar.as_ref().map(|s| s.as_ref()) + } + + /// Get the list of cfg printed out from the compiler for the specified kind + pub fn cfg(&self, kind: Kind) -> &[Cfg] { + let info = match kind { + Kind::Host => &self.host_info, + Kind::Target => &self.target_info, + }; + info.cfg.as_ref().map(|s| &s[..]).unwrap_or(&[]) + } + + /// Get the target configuration for a particular host or target + fn target_config(&self, kind: Kind) -> &TargetConfig { + match kind { + Kind::Host => &self.build_config.host, + Kind::Target => &self.build_config.target, + } + } + + /// Number of jobs specified for this build + pub fn jobs(&self) -> u32 { + self.build_config.jobs + } + + pub fn lib_profile(&self) -> &'a Profile { + let (normal, test) = if self.build_config.release { + (&self.profiles.release, &self.profiles.bench_deps) + } else { + (&self.profiles.dev, &self.profiles.test_deps) + }; + if self.build_config.test { + test + } else { + normal + } + } + + pub fn build_script_profile(&self, _pkg: &PackageId) -> &'a Profile { + // TODO: should build scripts always be built with the same library + // profile? How is this controlled at the CLI layer? + self.lib_profile() + } + + pub fn incremental_args(&self, unit: &Unit) -> CargoResult> { + // There's a number of ways to configure incremental compilation right + // now. In order of descending priority (first is highest priority) we + // have: + // + // * `CARGO_INCREMENTAL` - this is blanket used unconditionally to turn + // on/off incremental compilation for any cargo subcommand. We'll + // respect this if set. + // * `build.incremental` - in `.cargo/config` this blanket key can + // globally for a system configure whether incremental compilation is + // enabled. Note that setting this to `true` will not actually affect + // all builds though. For example a `true` value doesn't enable + // release incremental builds, only dev incremental builds. This can + // be useful to globally disable incremental compilation like + // `CARGO_INCREMENTAL`. + // * `profile.dev.incremental` - in `Cargo.toml` specific profiles can + // be configured to enable/disable incremental compilation. This can + // be primarily used to disable incremental when buggy for a project. + // * Finally, each profile has a default for whether it will enable + // incremental compilation or not. Primarily development profiles + // have it enabled by default while release profiles have it disabled + // by default. + let global_cfg = self.config.get_bool("build.incremental")?.map(|c| c.val); + let incremental = match (self.incremental_env, global_cfg, unit.profile.incremental) { + (Some(v), _, _) => v, + (None, Some(false), _) => false, + (None, _, other) => other, + }; + + if !incremental { + return Ok(Vec::new()); + } + + // Only enable incremental compilation for sources the user can + // modify (aka path sources). 
For things that change infrequently, + // non-incremental builds yield better performance in the compiler + // itself (aka crates.io / git dependencies) + // + // (see also https://github.com/rust-lang/cargo/issues/3972) + if !unit.pkg.package_id().source_id().is_path() { + return Ok(Vec::new()); + } + + let dir = self.layout(unit.kind).incremental().display(); + Ok(vec!["-C".to_string(), format!("incremental={}", dir)]) + } + + pub fn rustflags_args(&self, unit: &Unit) -> CargoResult> { + env_args( + self.config, + &self.build_config, + self.info(&unit.kind), + unit.kind, + "RUSTFLAGS", + ) + } + + pub fn rustdocflags_args(&self, unit: &Unit) -> CargoResult> { + env_args( + self.config, + &self.build_config, + self.info(&unit.kind), + unit.kind, + "RUSTDOCFLAGS", + ) + } + + pub fn show_warnings(&self, pkg: &PackageId) -> bool { + pkg.source_id().is_path() || self.config.extra_verbose() + } + + fn info(&self, kind: &Kind) -> &TargetInfo { + match *kind { + Kind::Host => &self.host_info, + Kind::Target => &self.target_info, + } + } +} + +/// Acquire extra flags to pass to the compiler from various locations. +/// +/// The locations are: +/// +/// - the `RUSTFLAGS` environment variable +/// +/// then if this was not found +/// +/// - `target.*.rustflags` from the manifest (Cargo.toml) +/// - `target.cfg(..).rustflags` from the manifest +/// +/// then if neither of these were found +/// +/// - `build.rustflags` from the manifest +/// +/// Note that if a `target` is specified, no args will be passed to host code (plugins, build +/// scripts, ...), even if it is the same as the target. +fn env_args( + config: &Config, + build_config: &BuildConfig, + target_info: &TargetInfo, + kind: Kind, + name: &str, +) -> CargoResult> { + // We *want* to apply RUSTFLAGS only to builds for the + // requested target architecture, and not to things like build + // scripts and plugins, which may be for an entirely different + // architecture. Cargo's present architecture makes it quite + // hard to only apply flags to things that are not build + // scripts and plugins though, so we do something more hacky + // instead to avoid applying the same RUSTFLAGS to multiple targets + // arches: + // + // 1) If --target is not specified we just apply RUSTFLAGS to + // all builds; they are all going to have the same target. + // + // 2) If --target *is* specified then we only apply RUSTFLAGS + // to compilation units with the Target kind, which indicates + // it was chosen by the --target flag. + // + // This means that, e.g. even if the specified --target is the + // same as the host, build scripts in plugins won't get + // RUSTFLAGS. + let compiling_with_target = build_config.requested_target.is_some(); + let is_target_kind = kind == Kind::Target; + + if compiling_with_target && !is_target_kind { + // This is probably a build script or plugin and we're + // compiling with --target. In this scenario there are + // no rustflags we can apply. + return Ok(Vec::new()); + } + + // First try RUSTFLAGS from the environment + if let Ok(a) = env::var(name) { + let args = a.split(' ') + .map(str::trim) + .filter(|s| !s.is_empty()) + .map(str::to_string); + return Ok(args.collect()); + } + + let mut rustflags = Vec::new(); + + let name = name.chars() + .flat_map(|c| c.to_lowercase()) + .collect::(); + // Then the target.*.rustflags value... 
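+ // For example, either of the following forms in `.cargo/config` is
+ // accepted here, since `get_list_or_split_string` takes a TOML list or
+ // a space-separated string:
+ //
+ // [target.x86_64-unknown-linux-gnu]
+ // rustflags = ["-C", "target-cpu=native"]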
+ let target = build_config + .requested_target + .as_ref() + .unwrap_or(&build_config.host_triple); + let key = format!("target.{}.{}", target, name); + if let Some(args) = config.get_list_or_split_string(&key)? { + let args = args.val.into_iter(); + rustflags.extend(args); + } + // ...including target.'cfg(...)'.rustflags + if let Some(ref target_cfg) = target_info.cfg { + if let Some(table) = config.get_table("target")? { + let cfgs = table.val.keys().filter_map(|t| { + if t.starts_with("cfg(") && t.ends_with(')') { + let cfg = &t[4..t.len() - 1]; + CfgExpr::from_str(cfg).ok().and_then(|c| { + if c.matches(target_cfg) { + Some(t) + } else { + None + } + }) + } else { + None + } + }); + + // Note that we may have multiple matching `[target]` sections and + // because we're passing flags to the compiler this can affect + // cargo's caching and whether it rebuilds. Ensure a deterministic + // ordering through sorting for now. We may perhaps one day wish to + // ensure a deterministic ordering via the order keys were defined + // in files perhaps. + let mut cfgs = cfgs.collect::>(); + cfgs.sort(); + + for n in cfgs { + let key = format!("target.{}.{}", n, name); + if let Some(args) = config.get_list_or_split_string(&key)? { + let args = args.val.into_iter(); + rustflags.extend(args); + } + } + } + } + + if !rustflags.is_empty() { + return Ok(rustflags); + } + + // Then the build.rustflags value + let key = format!("build.{}", name); + if let Some(args) = config.get_list_or_split_string(&key)? { + let args = args.val.into_iter(); + return Ok(args.collect()); + } + + Ok(Vec::new()) +} + +impl fmt::Display for Metadata { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{:016x}", self.0) + } +} + +/// Takes rustc output (using specialized command line args), and calculates the file prefix and +/// suffix for the given crate type, or returns None if the type is not supported. (e.g. for a +/// rust library like libcargo.rlib, prefix = "lib", suffix = "rlib"). +/// +/// The caller needs to ensure that the lines object is at the correct line for the given crate +/// type: this is not checked. +// This function can not handle more than 1 file per type (with wasm32-unknown-emscripten, there +// are 2 files for bin (.wasm and .js)) +fn parse_crate_type( + crate_type: &str, + error: &str, + lines: &mut str::Lines, +) -> CargoResult> { + let not_supported = error.lines().any(|line| { + (line.contains("unsupported crate type") || line.contains("unknown crate type")) + && line.contains(crate_type) + }); + if not_supported { + return Ok(None); + } + let line = match lines.next() { + Some(line) => line, + None => bail!( + "malformed output when learning about \ + crate-type {} information", + crate_type + ), + }; + let mut parts = line.trim().split("___"); + let prefix = parts.next().unwrap(); + let suffix = match parts.next() { + Some(part) => part, + None => bail!( + "output of --print=file-names has changed in \ + the compiler, cannot parse" + ), + }; + + Ok(Some((prefix.to_string(), suffix.to_string()))) +} + +// (not a rustdoc) +// Return a list of 3-tuples (suffix, file_type, should_replace_hyphens). +// +// should_replace_hyphens will be used by the caller to replace "-" with "_" +// in a bin_stem. See the caller side (calc_target_filenames()) for details. 
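+//
+// Worked example for the wasm case below: a bin target `web-stuff` built
+// for `wasm32-unknown-emscripten` starts with (".js", Normal, false) and
+// gains (".wasm", Normal, true), so the caller ends up emitting both
+// `web-stuff.js` and `web_stuff.wasm`.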
+fn add_target_specific_suffixes( + target_triple: &str, + crate_type: &str, + target_kind: &TargetKind, + suffix: &str, + file_type: TargetFileType, +) -> Vec<(String, TargetFileType, bool)> { + let mut ret = vec![(suffix.to_string(), file_type, false)]; + + // rust-lang/cargo#4500 + if target_triple.ends_with("pc-windows-msvc") && crate_type.ends_with("dylib") + && suffix == ".dll" + { + ret.push((".dll.lib".to_string(), TargetFileType::Normal, false)); + } + + // rust-lang/cargo#4535 + if target_triple.starts_with("wasm32-") && crate_type == "bin" && suffix == ".js" { + ret.push((".wasm".to_string(), TargetFileType::Normal, true)); + } + + // rust-lang/cargo#4490, rust-lang/cargo#4960 + // - only uplift debuginfo for binaries. + // tests are run directly from target/debug/deps/ + // and examples are inside target/debug/examples/ which already have symbols next to them + // so no need to do anything. + if *target_kind == TargetKind::Bin { + if target_triple.contains("-apple-") { + ret.push((".dSYM".to_string(), TargetFileType::DebugInfo, false)); + } else if target_triple.ends_with("-msvc") { + ret.push((".pdb".to_string(), TargetFileType::DebugInfo, false)); + } + } + + ret +} diff --git a/src/cargo/ops/cargo_rustc/context/unit_dependencies.rs b/src/cargo/ops/cargo_rustc/context/unit_dependencies.rs new file mode 100644 index 000000000..2fe20cde0 --- /dev/null +++ b/src/cargo/ops/cargo_rustc/context/unit_dependencies.rs @@ -0,0 +1,301 @@ +//! Constructs the dependency graph for compilation. +//! +//! Rust code is typically organized as a set of Cargo packages. The +//! dependencies between the packages themselves are stored in the +//! `Resolve` struct. However, we can't use that information as is for +//! compilation! A package typically contains several targets, or crates, +//! and these targets has inter-dependencies. For example, you need to +//! compile the `lib` target before the `bin` one, and you need to compile +//! `build.rs` before either of those. +//! +//! So, we need to lower the `Resolve`, which specifies dependencies between +//! *packages*, to a graph of dependencies between their *targets*, and this +//! is exactly what this module is doing! Well, almost exactly: another +//! complication is that we might want to compile the same target several times +//! (for example, with and without tests), so we actually build a dependency +//! graph of `Unit`s, which capture these properties. + +use ops::Unit; +use std::collections::HashMap; +use CargoResult; +use core::dependency::Kind as DepKind; +use ops::{Context, Kind}; +use core::Target; +use core::Profile; + +pub fn build_unit_dependencies<'a, 'cfg>( + roots: &[Unit<'a>], + cx: &Context<'a, 'cfg>, +) -> CargoResult, Vec>>> { + let mut deps = HashMap::new(); + for unit in roots.iter() { + deps_of(unit, cx, &mut deps)?; + } + + Ok(deps) +} + +fn deps_of<'a, 'b, 'cfg>( + unit: &Unit<'a>, + cx: &Context<'a, 'cfg>, + deps: &'b mut HashMap, Vec>>, +) -> CargoResult<&'b [Unit<'a>]> { + if !deps.contains_key(unit) { + let unit_deps = compute_deps(unit, cx, deps)?; + deps.insert(*unit, unit_deps.clone()); + for unit in unit_deps { + deps_of(&unit, cx, deps)?; + } + } + Ok(deps[unit].as_ref()) +} + +/// For a package, return all targets which are registered as dependencies +/// for that package. 
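+///
+/// Dependencies are filtered before being lowered to `Unit`s: build
+/// dependencies only apply to build scripts, non-transitive (dev)
+/// dependencies only to tests/examples, and platform-specific or optional
+/// dependencies only when actually activated.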
+fn compute_deps<'a, 'b, 'cfg>( + unit: &Unit<'a>, + cx: &Context<'a, 'cfg>, + deps: &'b mut HashMap, Vec>>, +) -> CargoResult>> { + if unit.profile.run_custom_build { + return compute_deps_custom_build(unit, cx, deps); + } else if unit.profile.doc && !unit.profile.test { + return compute_deps_doc(unit, cx); + } + + let id = unit.pkg.package_id(); + let deps = cx.resolve.deps(id); + let mut ret = deps.filter(|dep| { + unit.pkg + .dependencies() + .iter() + .filter(|d| d.name() == dep.name() && d.version_req().matches(dep.version())) + .any(|d| { + // If this target is a build command, then we only want build + // dependencies, otherwise we want everything *other than* build + // dependencies. + if unit.target.is_custom_build() != d.is_build() { + return false; + } + + // If this dependency is *not* a transitive dependency, then it + // only applies to test/example targets + if !d.is_transitive() && !unit.target.is_test() && !unit.target.is_example() + && !unit.profile.test + { + return false; + } + + // If this dependency is only available for certain platforms, + // make sure we're only enabling it for that platform. + if !cx.dep_platform_activated(d, unit.kind) { + return false; + } + + // If the dependency is optional, then we're only activating it + // if the corresponding feature was activated + if d.is_optional() && !cx.resolve.features(id).contains(&*d.name()) { + return false; + } + + // If we've gotten past all that, then this dependency is + // actually used! + true + }) + }).filter_map(|id| match cx.get_package(id) { + Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| { + let unit = Unit { + pkg, + target: t, + profile: lib_or_check_profile(unit, t, cx), + kind: unit.kind.for_target(t), + }; + Ok(unit) + }), + Err(e) => Some(Err(e)), + }) + .collect::>>()?; + + // If this target is a build script, then what we've collected so far is + // all we need. If this isn't a build script, then it depends on the + // build script if there is one. + if unit.target.is_custom_build() { + return Ok(ret); + } + ret.extend(dep_build_script(unit, cx)); + + // If this target is a binary, test, example, etc, then it depends on + // the library of the same package. The call to `resolve.deps` above + // didn't include `pkg` in the return values, so we need to special case + // it here and see if we need to push `(pkg, pkg_lib_target)`. + if unit.target.is_lib() && !unit.profile.doc { + return Ok(ret); + } + ret.extend(maybe_lib(unit, cx)); + + // Integration tests/benchmarks require binaries to be built + if unit.profile.test && (unit.target.is_test() || unit.target.is_bench()) { + ret.extend( + unit.pkg + .targets() + .iter() + .filter(|t| { + let no_required_features = Vec::new(); + + t.is_bin() && + // Skip binaries with required features that have not been selected. + t.required_features().unwrap_or(&no_required_features).iter().all(|f| { + cx.resolve.features(id).contains(f) + }) + }) + .map(|t| Unit { + pkg: unit.pkg, + target: t, + profile: lib_or_check_profile(unit, t, cx), + kind: unit.kind.for_target(t), + }), + ); + } + Ok(ret) +} + +/// Returns the dependencies needed to run a build script. +/// +/// The `unit` provided must represent an execution of a build script, and +/// the returned set of units must all be run before `unit` is run. +fn compute_deps_custom_build<'a, 'cfg>( + unit: &Unit<'a>, + cx: &Context<'a, 'cfg>, + deps: &mut HashMap, Vec>>, +) -> CargoResult>> { + // When not overridden, then the dependencies to run a build script are: + // + // 1. 
Compiling the build script itself
+ // 2. For each immediate dependency of our package which has a `links`
+ // key, the execution of that build script.
+ let not_custom_build = unit.pkg
+ .targets()
+ .iter()
+ .find(|t| !t.is_custom_build())
+ .unwrap();
+ let tmp = Unit {
+ target: not_custom_build,
+ profile: &cx.profiles.dev,
+ ..*unit
+ };
+ let deps = deps_of(&tmp, cx, deps)?;
+ Ok(deps.iter()
+ .filter_map(|unit| {
+ if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
+ return None;
+ }
+ dep_build_script(unit, cx)
+ })
+ .chain(Some(Unit {
+ profile: cx.build_script_profile(unit.pkg.package_id()),
+ kind: Kind::Host, // build scripts always compiled for the host
+ ..*unit
+ }))
+ .collect())
+}
+
+/// Returns the dependencies necessary to document a package
+fn compute_deps_doc<'a, 'cfg>(
+ unit: &Unit<'a>,
+ cx: &Context<'a, 'cfg>,
+) -> CargoResult<Vec<Unit<'a>>> {
+ let deps = cx.resolve
+ .deps(unit.pkg.package_id())
+ .filter(|dep| {
+ unit.pkg
+ .dependencies()
+ .iter()
+ .filter(|d| d.name() == dep.name())
+ .any(|dep| match dep.kind() {
+ DepKind::Normal => cx.dep_platform_activated(dep, unit.kind),
+ _ => false,
+ })
+ })
+ .map(|dep| cx.get_package(dep));
+
+ // To document a library, we depend on dependencies actually being
+ // built. If we're documenting *all* libraries, then we also depend on
+ // the documentation of the library being built.
+ let mut ret = Vec::new();
+ for dep in deps {
+ let dep = dep?;
+ let lib = match dep.targets().iter().find(|t| t.is_lib()) {
+ Some(lib) => lib,
+ None => continue,
+ };
+ ret.push(Unit {
+ pkg: dep,
+ target: lib,
+ profile: lib_or_check_profile(unit, lib, cx),
+ kind: unit.kind.for_target(lib),
+ });
+ if cx.build_config.doc_all {
+ ret.push(Unit {
+ pkg: dep,
+ target: lib,
+ profile: &cx.profiles.doc,
+ kind: unit.kind.for_target(lib),
+ });
+ }
+ }
+
+ // Be sure to build/run the build script for documented libraries as well
+ ret.extend(dep_build_script(unit, cx));
+
+ // If we document a binary, we need the library available
+ if unit.target.is_bin() {
+ ret.extend(maybe_lib(unit, cx));
+ }
+ Ok(ret)
+}
+
+fn maybe_lib<'a, 'cfg>(unit: &Unit<'a>, cx: &Context<'a, 'cfg>) -> Option<Unit<'a>> {
+ unit.pkg
+ .targets()
+ .iter()
+ .find(|t| t.linkable())
+ .map(|t| Unit {
+ pkg: unit.pkg,
+ target: t,
+ profile: lib_or_check_profile(unit, t, cx),
+ kind: unit.kind.for_target(t),
+ })
+}
+
+/// If a build script is scheduled to be run for the package specified by
+/// `unit`, this function will return the unit to run that build script.
+///
+/// Overriding a build script simply means that the running of the build
+/// script itself doesn't have any dependencies, so even in that case a unit
+/// of work is still returned. `None` is only returned if the package has no
+/// build script.
+fn dep_build_script<'a, 'cfg>(unit: &Unit<'a>, cx: &Context<'a, 'cfg>) -> Option> { + unit.pkg + .targets() + .iter() + .find(|t| t.is_custom_build()) + .map(|t| Unit { + pkg: unit.pkg, + target: t, + profile: &cx.profiles.custom_build, + kind: unit.kind, + }) +} + +fn lib_or_check_profile<'a, 'cfg>( + unit: &Unit, + target: &Target, + cx: &Context<'a, 'cfg>, +) -> &'a Profile { + if !target.is_custom_build() && !target.for_host() + && (unit.profile.check || (unit.profile.doc && !unit.profile.test)) + { + return &cx.profiles.check; + } + cx.lib_profile() +} diff --git a/src/cargo/ops/cargo_rustc/custom_build.rs b/src/cargo/ops/cargo_rustc/custom_build.rs new file mode 100644 index 000000000..2b36d28a4 --- /dev/null +++ b/src/cargo/ops/cargo_rustc/custom_build.rs @@ -0,0 +1,615 @@ +use std::collections::{BTreeSet, HashSet}; +use std::collections::hash_map::{Entry, HashMap}; +use std::fs; +use std::path::{Path, PathBuf}; +use std::str; +use std::sync::{Arc, Mutex}; + +use core::PackageId; +use util::{Cfg, Freshness}; +use util::errors::{CargoResult, CargoResultExt}; +use util::{self, internal, paths, profile}; +use util::machine_message; + +use super::job::Work; +use super::{fingerprint, Context, Kind, Unit}; + +/// Contains the parsed output of a custom build script. +#[derive(Clone, Debug, Hash)] +pub struct BuildOutput { + /// Paths to pass to rustc with the `-L` flag + pub library_paths: Vec, + /// Names and link kinds of libraries, suitable for the `-l` flag + pub library_links: Vec, + /// Various `--cfg` flags to pass to the compiler + pub cfgs: Vec, + /// Additional environment variables to run the compiler with. + pub env: Vec<(String, String)>, + /// Metadata to pass to the immediate dependencies + pub metadata: Vec<(String, String)>, + /// Paths to trigger a rerun of this build script. + pub rerun_if_changed: Vec, + /// Environment variables which, when changed, will cause a rebuild. + pub rerun_if_env_changed: Vec, + /// Warnings generated by this build, + pub warnings: Vec, +} + +/// Map of packages to build info +pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>; + +/// Build info and overrides +pub struct BuildState { + pub outputs: Mutex, + overrides: HashMap<(String, Kind), BuildOutput>, +} + +#[derive(Default)] +pub struct BuildScripts { + // Cargo will use this `to_link` vector to add -L flags to compiles as we + // propagate them upwards towards the final build. Note, however, that we + // need to preserve the ordering of `to_link` to be topologically sorted. + // This will ensure that build scripts which print their paths properly will + // correctly pick up the files they generated (if there are duplicates + // elsewhere). + // + // To preserve this ordering, the (id, kind) is stored in two places, once + // in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain + // this as we're building interactively below to ensure that the memory + // usage here doesn't blow up too much. + // + // For more information, see #2354 + pub to_link: Vec<(PackageId, Kind)>, + seen_to_link: HashSet<(PackageId, Kind)>, + pub plugins: BTreeSet, +} + +pub struct BuildDeps { + pub build_script_output: PathBuf, + pub rerun_if_changed: Vec, + pub rerun_if_env_changed: Vec, +} + +/// Prepares a `Work` that executes the target as a custom build script. +/// +/// The `req` given is the requirement which this run of the build script will +/// prepare work for. 
If the requirement is specified as both the target and the +/// host platforms it is assumed that the two are equal and the build script is +/// only run once (not twice). +pub fn prepare<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, +) -> CargoResult<(Work, Work, Freshness)> { + let _p = profile::start(format!( + "build script prepare: {}/{}", + unit.pkg, + unit.target.name() + )); + + let key = (unit.pkg.package_id().clone(), unit.kind); + let overridden = cx.build_script_overridden.contains(&key); + let (work_dirty, work_fresh) = if overridden { + (Work::noop(), Work::noop()) + } else { + build_work(cx, unit)? + }; + + // Now that we've prep'd our work, build the work needed to manage the + // fingerprint and then start returning that upwards. + let (freshness, dirty, fresh) = fingerprint::prepare_build_cmd(cx, unit)?; + + Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness)) +} + +fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<(Work, Work)> { + assert!(unit.profile.run_custom_build); + let dependencies = cx.dep_targets(unit); + let build_script_unit = dependencies + .iter() + .find(|d| !d.profile.run_custom_build && d.target.is_custom_build()) + .expect("running a script not depending on an actual script"); + let script_output = cx.build_script_dir(build_script_unit); + let build_output = cx.build_script_out_dir(unit); + + // Building the command to execute + let to_exec = script_output.join(unit.target.name()); + + // Start preparing the process to execute, starting out with some + // environment variables. Note that the profile-related environment + // variables are not set with this the build script's profile but rather the + // package's library profile. + let profile = cx.lib_profile(); + let to_exec = to_exec.into_os_string(); + let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?; + cmd.env("OUT_DIR", &build_output) + .env("CARGO_MANIFEST_DIR", unit.pkg.root()) + .env("NUM_JOBS", &cx.jobs().to_string()) + .env( + "TARGET", + &match unit.kind { + Kind::Host => cx.host_triple(), + Kind::Target => cx.target_triple(), + }, + ) + .env("DEBUG", &profile.debuginfo.is_some().to_string()) + .env("OPT_LEVEL", &profile.opt_level) + .env( + "PROFILE", + if cx.build_config.release { + "release" + } else { + "debug" + }, + ) + .env("HOST", cx.host_triple()) + .env("RUSTC", &cx.config.rustc()?.path) + .env("RUSTDOC", &*cx.config.rustdoc()?) + .inherit_jobserver(&cx.jobserver); + + if let Some(links) = unit.pkg.manifest().links() { + cmd.env("CARGO_MANIFEST_LINKS", links); + } + + // Be sure to pass along all enabled features for this package, this is the + // last piece of statically known information that we have. + for feat in cx.resolve.features(unit.pkg.package_id()).iter() { + cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1"); + } + + let mut cfg_map = HashMap::new(); + for cfg in cx.cfg(unit.kind) { + match *cfg { + Cfg::Name(ref n) => { + cfg_map.insert(n.clone(), None); + } + Cfg::KeyPair(ref k, ref v) => { + if let Some(ref mut values) = + *cfg_map.entry(k.clone()).or_insert_with(|| Some(Vec::new())) + { + values.push(v.clone()) + } + } + } + } + for (k, v) in cfg_map { + let k = format!("CARGO_CFG_{}", super::envify(&k)); + match v { + Some(list) => { + cmd.env(&k, list.join(",")); + } + None => { + cmd.env(&k, ""); + } + } + } + + // Gather the set of native dependencies that this package has along with + // some other variables to close over. 
+ // + // This information will be used at build-time later on to figure out which + // sorts of variables need to be discovered at that time. + let lib_deps = { + dependencies + .iter() + .filter_map(|unit| { + if unit.profile.run_custom_build { + Some(( + unit.pkg.manifest().links().unwrap().to_string(), + unit.pkg.package_id().clone(), + )) + } else { + None + } + }) + .collect::>() + }; + let pkg_name = unit.pkg.to_string(); + let build_state = Arc::clone(&cx.build_state); + let id = unit.pkg.package_id().clone(); + let (output_file, err_file, root_output_file) = { + let build_output_parent = build_output.parent().unwrap(); + let output_file = build_output_parent.join("output"); + let err_file = build_output_parent.join("stderr"); + let root_output_file = build_output_parent.join("root-output"); + (output_file, err_file, root_output_file) + }; + let root_output = cx.target_root().to_path_buf(); + let all = ( + id.clone(), + pkg_name.clone(), + Arc::clone(&build_state), + output_file.clone(), + root_output.clone(), + ); + let build_scripts = super::load_build_deps(cx, unit); + let kind = unit.kind; + let json_messages = cx.build_config.json_messages; + + // Check to see if the build script has already run, and if it has keep + // track of whether it has told us about some explicit dependencies + let prev_root_output = paths::read_bytes(&root_output_file) + .and_then(|bytes| util::bytes2path(&bytes)) + .unwrap_or_else(|_| cmd.get_cwd().unwrap().to_path_buf()); + let prev_output = + BuildOutput::parse_file(&output_file, &pkg_name, &prev_root_output, &root_output).ok(); + let deps = BuildDeps::new(&output_file, prev_output.as_ref()); + cx.build_explicit_deps.insert(*unit, deps); + + fs::create_dir_all(&script_output)?; + fs::create_dir_all(&build_output)?; + + // Prepare the unit of "dirty work" which will actually run the custom build + // command. + // + // Note that this has to do some extra work just before running the command + // to determine extra environment variables and such. + let dirty = Work::new(move |state| { + // Make sure that OUT_DIR exists. + // + // If we have an old build directory, then just move it into place, + // otherwise create it! + if fs::metadata(&build_output).is_err() { + fs::create_dir(&build_output).chain_err(|| { + internal( + "failed to create script output directory for \ + build command", + ) + })?; + } + + // For all our native lib dependencies, pick up their metadata to pass + // along to this custom build command. We're also careful to augment our + // dynamic library search path in case the build script depended on any + // native dynamic libraries. + { + let build_state = build_state.outputs.lock().unwrap(); + for (name, id) in lib_deps { + let key = (id.clone(), kind); + let state = build_state.get(&key).ok_or_else(|| { + internal(format!( + "failed to locate build state for env \ + vars: {}/{:?}", + id, kind + )) + })?; + let data = &state.metadata; + for &(ref key, ref value) in data.iter() { + cmd.env( + &format!("DEP_{}_{}", super::envify(&name), super::envify(key)), + value, + ); + } + } + if let Some(build_scripts) = build_scripts { + super::add_plugin_deps(&mut cmd, &build_state, &build_scripts, &root_output)?; + } + } + + // And now finally, run the build command itself! 
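+ //
+ // Every line the script prints is streamed back as it runs, and any
+ // line of the form `cargo:key=value` (for example
+ // `cargo:rustc-link-search=/some/path`) is additionally parsed out of
+ // the saved stdout by `BuildOutput::parse` below.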
+ state.running(&cmd); + let output = cmd.exec_with_streaming( + &mut |out_line| { + state.stdout(out_line); + Ok(()) + }, + &mut |err_line| { + state.stderr(err_line); + Ok(()) + }, + true, + ).map_err(|e| { + format_err!( + "failed to run custom build command for `{}`\n{}", + pkg_name, + e + ) + })?; + + // After the build command has finished running, we need to be sure to + // remember all of its output so we can later discover precisely what it + // was, even if we don't run the build command again (due to freshness). + // + // This is also the location where we provide feedback into the build + // state informing what variables were discovered via our script as + // well. + paths::write(&output_file, &output.stdout)?; + paths::write(&err_file, &output.stderr)?; + paths::write(&root_output_file, util::path2bytes(&root_output)?)?; + let parsed_output = + BuildOutput::parse(&output.stdout, &pkg_name, &root_output, &root_output)?; + + if json_messages { + let library_paths = parsed_output + .library_paths + .iter() + .map(|l| l.display().to_string()) + .collect::>(); + machine_message::emit(&machine_message::BuildScript { + package_id: &id, + linked_libs: &parsed_output.library_links, + linked_paths: &library_paths, + cfgs: &parsed_output.cfgs, + env: &parsed_output.env, + }); + } + + build_state.insert(id, kind, parsed_output); + Ok(()) + }); + + // Now that we've prepared our work-to-do, we need to prepare the fresh work + // itself to run when we actually end up just discarding what we calculated + // above. + let fresh = Work::new(move |_tx| { + let (id, pkg_name, build_state, output_file, root_output) = all; + let output = match prev_output { + Some(output) => output, + None => { + BuildOutput::parse_file(&output_file, &pkg_name, &prev_root_output, &root_output)? + } + }; + build_state.insert(id, kind, output); + Ok(()) + }); + + Ok((dirty, fresh)) +} + +impl BuildState { + pub fn new(config: &super::BuildConfig) -> BuildState { + let mut overrides = HashMap::new(); + let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host)); + let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target)); + for ((name, output), kind) in i1.chain(i2) { + overrides.insert((name.clone(), kind), output.clone()); + } + BuildState { + outputs: Mutex::new(HashMap::new()), + overrides, + } + } + + fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) { + self.outputs.lock().unwrap().insert((id, kind), output); + } +} + +impl BuildOutput { + pub fn parse_file( + path: &Path, + pkg_name: &str, + root_output_when_generated: &Path, + root_output: &Path, + ) -> CargoResult { + let contents = paths::read_bytes(path)?; + BuildOutput::parse(&contents, pkg_name, root_output_when_generated, root_output) + } + + // Parses the output of a script. + // The `pkg_name` is used for error messages. + pub fn parse( + input: &[u8], + pkg_name: &str, + root_output_when_generated: &Path, + root_output: &Path, + ) -> CargoResult { + let mut library_paths = Vec::new(); + let mut library_links = Vec::new(); + let mut cfgs = Vec::new(); + let mut env = Vec::new(); + let mut metadata = Vec::new(); + let mut rerun_if_changed = Vec::new(); + let mut rerun_if_env_changed = Vec::new(); + let mut warnings = Vec::new(); + let whence = format!("build script of `{}`", pkg_name); + + for line in input.split(|b| *b == b'\n') { + let line = match str::from_utf8(line) { + Ok(line) => line.trim(), + Err(..) 
=> continue, + }; + let mut iter = line.splitn(2, ':'); + if iter.next() != Some("cargo") { + // skip this line since it doesn't start with "cargo:" + continue; + } + let data = match iter.next() { + Some(val) => val, + None => continue, + }; + + // getting the `key=value` part of the line + let mut iter = data.splitn(2, '='); + let key = iter.next(); + let value = iter.next(); + let (key, value) = match (key, value) { + (Some(a), Some(b)) => (a, b.trim_right()), + // line started with `cargo:` but didn't match `key=value` + _ => bail!("Wrong output in {}: `{}`", whence, line), + }; + + let path = |val: &str| match Path::new(val).strip_prefix(root_output_when_generated) { + Ok(path) => root_output.join(path), + Err(_) => PathBuf::from(val), + }; + + match key { + "rustc-flags" => { + let (paths, links) = BuildOutput::parse_rustc_flags(value, &whence)?; + library_links.extend(links.into_iter()); + library_paths.extend(paths.into_iter()); + } + "rustc-link-lib" => library_links.push(value.to_string()), + "rustc-link-search" => library_paths.push(path(value)), + "rustc-cfg" => cfgs.push(value.to_string()), + "rustc-env" => env.push(BuildOutput::parse_rustc_env(value, &whence)?), + "warning" => warnings.push(value.to_string()), + "rerun-if-changed" => rerun_if_changed.push(path(value)), + "rerun-if-env-changed" => rerun_if_env_changed.push(value.to_string()), + _ => metadata.push((key.to_string(), value.to_string())), + } + } + + Ok(BuildOutput { + library_paths, + library_links, + cfgs, + env, + metadata, + rerun_if_changed, + rerun_if_env_changed, + warnings, + }) + } + + pub fn parse_rustc_flags( + value: &str, + whence: &str, + ) -> CargoResult<(Vec, Vec)> { + let value = value.trim(); + let mut flags_iter = value + .split(|c: char| c.is_whitespace()) + .filter(|w| w.chars().any(|c| !c.is_whitespace())); + let (mut library_paths, mut library_links) = (Vec::new(), Vec::new()); + while let Some(flag) = flags_iter.next() { + if flag != "-l" && flag != "-L" { + bail!( + "Only `-l` and `-L` flags are allowed in {}: `{}`", + whence, + value + ) + } + let value = match flags_iter.next() { + Some(v) => v, + None => bail!( + "Flag in rustc-flags has no value in {}: `{}`", + whence, + value + ), + }; + match flag { + "-l" => library_links.push(value.to_string()), + "-L" => library_paths.push(PathBuf::from(value)), + + // was already checked above + _ => bail!("only -l and -L flags are allowed"), + }; + } + Ok((library_paths, library_links)) + } + + pub fn parse_rustc_env(value: &str, whence: &str) -> CargoResult<(String, String)> { + let mut iter = value.splitn(2, '='); + let name = iter.next(); + let val = iter.next(); + match (name, val) { + (Some(n), Some(v)) => Ok((n.to_owned(), v.to_owned())), + _ => bail!("Variable rustc-env has no value in {}: {}", whence, value), + } + } +} + +impl BuildDeps { + pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps { + BuildDeps { + build_script_output: output_file.to_path_buf(), + rerun_if_changed: output + .map(|p| &p.rerun_if_changed) + .cloned() + .unwrap_or_default(), + rerun_if_env_changed: output + .map(|p| &p.rerun_if_env_changed) + .cloned() + .unwrap_or_default(), + } + } +} + +/// Compute the `build_scripts` map in the `Context` which tracks what build +/// scripts each package depends on. +/// +/// The global `build_scripts` map lists for all (package, kind) tuples what set +/// of packages' build script outputs must be considered. 
For example this lists +/// all dependencies' `-L` flags which need to be propagated transitively. +/// +/// The given set of targets to this function is the initial set of +/// targets/profiles which are being built. +pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CargoResult<()> { + let mut ret = HashMap::new(); + for unit in units { + build(&mut ret, cx, unit)?; + } + cx.build_scripts + .extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v)))); + return Ok(()); + + // Recursive function to build up the map we're constructing. This function + // memoizes all of its return values as it goes along. + fn build<'a, 'b, 'cfg>( + out: &'a mut HashMap, BuildScripts>, + cx: &mut Context<'b, 'cfg>, + unit: &Unit<'b>, + ) -> CargoResult<&'a BuildScripts> { + // Do a quick pre-flight check to see if we've already calculated the + // set of dependencies. + if out.contains_key(unit) { + return Ok(&out[unit]); + } + + { + let key = unit.pkg + .manifest() + .links() + .map(|l| (l.to_string(), unit.kind)); + let build_state = &cx.build_state; + if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) { + let key = (unit.pkg.package_id().clone(), unit.kind); + cx.build_script_overridden.insert(key.clone()); + build_state + .outputs + .lock() + .unwrap() + .insert(key, output.clone()); + } + } + + let mut ret = BuildScripts::default(); + + if !unit.target.is_custom_build() && unit.pkg.has_custom_build() { + add_to_link(&mut ret, unit.pkg.package_id(), unit.kind); + } + + // We want to invoke the compiler deterministically to be cache-friendly + // to rustc invocation caching schemes, so be sure to generate the same + // set of build script dependency orderings via sorting the targets that + // come out of the `Context`. + let mut targets = cx.dep_targets(unit); + targets.sort_by_key(|u| u.pkg.package_id()); + + for unit in targets.iter() { + let dep_scripts = build(out, cx, unit)?; + + if unit.target.for_host() { + ret.plugins + .extend(dep_scripts.to_link.iter().map(|p| &p.0).cloned()); + } else if unit.target.linkable() { + for &(ref pkg, kind) in dep_scripts.to_link.iter() { + add_to_link(&mut ret, pkg, kind); + } + } + } + + match out.entry(*unit) { + Entry::Vacant(entry) => Ok(entry.insert(ret)), + Entry::Occupied(_) => panic!("cyclic dependencies in `build_map`"), + } + } + + // When adding an entry to 'to_link' we only actually push it on if the + // script hasn't seen it yet (e.g. we don't push on duplicates). + fn add_to_link(scripts: &mut BuildScripts, pkg: &PackageId, kind: Kind) { + if scripts.seen_to_link.insert((pkg.clone(), kind)) { + scripts.to_link.push((pkg.clone(), kind)); + } + } +} diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs new file mode 100644 index 000000000..bd441d358 --- /dev/null +++ b/src/cargo/ops/cargo_rustc/fingerprint.rs @@ -0,0 +1,836 @@ +use std::env; +use std::fs; +use std::hash::{self, Hasher}; +use std::path::{Path, PathBuf}; +use std::sync::{Arc, Mutex}; + +use filetime::FileTime; +use serde::ser::{self, Serialize}; +use serde::de::{self, Deserialize}; +use serde_json; + +use core::{Epoch, Package, TargetKind}; +use util; +use util::{internal, profile, Dirty, Fresh, Freshness}; +use util::errors::{CargoResult, CargoResultExt}; +use util::paths; + +use super::job::Work; +use super::context::{Context, TargetFileType, Unit}; +use super::custom_build::BuildDeps; + +/// A tuple result of the `prepare_foo` functions in this module. 
+/// +/// The first element of the triple is whether the target in question is +/// currently fresh or not, and the second two elements are work to perform when +/// the target is dirty or fresh, respectively. +/// +/// Both units of work are always generated because a fresh package may still be +/// rebuilt if some upstream dependency changes. +pub type Preparation = (Freshness, Work, Work); + +/// Prepare the necessary work for the fingerprint for a specific target. +/// +/// When dealing with fingerprints, cargo gets to choose what granularity +/// "freshness" is considered at. One option is considering freshness at the +/// package level. This means that if anything in a package changes, the entire +/// package is rebuilt, unconditionally. This simplicity comes at a cost, +/// however, in that test-only changes will cause libraries to be rebuilt, which +/// is quite unfortunate! +/// +/// The cost was deemed high enough that fingerprints are now calculated at the +/// layer of a target rather than a package. Each target can then be kept track +/// of separately and only rebuilt as necessary. This requires cargo to +/// understand what the inputs are to a target, so we drive rustc with the +/// --dep-info flag to learn about all input files to a unit of compilation. +/// +/// This function will calculate the fingerprint for a target and prepare the +/// work necessary to either write the fingerprint or copy over all fresh files +/// from the old directories to their new locations. +pub fn prepare_target<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, +) -> CargoResult { + let _p = profile::start(format!( + "fingerprint: {} / {}", + unit.pkg.package_id(), + unit.target.name() + )); + let new = cx.fingerprint_dir(unit); + let loc = new.join(&filename(cx, unit)); + + debug!("fingerprint at: {}", loc.display()); + + let fingerprint = calculate(cx, unit)?; + let compare = compare_old_fingerprint(&loc, &*fingerprint); + log_compare(unit, &compare); + + // If our comparison failed (e.g. we're going to trigger a rebuild of this + // crate), then we also ensure the source of the crate passes all + // verification checks before we build it. + // + // The `Source::verify` method is intended to allow sources to execute + // pre-build checks to ensure that the relevant source code is all + // up-to-date and as expected. This is currently used primarily for + // directory sources which will use this hook to perform an integrity check + // on all files in the source to ensure they haven't changed. If they have + // changed then an error is issued. + if compare.is_err() { + let source_id = unit.pkg.package_id().source_id(); + let sources = cx.packages.sources(); + let source = sources + .get(source_id) + .ok_or_else(|| internal("missing package source"))?; + source.verify(unit.pkg.package_id())?; + } + + let root = cx.out_dir(unit); + let mut missing_outputs = false; + if unit.profile.doc { + missing_outputs = !root.join(unit.target.crate_name()) + .join("index.html") + .exists(); + } else { + for &(ref src, ref link_dst, file_type) in cx.target_filenames(unit)?.iter() { + if file_type == TargetFileType::DebugInfo { + continue; + } + missing_outputs |= !src.exists(); + if let Some(ref link_dst) = *link_dst { + missing_outputs |= !link_dst.exists(); + } + } + } + + let allow_failure = unit.profile.rustc_args.is_some(); + let target_root = cx.target_root().to_path_buf(); + let write_fingerprint = Work::new(move |_| { + match fingerprint.update_local(&target_root) { + Ok(()) => {} + Err(..) 
if allow_failure => return Ok(()), + Err(e) => return Err(e), + } + write_fingerprint(&loc, &*fingerprint) + }); + + let fresh = compare.is_ok() && !missing_outputs; + Ok(( + if fresh { Fresh } else { Dirty }, + write_fingerprint, + Work::noop(), + )) +} + +/// A fingerprint can be considered to be a "short string" representing the +/// state of a world for a package. +/// +/// If a fingerprint ever changes, then the package itself needs to be +/// recompiled. Inputs to the fingerprint include source code modifications, +/// compiler flags, compiler version, etc. This structure is not simply a +/// `String` due to the fact that some fingerprints cannot be calculated lazily. +/// +/// Path sources, for example, use the mtime of the corresponding dep-info file +/// as a fingerprint (all source files must be modified *before* this mtime). +/// This dep-info file is not generated, however, until after the crate is +/// compiled. As a result, this structure can be thought of as a fingerprint +/// to-be. The actual value can be calculated via `hash()`, but the operation +/// may fail as some files may not have been generated. +/// +/// Note that dependencies are taken into account for fingerprints because rustc +/// requires that whenever an upstream crate is recompiled that all downstream +/// dependants are also recompiled. This is typically tracked through +/// `DependencyQueue`, but it also needs to be retained here because Cargo can +/// be interrupted while executing, losing the state of the `DependencyQueue` +/// graph. +#[derive(Serialize, Deserialize)] +pub struct Fingerprint { + rustc: u64, + features: String, + target: u64, + profile: u64, + path: u64, + #[serde(serialize_with = "serialize_deps", deserialize_with = "deserialize_deps")] + deps: Vec<(String, Arc)>, + local: Vec, + #[serde(skip_serializing, skip_deserializing)] memoized_hash: Mutex>, + rustflags: Vec, + epoch: Epoch, +} + +fn serialize_deps(deps: &[(String, Arc)], ser: S) -> Result +where + S: ser::Serializer, +{ + deps.iter() + .map(|&(ref a, ref b)| (a, b.hash())) + .collect::>() + .serialize(ser) +} + +fn deserialize_deps<'de, D>(d: D) -> Result)>, D::Error> +where + D: de::Deserializer<'de>, +{ + let decoded = >::deserialize(d)?; + Ok(decoded + .into_iter() + .map(|(name, hash)| { + ( + name, + Arc::new(Fingerprint { + rustc: 0, + target: 0, + profile: 0, + path: 0, + local: vec![LocalFingerprint::Precalculated(String::new())], + features: String::new(), + deps: Vec::new(), + memoized_hash: Mutex::new(Some(hash)), + epoch: Epoch::Epoch2015, + rustflags: Vec::new(), + }), + ) + }) + .collect()) +} + +#[derive(Serialize, Deserialize, Hash)] +enum LocalFingerprint { + Precalculated(String), + MtimeBased(MtimeSlot, PathBuf), + EnvBased(String, Option), +} + +impl LocalFingerprint { + fn mtime(root: &Path, mtime: Option, path: &Path) -> LocalFingerprint { + let mtime = MtimeSlot(Mutex::new(mtime)); + assert!(path.is_absolute()); + let path = path.strip_prefix(root).unwrap_or(path); + LocalFingerprint::MtimeBased(mtime, path.to_path_buf()) + } +} + +struct MtimeSlot(Mutex>); + +impl Fingerprint { + fn update_local(&self, root: &Path) -> CargoResult<()> { + let mut hash_busted = false; + for local in self.local.iter() { + match *local { + LocalFingerprint::MtimeBased(ref slot, ref path) => { + let path = root.join(path); + let meta = fs::metadata(&path) + .chain_err(|| internal(format!("failed to stat `{}`", path.display())))?; + let mtime = FileTime::from_last_modification_time(&meta); + *slot.0.lock().unwrap() = 
Some(mtime); + } + LocalFingerprint::EnvBased(..) | LocalFingerprint::Precalculated(..) => continue, + } + hash_busted = true; + } + + if hash_busted { + *self.memoized_hash.lock().unwrap() = None; + } + Ok(()) + } + + fn hash(&self) -> u64 { + if let Some(s) = *self.memoized_hash.lock().unwrap() { + return s; + } + let ret = util::hash_u64(self); + *self.memoized_hash.lock().unwrap() = Some(ret); + ret + } + + fn compare(&self, old: &Fingerprint) -> CargoResult<()> { + if self.rustc != old.rustc { + bail!("rust compiler has changed") + } + if self.features != old.features { + bail!( + "features have changed: {} != {}", + self.features, + old.features + ) + } + if self.target != old.target { + bail!("target configuration has changed") + } + if self.path != old.path { + bail!("path to the compiler has changed") + } + if self.profile != old.profile { + bail!("profile configuration has changed") + } + if self.rustflags != old.rustflags { + bail!("RUSTFLAGS has changed") + } + if self.local.len() != old.local.len() { + bail!("local lens changed"); + } + if self.epoch != old.epoch { + bail!("epoch changed") + } + for (new, old) in self.local.iter().zip(&old.local) { + match (new, old) { + ( + &LocalFingerprint::Precalculated(ref a), + &LocalFingerprint::Precalculated(ref b), + ) => { + if a != b { + bail!("precalculated components have changed: {} != {}", a, b) + } + } + ( + &LocalFingerprint::MtimeBased(ref on_disk_mtime, ref ap), + &LocalFingerprint::MtimeBased(ref previously_built_mtime, ref bp), + ) => { + let on_disk_mtime = on_disk_mtime.0.lock().unwrap(); + let previously_built_mtime = previously_built_mtime.0.lock().unwrap(); + + let should_rebuild = match (*on_disk_mtime, *previously_built_mtime) { + (None, None) => false, + (Some(_), None) | (None, Some(_)) => true, + (Some(on_disk), Some(previously_built)) => on_disk > previously_built, + }; + + if should_rebuild { + bail!( + "mtime based components have changed: previously {:?} now {:?}, \ + paths are {:?} and {:?}", + *previously_built_mtime, + *on_disk_mtime, + ap, + bp + ) + } + } + ( + &LocalFingerprint::EnvBased(ref akey, ref avalue), + &LocalFingerprint::EnvBased(ref bkey, ref bvalue), + ) => { + if *akey != *bkey { + bail!("env vars changed: {} != {}", akey, bkey); + } + if *avalue != *bvalue { + bail!( + "env var `{}` changed: previously {:?} now {:?}", + akey, + bvalue, + avalue + ) + } + } + _ => bail!("local fingerprint type has changed"), + } + } + + if self.deps.len() != old.deps.len() { + bail!("number of dependencies has changed") + } + for (a, b) in self.deps.iter().zip(old.deps.iter()) { + if a.1.hash() != b.1.hash() { + bail!("new ({}) != old ({})", a.0, b.0) + } + } + Ok(()) + } +} + +impl hash::Hash for Fingerprint { + fn hash(&self, h: &mut H) { + let Fingerprint { + rustc, + ref features, + target, + path, + profile, + ref deps, + ref local, + epoch, + ref rustflags, + .. 
+ } = *self; + ( + rustc, + features, + target, + path, + profile, + local, + epoch, + rustflags, + ).hash(h); + + h.write_usize(deps.len()); + for &(ref name, ref fingerprint) in deps { + name.hash(h); + // use memoized dep hashes to avoid exponential blowup + h.write_u64(Fingerprint::hash(fingerprint)); + } + } +} + +impl hash::Hash for MtimeSlot { + fn hash(&self, h: &mut H) { + self.0.lock().unwrap().hash(h) + } +} + +impl ser::Serialize for MtimeSlot { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + self.0 + .lock() + .unwrap() + .map(|ft| (ft.seconds_relative_to_1970(), ft.nanoseconds())) + .serialize(s) + } +} + +impl<'de> de::Deserialize<'de> for MtimeSlot { + fn deserialize(d: D) -> Result + where + D: de::Deserializer<'de>, + { + let kind: Option<(u64, u32)> = de::Deserialize::deserialize(d)?; + Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| { + FileTime::from_seconds_since_1970(s, n) + })))) + } +} + +/// Calculates the fingerprint for a package/target pair. +/// +/// This fingerprint is used by Cargo to learn about when information such as: +/// +/// * A non-path package changes (changes version, changes revision, etc). +/// * Any dependency changes +/// * The compiler changes +/// * The set of features a package is built with changes +/// * The profile a target is compiled with changes (e.g. opt-level changes) +/// +/// Information like file modification time is only calculated for path +/// dependencies and is calculated in `calculate_target_fresh`. +fn calculate<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, +) -> CargoResult> { + if let Some(s) = cx.fingerprints.get(unit) { + return Ok(Arc::clone(s)); + } + + // Next, recursively calculate the fingerprint for all of our dependencies. + // + // Skip the fingerprints of build scripts as they may not always be + // available and the dirtiness propagation for modification is tracked + // elsewhere. Also skip fingerprints of binaries because they don't actually + // induce a recompile, they're just dependencies in the sense that they need + // to be built. + let deps = cx.dep_targets(unit); + let deps = deps.iter() + .filter(|u| !u.target.is_custom_build() && !u.target.is_bin()) + .map(|unit| { + calculate(cx, unit).map(|fingerprint| (unit.pkg.package_id().to_string(), fingerprint)) + }) + .collect::>>()?; + + // And finally, calculate what our own local fingerprint is + let local = if use_dep_info(unit) { + let dep_info = dep_info_loc(cx, unit); + let mtime = dep_info_mtime_if_fresh(unit.pkg, &dep_info)?; + LocalFingerprint::mtime(cx.target_root(), mtime, &dep_info) + } else { + let fingerprint = pkg_fingerprint(cx, unit.pkg)?; + LocalFingerprint::Precalculated(fingerprint) + }; + let mut deps = deps; + deps.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b)); + let extra_flags = if unit.profile.doc { + cx.rustdocflags_args(unit)? + } else { + cx.rustflags_args(unit)? + }; + let fingerprint = Arc::new(Fingerprint { + rustc: util::hash_u64(&cx.config.rustc()?.verbose_version), + target: util::hash_u64(&unit.target), + profile: util::hash_u64(&(&unit.profile, cx.incremental_args(unit)?)), + // Note that .0 is hashed here, not .1 which is the cwd. That doesn't + // actually affect the output artifact so there's no need to hash it. 
+ path: util::hash_u64(&super::path_args(cx, unit).0), + features: format!("{:?}", cx.resolve.features_sorted(unit.pkg.package_id())), + deps, + local: vec![local], + memoized_hash: Mutex::new(None), + epoch: unit.pkg.manifest().epoch(), + rustflags: extra_flags, + }); + cx.fingerprints.insert(*unit, Arc::clone(&fingerprint)); + Ok(fingerprint) +} + +// We want to use the mtime for files if we're a path source, but if we're a +// git/registry source, then the mtime of files may fluctuate, but they won't +// change so long as the source itself remains constant (which is the +// responsibility of the source) +fn use_dep_info(unit: &Unit) -> bool { + let path = unit.pkg.summary().source_id().is_path(); + !unit.profile.doc && path +} + +/// Prepare the necessary work for the fingerprint of a build command. +/// +/// Build commands are located on packages, not on targets. Additionally, we +/// don't have --dep-info to drive calculation of the fingerprint of a build +/// command. This brings up an interesting predicament which gives us a few +/// options to figure out whether a build command is dirty or not: +/// +/// 1. A build command is dirty if *any* file in a package changes. In theory +/// all files are candidate for being used by the build command. +/// 2. A build command is dirty if any file in a *specific directory* changes. +/// This may lose information as it may require files outside of the specific +/// directory. +/// 3. A build command must itself provide a dep-info-like file stating how it +/// should be considered dirty or not. +/// +/// The currently implemented solution is option (1), although it is planned to +/// migrate to option (2) in the near future. +pub fn prepare_build_cmd<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, +) -> CargoResult { + let _p = profile::start(format!("fingerprint build cmd: {}", unit.pkg.package_id())); + let new = cx.fingerprint_dir(unit); + let loc = new.join("build"); + + debug!("fingerprint at: {}", loc.display()); + + let (local, output_path) = build_script_local_fingerprints(cx, unit)?; + let mut fingerprint = Fingerprint { + rustc: 0, + target: 0, + profile: 0, + path: 0, + features: String::new(), + deps: Vec::new(), + local, + memoized_hash: Mutex::new(None), + epoch: Epoch::Epoch2015, + rustflags: Vec::new(), + }; + let compare = compare_old_fingerprint(&loc, &fingerprint); + log_compare(unit, &compare); + + // When we write out the fingerprint, we may want to actually change the + // kind of fingerprint being recorded. If we started out, then the previous + // run of the build script (or if it had never run before) may indicate to + // use the `Precalculated` variant with the `pkg_fingerprint`. If the build + // script then prints `rerun-if-changed`, however, we need to record what's + // necessary for that fingerprint. + // + // Hence, if there were some `rerun-if-changed` directives forcibly change + // the kind of fingerprint by reinterpreting the dependencies output by the + // build script. 
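+ //
+ // Concretely: once a script prints `cargo:rerun-if-changed=build.rs`,
+ // the whole-package `Precalculated` fingerprint is swapped for an mtime
+ // comparison against just the listed paths (see
+ // `local_fingerprints_deps` below).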
+    let state = Arc::clone(&cx.build_state);
+    let key = (unit.pkg.package_id().clone(), unit.kind);
+    let pkg_root = unit.pkg.root().to_path_buf();
+    let target_root = cx.target_root().to_path_buf();
+    let write_fingerprint = Work::new(move |_| {
+        if let Some(output_path) = output_path {
+            let outputs = state.outputs.lock().unwrap();
+            let outputs = &outputs[&key];
+            if !outputs.rerun_if_changed.is_empty() || !outputs.rerun_if_env_changed.is_empty() {
+                let deps = BuildDeps::new(&output_path, Some(outputs));
+                fingerprint.local = local_fingerprints_deps(&deps, &target_root, &pkg_root);
+                fingerprint.update_local(&target_root)?;
+            }
+        }
+        write_fingerprint(&loc, &fingerprint)
+    });
+
+    Ok((
+        if compare.is_ok() { Fresh } else { Dirty },
+        write_fingerprint,
+        Work::noop(),
+    ))
+}
+
+fn build_script_local_fingerprints<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+) -> CargoResult<(Vec<LocalFingerprint>, Option<PathBuf>)> {
+    let state = cx.build_state.outputs.lock().unwrap();
+    // First up, if this build script is entirely overridden, then we just
+    // return the hash of what we overrode it with.
+    //
+    // Note that the `None` here means that we don't want to update the local
+    // fingerprint afterwards because this is all just overridden.
+    if let Some(output) = state.get(&(unit.pkg.package_id().clone(), unit.kind)) {
+        debug!("override local fingerprints deps");
+        let s = format!(
+            "overridden build state with hash: {}",
+            util::hash_u64(output)
+        );
+        return Ok((vec![LocalFingerprint::Precalculated(s)], None));
+    }
+
+    // Next up we look at the previously listed dependencies for the build
+    // script. If there are none then we're in the "old mode" where we just
+    // assume that we're changed if anything in the package changed. The
+    // `Some` here though means that we want to update our local fingerprints
+    // after we're done as running this build script may have created more
+    // dependencies.
+    let deps = &cx.build_explicit_deps[unit];
+    let output = deps.build_script_output.clone();
+    if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() {
+        debug!("old local fingerprints deps");
+        let s = pkg_fingerprint(cx, unit.pkg)?;
+        return Ok((vec![LocalFingerprint::Precalculated(s)], Some(output)));
+    }
+
+    // Ok so now we're in "new mode" where we can have files listed as
+    // dependencies as well as env vars listed as dependencies. Process them all
+    // here.
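For reference, the "new mode" above is driven by directives that a build script prints to stdout. A minimal hypothetical `build.rs` that opts in could look like this (the directive names are Cargo's documented build-script protocol; the file and variable names here are made up):

```rust
// build.rs -- a sketch of a script that opts into "new mode".
// Cargo re-runs it only when a listed file or variable changes.
use std::env;

fn main() {
    // Re-run only when this (hypothetical) wrapper header changes...
    println!("cargo:rerun-if-changed=src/wrapper.h");
    // ...or when this (hypothetical) environment variable changes.
    println!("cargo:rerun-if-env-changed=FOO_SYS_STATIC");

    if env::var("FOO_SYS_STATIC").is_ok() {
        println!("cargo:rustc-link-lib=static=foo");
    }
}
```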
+    Ok((
+        local_fingerprints_deps(deps, cx.target_root(), unit.pkg.root()),
+        Some(output),
+    ))
+}
+
+fn local_fingerprints_deps(
+    deps: &BuildDeps,
+    target_root: &Path,
+    pkg_root: &Path,
+) -> Vec<LocalFingerprint> {
+    debug!("new local fingerprints deps");
+    let mut local = Vec::new();
+    if !deps.rerun_if_changed.is_empty() {
+        let output = &deps.build_script_output;
+        let deps = deps.rerun_if_changed.iter().map(|p| pkg_root.join(p));
+        let mtime = mtime_if_fresh(output, deps);
+        local.push(LocalFingerprint::mtime(target_root, mtime, output));
+    }
+
+    for var in deps.rerun_if_env_changed.iter() {
+        let val = env::var(var).ok();
+        local.push(LocalFingerprint::EnvBased(var.clone(), val));
+    }
+
+    local
+}
+
+fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
+    let hash = fingerprint.hash();
+    debug!("write fingerprint: {}", loc.display());
+    paths::write(loc, util::to_hex(hash).as_bytes())?;
+    paths::write(
+        &loc.with_extension("json"),
+        &serde_json::to_vec(&fingerprint).unwrap(),
+    )?;
+    Ok(())
+}
+
+/// Prepare for work when a package starts to build
+pub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<()> {
+    let new1 = cx.fingerprint_dir(unit);
+
+    if fs::metadata(&new1).is_err() {
+        fs::create_dir(&new1)?;
+    }
+
+    Ok(())
+}
+
+pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> PathBuf {
+    cx.fingerprint_dir(unit)
+        .join(&format!("dep-{}", filename(cx, unit)))
+}
+
+fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult<()> {
+    let old_fingerprint_short = paths::read(loc)?;
+    let new_hash = new_fingerprint.hash();
+
+    if util::to_hex(new_hash) == old_fingerprint_short {
+        return Ok(());
+    }
+
+    let old_fingerprint_json = paths::read(&loc.with_extension("json"))?;
+    let old_fingerprint = serde_json::from_str(&old_fingerprint_json)
+        .chain_err(|| internal("failed to deserialize json"))?;
+    new_fingerprint.compare(&old_fingerprint)
+}
+
+fn log_compare(unit: &Unit, compare: &CargoResult<()>) {
+    let ce = match *compare {
+        Ok(..) => return,
+        Err(ref e) => e,
+    };
+    info!("fingerprint error for {}: {}", unit.pkg, ce);
+
+    for cause in ce.causes().skip(1) {
+        info!("  cause: {}", cause);
+    }
+}
+
+// Parse the dep-info into a list of paths
+pub fn parse_dep_info(pkg: &Package, dep_info: &Path) -> CargoResult<Option<Vec<PathBuf>>> {
+    let data = match paths::read_bytes(dep_info) {
+        Ok(data) => data,
+        Err(_) => return Ok(None),
+    };
+    let paths = data.split(|&x| x == 0)
+        .filter(|x| !x.is_empty())
+        .map(|p| util::bytes2path(p).map(|p| pkg.root().join(p)))
+        .collect::<Result<Vec<_>, _>>()?;
+    if paths.is_empty() {
+        Ok(None)
+    } else {
+        Ok(Some(paths))
+    }
+}
+
+fn dep_info_mtime_if_fresh(pkg: &Package, dep_info: &Path) -> CargoResult<Option<FileTime>> {
+    if let Some(paths) = parse_dep_info(pkg, dep_info)? {
+        Ok(mtime_if_fresh(dep_info, paths.iter()))
+    } else {
+        Ok(None)
+    }
+}
+
+fn pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult<String> {
+    let source_id = pkg.package_id().source_id();
+    let sources = cx.packages.sources();
+
+    let source = sources
+        .get(source_id)
+        .ok_or_else(|| internal("missing package source"))?;
+    source.fingerprint(pkg)
+}
+
+fn mtime_if_fresh<I>(output: &Path, paths: I) -> Option<FileTime>
+where
+    I: IntoIterator,
+    I::Item: AsRef<Path>,
+{
+    let meta = match fs::metadata(output) {
+        Ok(meta) => meta,
+        Err(..) => return None,
+    };
+    let mtime = FileTime::from_last_modification_time(&meta);
+
+    let any_stale = paths.into_iter().any(|path| {
+        let path = path.as_ref();
+        let meta = match fs::metadata(path) {
+            Ok(meta) => meta,
+            Err(..) => {
+                info!("stale: {} -- missing", path.display());
+                return true;
+            }
+        };
+        let mtime2 = FileTime::from_last_modification_time(&meta);
+        if mtime2 > mtime {
+            info!("stale: {} -- {} vs {}", path.display(), mtime2, mtime);
+            true
+        } else {
+            false
+        }
+    });
+
+    if any_stale {
+        None
+    } else {
+        Some(mtime)
+    }
+}
+
+fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String {
+    // file_stem includes metadata hash. Thus we have a different
+    // fingerprint for every metadata hash version. This works because
+    // even if the package is fresh, we'll still link the fresh target
+    let file_stem = cx.file_stem(unit);
+    let kind = match *unit.target.kind() {
+        TargetKind::Lib(..) => "lib",
+        TargetKind::Bin => "bin",
+        TargetKind::Test => "integration-test",
+        TargetKind::ExampleBin | TargetKind::ExampleLib(..) => "example",
+        TargetKind::Bench => "bench",
+        TargetKind::CustomBuild => "build-script",
+    };
+    let flavor = if unit.profile.test {
+        "test-"
+    } else if unit.profile.doc {
+        "doc-"
+    } else {
+        ""
+    };
+    format!("{}{}-{}", flavor, kind, file_stem)
+}
+
+/// Parses the dep-info file coming out of rustc into a Cargo-specific format.
+///
+/// This function will parse `rustc_dep_info` as a makefile-style dep info to
+/// learn about all the files which a crate depends on. This is then
+/// re-serialized into the `cargo_dep_info` path in a Cargo-specific format.
+///
+/// The `pkg_root` argument here is the absolute path to the directory
+/// containing `Cargo.toml` for this crate that was compiled. The paths listed
+/// in the rustc dep-info file may or may not be absolute but we'll want to
+/// consider all of them relative to the `root` specified.
+///
+/// The `rustc_cwd` argument is the absolute path to the cwd of the compiler
+/// when it was invoked.
+///
+/// The serialized Cargo format will contain a list of files, all of which are
+/// relative if they're under `root`, or absolute if they're elsewhere.
+pub fn translate_dep_info(
+    rustc_dep_info: &Path,
+    cargo_dep_info: &Path,
+    pkg_root: &Path,
+    rustc_cwd: &Path,
+) -> CargoResult<()> {
+    let target = parse_rustc_dep_info(rustc_dep_info)?;
+    let deps = &target
+        .get(0)
+        .ok_or_else(|| internal("malformed dep-info format, no targets".to_string()))?
+        .1;
+
+    let mut new_contents = Vec::new();
+    for file in deps {
+        let absolute = rustc_cwd.join(file);
+        let path = absolute.strip_prefix(pkg_root).unwrap_or(&absolute);
+        new_contents.extend(util::path2bytes(path)?);
+        new_contents.push(0);
+    }
+    paths::write(cargo_dep_info, &new_contents)?;
+    Ok(())
+}
+
+pub fn parse_rustc_dep_info(rustc_dep_info: &Path) -> CargoResult<Vec<(String, Vec<String>)>> {
+    let contents = paths::read(rustc_dep_info)?;
+    contents
+        .lines()
+        .filter_map(|l| l.find(": ").map(|i| (l, i)))
+        .map(|(line, pos)| {
+            let target = &line[..pos];
+            let mut deps = line[pos + 2..].split_whitespace();
+
+            let mut ret = Vec::new();
+            while let Some(s) = deps.next() {
+                let mut file = s.to_string();
+                while file.ends_with('\\') {
+                    file.pop();
+                    file.push(' ');
+                    file.push_str(deps.next().ok_or_else(|| {
+                        internal("malformed dep-info format, trailing \\".to_string())
+                    })?);
+                }
+                ret.push(file);
+            }
+            Ok((target.to_string(), ret))
+        })
+        .collect()
+}
diff --git a/src/cargo/ops/cargo_rustc/job.rs b/src/cargo/ops/cargo_rustc/job.rs
new file mode 100644
index 000000000..61e979f1d
--- /dev/null
+++ b/src/cargo/ops/cargo_rustc/job.rs
@@ -0,0 +1,71 @@
+use std::fmt;
+
+use util::{CargoResult, Dirty, Fresh, Freshness};
+use super::job_queue::JobState;
+
+pub struct Job {
+    dirty: Work,
+    fresh: Work,
+}
+
+/// Each proc should send its description before starting.
+/// It should send either once or close immediately.
+pub struct Work {
+    inner: Box<for<'a, 'b> FnBox<&'a JobState<'b>, CargoResult<()>> + Send>,
+}
+
+trait FnBox<A, R> {
+    fn call_box(self: Box<Self>, a: A) -> R;
+}
+
+impl<A, R, F: FnOnce(A) -> R> FnBox<A, R> for F {
+    fn call_box(self: Box<F>, a: A) -> R {
+        (*self)(a)
+    }
+}
+
+impl Work {
+    pub fn new<F>(f: F) -> Work
+    where
+        F: FnOnce(&JobState) -> CargoResult<()> + Send + 'static,
+    {
+        Work { inner: Box::new(f) }
+    }
+
+    pub fn noop() -> Work {
+        Work::new(|_| Ok(()))
+    }
+
+    pub fn call(self, tx: &JobState) -> CargoResult<()> {
+        self.inner.call_box(tx)
+    }
+
+    pub fn then(self, next: Work) -> Work {
+        Work::new(move |state| {
+            self.call(state)?;
+            next.call(state)
+        })
+    }
+}
+
+impl Job {
+    /// Create a new job representing a unit of work.
+    pub fn new(dirty: Work, fresh: Work) -> Job {
+        Job { dirty, fresh }
+    }
+
+    /// Consumes this job by running it, returning the result of the
+    /// computation.
+    pub fn run(self, fresh: Freshness, state: &JobState) -> CargoResult<()> {
+        match fresh {
+            Fresh => self.fresh.call(state),
+            Dirty => self.dirty.call(state),
+        }
+    }
+}
+
+impl fmt::Debug for Job {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "Job {{ ... }}")
+    }
+}
diff --git a/src/cargo/ops/cargo_rustc/job_queue.rs b/src/cargo/ops/cargo_rustc/job_queue.rs
new file mode 100644
index 000000000..cf4c93625
--- /dev/null
+++ b/src/cargo/ops/cargo_rustc/job_queue.rs
@@ -0,0 +1,443 @@
+use std::collections::HashSet;
+use std::collections::hash_map::HashMap;
+use std::fmt;
+use std::io;
+use std::mem;
+use std::sync::mpsc::{channel, Receiver, Sender};
+
+use crossbeam::{self, Scope};
+use jobserver::{Acquired, HelperThread};
+
+use core::{PackageId, Profile, Target};
+use util::{Config, DependencyQueue, Dirty, Fresh, Freshness};
+use util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder};
+use handle_error;
+
+use super::{Context, Kind, Unit};
+use super::job::Job;
+
+/// A management structure of the entire dependency graph to compile.
+///
+/// This structure is backed by the `DependencyQueue` type and manages the
+/// actual compilation step of each package.
+/// Packages enqueue units of work and
+/// then later on the entire graph is processed and compiled.
+pub struct JobQueue<'a> {
+    queue: DependencyQueue<Key<'a>, Vec<(Job, Freshness)>>,
+    tx: Sender<Message<'a>>,
+    rx: Receiver<Message<'a>>,
+    active: usize,
+    pending: HashMap<Key<'a>, PendingBuild>,
+    compiled: HashSet<&'a PackageId>,
+    documented: HashSet<&'a PackageId>,
+    counts: HashMap<&'a PackageId, usize>,
+    is_release: bool,
+}
+
+/// A helper structure for metadata about the state of a building package.
+struct PendingBuild {
+    /// Number of jobs currently active
+    amt: usize,
+    /// Current freshness state of this package. Any dirty target within a
+    /// package will cause the entire package to become dirty.
+    fresh: Freshness,
+}
+
+#[derive(Clone, Copy, Eq, PartialEq, Hash)]
+struct Key<'a> {
+    pkg: &'a PackageId,
+    target: &'a Target,
+    profile: &'a Profile,
+    kind: Kind,
+}
+
+pub struct JobState<'a> {
+    tx: Sender<Message<'a>>,
+}
+
+enum Message<'a> {
+    Run(String),
+    Stdout(String),
+    Stderr(String),
+    Token(io::Result<Acquired>),
+    Finish(Key<'a>, CargoResult<()>),
+}
+
+impl<'a> JobState<'a> {
+    pub fn running(&self, cmd: &ProcessBuilder) {
+        let _ = self.tx.send(Message::Run(cmd.to_string()));
+    }
+
+    pub fn stdout(&self, out: &str) {
+        let _ = self.tx.send(Message::Stdout(out.to_string()));
+    }
+
+    pub fn stderr(&self, err: &str) {
+        let _ = self.tx.send(Message::Stderr(err.to_string()));
+    }
+}
+
+impl<'a> JobQueue<'a> {
+    pub fn new<'cfg>(cx: &Context<'a, 'cfg>) -> JobQueue<'a> {
+        let (tx, rx) = channel();
+        JobQueue {
+            queue: DependencyQueue::new(),
+            tx,
+            rx,
+            active: 0,
+            pending: HashMap::new(),
+            compiled: HashSet::new(),
+            documented: HashSet::new(),
+            counts: HashMap::new(),
+            is_release: cx.build_config.release,
+        }
+    }
+
+    pub fn enqueue<'cfg>(
+        &mut self,
+        cx: &Context<'a, 'cfg>,
+        unit: &Unit<'a>,
+        job: Job,
+        fresh: Freshness,
+    ) -> CargoResult<()> {
+        let key = Key::new(unit);
+        let deps = key.dependencies(cx)?;
+        self.queue
+            .queue(Fresh, key, Vec::new(), &deps)
+            .push((job, fresh));
+        *self.counts.entry(key.pkg).or_insert(0) += 1;
+        Ok(())
+    }
+
+    /// Execute all jobs necessary to build the dependency graph.
+    ///
+    /// This function will spawn off `config.jobs()` workers to build all of the
+    /// necessary dependencies, in order. Freshness is propagated as far as
+    /// possible along each dependency chain.
+    pub fn execute(&mut self, cx: &mut Context) -> CargoResult<()> {
+        let _p = profile::start("executing the job graph");
+        self.queue.queue_finished();
+
+        // We need to give a handle to the send half of our message queue to the
+        // jobserver helper thread. Unfortunately though we need the handle to be
+        // `'static` as that's typically what's required when spawning a
+        // thread!
+        //
+        // To work around this we transmute the `Sender` to a static lifetime.
+        // We're only sending "longer living" messages and we should also
+        // destroy all references to the channel before this function exits as
+        // the destructor for the `helper` object will ensure the associated
+        // thread is no longer running.
+        //
+        // As a result, this `transmute` to a longer lifetime should be safe in
+        // practice.
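As a standalone illustration of the helper-thread handoff described in the comment above, the sketch below wires a `jobserver::Client` to an mpsc channel the same way `execute` does. The calls are from the `jobserver` crate as I understand its API, so treat the exact signatures as an assumption:

```rust
use std::sync::mpsc::channel;

use jobserver::Client;

fn main() -> Result<(), Box<std::error::Error>> {
    let (tx, rx) = channel();

    // A fresh client with two tokens; Cargo would normally inherit one
    // from the environment or create its own.
    let client = Client::new(2)?;

    // The helper thread forwards acquired tokens onto our message channel,
    // mirroring `Message::Token` above. The closure must be `'static`,
    // which is exactly why Cargo transmutes its `Sender`'s lifetime.
    let helper = client.into_helper_thread(move |token| {
        drop(tx.send(token));
    })?;

    helper.request_token();
    let _acquired = rx.recv()?.expect("token acquisition failed");
    // Dropping `helper` joins the thread, so no message outlives `rx`.
    Ok(())
}
```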
+        let tx = self.tx.clone();
+        let tx = unsafe { mem::transmute::<Sender<Message<'a>>, Sender<Message<'static>>>(tx) };
+        let helper = cx.jobserver
+            .clone()
+            .into_helper_thread(move |token| {
+                drop(tx.send(Message::Token(token)));
+            })
+            .chain_err(|| "failed to create helper thread for jobserver management")?;
+
+        crossbeam::scope(|scope| self.drain_the_queue(cx, scope, &helper))
+    }
+
+    fn drain_the_queue(
+        &mut self,
+        cx: &mut Context,
+        scope: &Scope<'a>,
+        jobserver_helper: &HelperThread,
+    ) -> CargoResult<()> {
+        use std::time::Instant;
+
+        let mut tokens = Vec::new();
+        let mut queue = Vec::new();
+        trace!("queue: {:#?}", self.queue);
+
+        // Iteratively execute the entire dependency graph. Each turn of the
+        // loop starts out by scheduling as much work as possible (up to the
+        // maximum number of parallel jobs we have tokens for). A local queue
+        // is maintained separately from the main dependency queue as one
+        // dequeue may actually dequeue quite a bit of work (e.g. 10 binaries
+        // in one project).
+        //
+        // After a job has finished we update our internal state if it was
+        // successful and otherwise wait for pending work to finish if it failed
+        // and then immediately return.
+        let mut error = None;
+        let start_time = Instant::now();
+        loop {
+            // Dequeue as much work as we can, learning about everything
+            // possible that can run. Note that this is also the point where we
+            // start requesting job tokens. Each job after the first needs to
+            // request a token.
+            while let Some((fresh, key, jobs)) = self.queue.dequeue() {
+                let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| f.combine(fresh));
+                self.pending.insert(
+                    key,
+                    PendingBuild {
+                        amt: jobs.len(),
+                        fresh: total_fresh,
+                    },
+                );
+                for (job, f) in jobs {
+                    queue.push((key, job, f.combine(fresh)));
+                    if self.active + queue.len() > 1 {
+                        jobserver_helper.request_token();
+                    }
+                }
+            }
+
+            // Now that we've learned of all possible work that we can execute
+            // try to spawn it so long as we've got a jobserver token which says
+            // we're able to perform some parallel work.
+            while error.is_none() && self.active < tokens.len() + 1 && !queue.is_empty() {
+                let (key, job, fresh) = queue.remove(0);
+                self.run(key, fresh, job, cx.config, scope)?;
+            }
+
+            // If after all that we're not actually running anything then we're
+            // done!
+            if self.active == 0 {
+                break;
+            }
+
+            // And finally, before we block waiting for the next event, drop any
+            // excess tokens we may have accidentally acquired. Due to how our
+            // jobserver interface is architected we may acquire a token that we
+            // don't actually use, and if this happens just relinquish it back
+            // to the jobserver itself.
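`drain_the_queue` receives the `Scope` created by `crossbeam::scope` in `execute` above, which is what lets job closures borrow non-`'static` data while still being guaranteed to finish before the scope ends. A minimal sketch with the 0.3-era `crossbeam` API this module uses (newer crossbeam versions changed this signature, so treat the exact form as an assumption):

```rust
extern crate crossbeam; // 0.3-era API, as used by this module

fn main() {
    let items = vec![10, 20, 30]; // stack data borrowed by the workers
    crossbeam::scope(|scope| {
        for item in &items {
            // `spawn` may borrow `items` because the scope joins every
            // spawned thread before `crossbeam::scope` returns.
            scope.spawn(move || {
                println!("processing {}", item);
            });
        }
    });
}
```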
+            tokens.truncate(self.active - 1);
+
+            match self.rx.recv().unwrap() {
+                Message::Run(cmd) => {
+                    cx.config.shell().verbose(|c| c.status("Running", &cmd))?;
+                }
+                Message::Stdout(out) => {
+                    if cx.config.extra_verbose() {
+                        println!("{}", out);
+                    }
+                }
+                Message::Stderr(err) => {
+                    if cx.config.extra_verbose() {
+                        writeln!(cx.config.shell().err(), "{}", err)?;
+                    }
+                }
+                Message::Finish(key, result) => {
+                    info!("end: {:?}", key);
+                    self.active -= 1;
+                    if self.active > 0 {
+                        assert!(!tokens.is_empty());
+                        drop(tokens.pop());
+                    }
+                    match result {
+                        Ok(()) => self.finish(key, cx)?,
+                        Err(e) => {
+                            let msg = "The following warnings were emitted during compilation:";
+                            self.emit_warnings(Some(msg), key, cx)?;
+
+                            if self.active > 0 {
+                                error = Some(format_err!("build failed"));
+                                handle_error(e, &mut *cx.config.shell());
+                                cx.config.shell().warn(
+                                    "build failed, waiting for other \
+                                     jobs to finish...",
+                                )?;
+                            } else {
+                                error = Some(e);
+                            }
+                        }
+                    }
+                }
+                Message::Token(acquired_token) => {
+                    tokens.push(acquired_token.chain_err(|| "failed to acquire jobserver token")?);
+                }
+            }
+        }
+
+        let build_type = if self.is_release { "release" } else { "dev" };
+        let profile = cx.lib_profile();
+        let mut opt_type = String::from(if profile.opt_level == "0" {
+            "unoptimized"
+        } else {
+            "optimized"
+        });
+        if profile.debuginfo.is_some() {
+            opt_type += " + debuginfo";
+        }
+        let duration = start_time.elapsed();
+        let time_elapsed = format!(
+            "{}.{:02} secs",
+            duration.as_secs(),
+            duration.subsec_nanos() / 10_000_000
+        );
+        if self.queue.is_empty() {
+            let message = format!(
+                "{} [{}] target(s) in {}",
+                build_type, opt_type, time_elapsed
+            );
+            cx.config.shell().status("Finished", message)?;
+            Ok(())
+        } else if let Some(e) = error {
+            Err(e)
+        } else {
+            debug!("queue: {:#?}", self.queue);
+            Err(internal("finished with jobs still left in the queue"))
+        }
+    }
+
+    /// Executes a job in the `scope` given, pushing the spawned thread's
+    /// handle onto `threads`.
+    fn run(
+        &mut self,
+        key: Key<'a>,
+        fresh: Freshness,
+        job: Job,
+        config: &Config,
+        scope: &Scope<'a>,
+    ) -> CargoResult<()> {
+        info!("start: {:?}", key);
+
+        self.active += 1;
+        *self.counts.get_mut(key.pkg).unwrap() -= 1;
+
+        let my_tx = self.tx.clone();
+        let doit = move || {
+            let res = job.run(fresh, &JobState { tx: my_tx.clone() });
+            my_tx.send(Message::Finish(key, res)).unwrap();
+        };
+        match fresh {
+            Freshness::Fresh => doit(),
+            Freshness::Dirty => {
+                scope.spawn(doit);
+            }
+        }
+
+        // Print out some nice progress information
+        self.note_working_on(config, &key, fresh)?;
+
+        Ok(())
+    }
+
+    fn emit_warnings(&self, msg: Option<&str>, key: Key<'a>, cx: &mut Context) -> CargoResult<()> {
+        let output = cx.build_state.outputs.lock().unwrap();
+        if let Some(output) = output.get(&(key.pkg.clone(), key.kind)) {
+            if let Some(msg) = msg {
+                if !output.warnings.is_empty() {
+                    writeln!(cx.config.shell().err(), "{}\n", msg)?;
+                }
+            }
+
+            for warning in output.warnings.iter() {
+                cx.config.shell().warn(warning)?;
+            }
+
+            if !output.warnings.is_empty() && msg.is_some() {
+                // Output an empty line.
+                writeln!(cx.config.shell().err(), "")?;
+            }
+        }
+
+        Ok(())
+    }
+
+    fn finish(&mut self, key: Key<'a>, cx: &mut Context) -> CargoResult<()> {
+        if key.profile.run_custom_build && cx.show_warnings(key.pkg) {
+            self.emit_warnings(None, key, cx)?;
+        }
+
+        let state = self.pending.get_mut(&key).unwrap();
+        state.amt -= 1;
+        if state.amt == 0 {
+            self.queue.finish(&key, state.fresh);
+        }
+        Ok(())
+    }
+
+    // This isn't super trivial because we don't want to print loads and
+    // loads of information to the console, but we also want to produce a
+    // faithful representation of what's happening. This is somewhat nuanced
+    // as a package can start compiling *very* early on because of custom
+    // build commands and such.
+    //
+    // In general, we try to print "Compiling" for the first nontrivial task
+    // run for a package, regardless of when that is. We then don't print
+    // out any more information for a package after we've printed it once.
+    fn note_working_on(
+        &mut self,
+        config: &Config,
+        key: &Key<'a>,
+        fresh: Freshness,
+    ) -> CargoResult<()> {
+        if (self.compiled.contains(key.pkg) && !key.profile.doc)
+            || (self.documented.contains(key.pkg) && key.profile.doc)
+        {
+            return Ok(());
+        }
+
+        match fresh {
+            // Any dirty stage which runs at least one command gets printed as
+            // being a compiled package
+            Dirty => {
+                if key.profile.doc {
+                    if !key.profile.test {
+                        self.documented.insert(key.pkg);
+                        config.shell().status("Documenting", key.pkg)?;
+                    }
+                } else {
+                    self.compiled.insert(key.pkg);
+                    config.shell().status("Compiling", key.pkg)?;
+                }
+            }
+            Fresh if self.counts[key.pkg] == 0 => {
+                self.compiled.insert(key.pkg);
+                config.shell().verbose(|c| c.status("Fresh", key.pkg))?;
+            }
+            Fresh => {}
+        }
+        Ok(())
+    }
+}
+
+impl<'a> Key<'a> {
+    fn new(unit: &Unit<'a>) -> Key<'a> {
+        Key {
+            pkg: unit.pkg.package_id(),
+            target: unit.target,
+            profile: unit.profile,
+            kind: unit.kind,
+        }
+    }
+
+    fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>) -> CargoResult<Vec<Key<'a>>> {
+        let unit = Unit {
+            pkg: cx.get_package(self.pkg)?,
+            target: self.target,
+            profile: self.profile,
+            kind: self.kind,
+        };
+        let targets = cx.dep_targets(&unit);
+        Ok(targets
+            .iter()
+            .filter_map(|unit| {
+                // Binaries aren't actually needed to *compile* tests, just to run
+                // them, so we don't include this dependency edge in the job graph.
+                if self.target.is_test() && unit.target.is_bin() {
+                    None
+                } else {
+                    Some(Key::new(unit))
+                }
+            })
+            .collect())
+    }
+}
+
+impl<'a> fmt::Debug for Key<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(
+            f,
+            "{} => {}/{} => {:?}",
+            self.pkg, self.target, self.profile, self.kind
+        )
+    }
+}
diff --git a/src/cargo/ops/cargo_rustc/layout.rs b/src/cargo/ops/cargo_rustc/layout.rs
new file mode 100644
index 000000000..52af1defa
--- /dev/null
+++ b/src/cargo/ops/cargo_rustc/layout.rs
@@ -0,0 +1,205 @@
+//! Management of the directory layout of a build
+//!
+//! The directory layout is a little tricky at times, hence a separate file to
+//! house this logic. The current layout looks like this:
+//!
+//! ```ignore
+//! # This is the root directory for all output, the top-level package
+//! # places all of its output here.
+//! target/
+//!
+//!     # This is the root directory for all output of *dependencies*
+//!     deps/
+//!
+//!     # Root directory for all compiled examples
+//!     examples/
+//!
+//!     # This is the location at which the output of all custom build
+//!     # commands are rooted
+//!     build/
+//!
+//!         # Each package gets its own directory where its build script and
+//!         # script output are placed
+//!         $pkg1/
+//!         $pkg2/
+//!         $pkg3/
+//!
+//!             # Each package directory has an `out` directory where its
+//!             # output is placed.
+//!             out/
+//!
+//!     # This is the location at which the output of all old custom build
+//!     # commands are rooted
+//!     native/
+//!
+//!         # Each package gets its own directory for where its output is
+//!         # placed. We can't track exactly what's getting put in here, so
+//!         # we just assume that all relevant output is in these
+//!         # directories.
+//!         $pkg1/
+//!         $pkg2/
+//!         $pkg3/
+//!
+//!     # Directory used to store incremental data for the compiler (when
+//!     # incremental is enabled).
+//!     incremental/
+//!
+//!     # Hidden directory that holds all of the fingerprint files for all
+//!     # packages
+//!     .fingerprint/
+//! ```
+
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+
+use core::Workspace;
+use util::{CargoResult, Config, FileLock, Filesystem};
+
+/// Contains the paths of all target output locations.
+///
+/// See module docs for more information.
+pub struct Layout {
+    root: PathBuf,
+    deps: PathBuf,
+    native: PathBuf,
+    build: PathBuf,
+    incremental: PathBuf,
+    fingerprint: PathBuf,
+    examples: PathBuf,
+    /// The lockfile for a build, will be unlocked when this struct is `drop`ped.
+    _lock: FileLock,
+}
+
+pub fn is_bad_artifact_name(name: &str) -> bool {
+    ["deps", "examples", "build", "native", "incremental"]
+        .iter()
+        .any(|&reserved| reserved == name)
+}
+
+impl Layout {
+    /// Calculate the paths for build output, lock the build directory, and return as a Layout.
+    ///
+    /// This function will block if the directory is already locked.
+    ///
+    /// Differs from `at` in that this calculates the root path from the workspace target directory,
+    /// adding the target triple and the profile (debug, release, ...).
+    pub fn new(ws: &Workspace, triple: Option<&str>, dest: &str) -> CargoResult<Layout> {
+        let mut path = ws.target_dir();
+        // Flexible target specifications often point at filenames, so interpret
+        // the target triple as a Path and then just use the file stem as the
+        // component for the directory name.
+        if let Some(triple) = triple {
+            path.push(Path::new(triple)
+                .file_stem()
+                .ok_or_else(|| format_err!("target was empty"))?);
+        }
+        path.push(dest);
+        Layout::at(ws.config(), path)
+    }
+
+    /// Calculate the paths for build output, lock the build directory, and return as a Layout.
+    ///
+    /// This function will block if the directory is already locked.
+    pub fn at(config: &Config, root: Filesystem) -> CargoResult<Layout> {
+        // For now we don't do any finer-grained locking on the artifact
+        // directory, so just lock the entire thing for the duration of this
+        // compile.
+        let lock = root.open_rw(".cargo-lock", config, "build directory")?;
+        let root = root.into_path_unlocked();
+
+        Ok(Layout {
+            deps: root.join("deps"),
+            native: root.join("native"),
+            build: root.join("build"),
+            incremental: root.join("incremental"),
+            fingerprint: root.join(".fingerprint"),
+            examples: root.join("examples"),
+            root,
+            _lock: lock,
+        })
+    }
+
+    #[cfg(not(target_os = "macos"))]
+    fn exclude_from_backups(&self, _: &Path) {}
+
+    #[cfg(target_os = "macos")]
+    /// Marks files or directories as excluded from Time Machine on macOS
+    ///
+    /// This is recommended to prevent derived/temporary files from bloating backups.
+ fn exclude_from_backups(&self, path: &Path) { + use std::ptr; + use core_foundation::{number, string, url}; + use core_foundation::base::TCFType; + + // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey + let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); + match (url::CFURL::from_path(path, false), is_excluded_key) { + (Some(path), Ok(is_excluded_key)) => unsafe { + url::CFURLSetResourcePropertyForKey( + path.as_concrete_TypeRef(), + is_excluded_key.as_concrete_TypeRef(), + number::kCFBooleanTrue as *const _, + ptr::null_mut(), + ); + }, + // Errors are ignored, since it's an optional feature and failure + // doesn't prevent Cargo from working + _ => {} + } + } + + /// Make sure all directories stored in the Layout exist on the filesystem. + pub fn prepare(&mut self) -> io::Result<()> { + if fs::metadata(&self.root).is_err() { + fs::create_dir_all(&self.root)?; + } + + self.exclude_from_backups(&self.root); + + mkdir(&self.deps)?; + mkdir(&self.native)?; + mkdir(&self.incremental)?; + mkdir(&self.fingerprint)?; + mkdir(&self.examples)?; + mkdir(&self.build)?; + + return Ok(()); + + fn mkdir(dir: &Path) -> io::Result<()> { + if fs::metadata(&dir).is_err() { + fs::create_dir(dir)?; + } + Ok(()) + } + } + + /// Fetch the root path. + pub fn dest(&self) -> &Path { + &self.root + } + /// Fetch the deps path. + pub fn deps(&self) -> &Path { + &self.deps + } + /// Fetch the examples path. + pub fn examples(&self) -> &Path { + &self.examples + } + /// Fetch the root path. + pub fn root(&self) -> &Path { + &self.root + } + /// Fetch the incremental path. + pub fn incremental(&self) -> &Path { + &self.incremental + } + /// Fetch the fingerprint path. + pub fn fingerprint(&self) -> &Path { + &self.fingerprint + } + /// Fetch the build path. + pub fn build(&self) -> &Path { + &self.build + } +} diff --git a/src/cargo/ops/cargo_rustc/links.rs b/src/cargo/ops/cargo_rustc/links.rs new file mode 100644 index 000000000..abcad2acd --- /dev/null +++ b/src/cargo/ops/cargo_rustc/links.rs @@ -0,0 +1,72 @@ +use std::collections::{HashMap, HashSet}; +use std::fmt::Write; + +use core::{PackageId, Resolve}; +use util::CargoResult; +use super::Unit; + +#[derive(Default)] +pub struct Links<'a> { + validated: HashSet<&'a PackageId>, + links: HashMap, +} + +impl<'a> Links<'a> { + pub fn new() -> Links<'a> { + Links { + validated: HashSet::new(), + links: HashMap::new(), + } + } + + pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'a>) -> CargoResult<()> { + if !self.validated.insert(unit.pkg.package_id()) { + return Ok(()); + } + let lib = match unit.pkg.manifest().links() { + Some(lib) => lib, + None => return Ok(()), + }; + if let Some(prev) = self.links.get(lib) { + let pkg = unit.pkg.package_id(); + + let describe_path = |pkgid: &PackageId| -> String { + let dep_path = resolve.path_to_top(pkgid); + let mut dep_path_desc = format!("package `{}`", dep_path[0]); + for dep in dep_path.iter().skip(1) { + write!(dep_path_desc, "\n ... 
which is depended on by `{}`", dep).unwrap(); + } + dep_path_desc + }; + + bail!( + "multiple packages link to native library `{}`, \ + but a native library can be linked only once\n\ + \n\ + {}\nlinks to native library `{}`\n\ + \n\ + {}\nalso links to native library `{}`", + lib, + describe_path(prev), + lib, + describe_path(pkg), + lib + ) + } + if !unit.pkg + .manifest() + .targets() + .iter() + .any(|t| t.is_custom_build()) + { + bail!( + "package `{}` specifies that it links to `{}` but does not \ + have a custom build script", + unit.pkg.package_id(), + lib + ) + } + self.links.insert(lib.to_string(), unit.pkg.package_id()); + Ok(()) + } +} diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs new file mode 100644 index 000000000..950a829be --- /dev/null +++ b/src/cargo/ops/cargo_rustc/mod.rs @@ -0,0 +1,1125 @@ +use std::collections::{HashMap, HashSet}; +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fs; +use std::io::{self, Write}; +use std::path::{self, PathBuf}; +use std::sync::Arc; + +use same_file::is_same_file; +use serde_json; + +use core::{Feature, Package, PackageId, PackageSet, Resolve, Target}; +use core::{Profile, Profiles, Workspace}; +use core::manifest::Lto; +use core::shell::ColorChoice; +use util::{self, machine_message, ProcessBuilder}; +use util::{internal, join_paths, profile, Config}; +use util::paths; +use util::errors::{CargoResult, CargoResultExt, Internal}; +use util::Freshness; + +use self::job::{Job, Work}; +use self::job_queue::JobQueue; + +use self::output_depinfo::output_depinfo; + +pub use self::compilation::Compilation; +pub use self::context::{Context, TargetFileType, Unit}; +pub use self::custom_build::{BuildMap, BuildOutput, BuildScripts}; +pub use self::layout::is_bad_artifact_name; + +mod compilation; +mod context; +mod custom_build; +mod fingerprint; +mod job; +mod job_queue; +mod layout; +mod links; +mod output_depinfo; + +/// Whether an object is for the host arch, or the target arch. +/// +/// These will be the same unless cross-compiling. +#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord)] +pub enum Kind { + Host, + Target, +} + +/// Configuration information for a rustc build. +#[derive(Default, Clone)] +pub struct BuildConfig { + /// The host arch triple + /// + /// e.g. x86_64-unknown-linux-gnu, would be + /// - machine: x86_64 + /// - hardware-platform: unknown + /// - operating system: linux-gnu + pub host_triple: String, + /// Build information for the host arch + pub host: TargetConfig, + /// The target arch triple, defaults to host arch + pub requested_target: Option, + /// Build information for the target + pub target: TargetConfig, + /// How many rustc jobs to run in parallel + pub jobs: u32, + /// Whether we are building for release + pub release: bool, + /// Whether we are running tests + pub test: bool, + /// Whether we are building documentation + pub doc_all: bool, + /// Whether to print std output in json format (for machine reading) + pub json_messages: bool, +} + +/// Information required to build for a target +#[derive(Clone, Default)] +pub struct TargetConfig { + /// The path of archiver (lib builder) for this target. + pub ar: Option, + /// The path of the linker for this target. + pub linker: Option, + /// Special build options for any necessary input files (filename -> options) + pub overrides: HashMap, +} + +pub type PackagesToBuild<'a> = [(&'a Package, Vec<(&'a Target, &'a Profile)>)]; + +/// A glorified callback for executing calls to rustc. 
Rather than calling rustc +/// directly, we'll use an Executor, giving clients an opportunity to intercept +/// the build calls. +pub trait Executor: Send + Sync + 'static { + /// Called after a rustc process invocation is prepared up-front for a given + /// unit of work (may still be modified for runtime-known dependencies, when + /// the work is actually executed). + fn init(&self, _cx: &Context, _unit: &Unit) {} + + /// In case of an `Err`, Cargo will not continue with the build process for + /// this package. + fn exec(&self, cmd: ProcessBuilder, _id: &PackageId, _target: &Target) -> CargoResult<()> { + cmd.exec()?; + Ok(()) + } + + fn exec_json( + &self, + cmd: ProcessBuilder, + _id: &PackageId, + _target: &Target, + handle_stdout: &mut FnMut(&str) -> CargoResult<()>, + handle_stderr: &mut FnMut(&str) -> CargoResult<()>, + ) -> CargoResult<()> { + cmd.exec_with_streaming(handle_stdout, handle_stderr, false)?; + Ok(()) + } + + /// Queried when queuing each unit of work. If it returns true, then the + /// unit will always be rebuilt, independent of whether it needs to be. + fn force_rebuild(&self, _unit: &Unit) -> bool { + false + } +} + +/// A `DefaultExecutor` calls rustc without doing anything else. It is Cargo's +/// default behaviour. +#[derive(Copy, Clone)] +pub struct DefaultExecutor; + +impl Executor for DefaultExecutor {} + +// Returns a mapping of the root package plus its immediate dependencies to +// where the compiled libraries are all located. +pub fn compile_targets<'a, 'cfg: 'a>( + ws: &Workspace<'cfg>, + pkg_targets: &'a PackagesToBuild<'a>, + packages: &'a PackageSet<'cfg>, + resolve: &'a Resolve, + config: &'cfg Config, + build_config: BuildConfig, + profiles: &'a Profiles, + exec: &Arc, +) -> CargoResult> { + let units = pkg_targets + .iter() + .flat_map(|&(pkg, ref targets)| { + let default_kind = if build_config.requested_target.is_some() { + Kind::Target + } else { + Kind::Host + }; + targets.iter().map(move |&(target, profile)| Unit { + pkg, + target, + profile, + kind: if target.for_host() { + Kind::Host + } else { + default_kind + }, + }) + }) + .collect::>(); + + let mut cx = Context::new( + ws, + resolve, + packages, + config, + build_config, + profiles, + &units, + )?; + + let mut queue = JobQueue::new(&cx); + + cx.prepare()?; + cx.build_used_in_plugin_map(&units)?; + custom_build::build_map(&mut cx, &units)?; + + for unit in units.iter() { + // Build up a list of pending jobs, each of which represent + // compiling a particular package. No actual work is executed as + // part of this, that's all done next as part of the `execute` + // function which will run everything in order with proper + // parallelism. + compile(&mut cx, &mut queue, unit, exec)?; + } + + // Now that we've figured out everything that we're going to do, do it! 
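The `exec` handle threaded through `compile_targets` is the `Executor` trait shown earlier in this file. Because every method on the trait has a default implementation, a custom executor only overrides what it needs; here is a sketch of a hypothetical executor that traces each rustc invocation before delegating to the default behaviour (type names are the ones this module already defines):

```rust
// A hypothetical Executor that logs every rustc invocation. `Executor`,
// `ProcessBuilder`, `PackageId`, `Target` and `CargoResult` are the items
// defined and used by this module above.
struct LoggingExecutor;

impl Executor for LoggingExecutor {
    fn exec(&self, cmd: ProcessBuilder, id: &PackageId, _target: &Target) -> CargoResult<()> {
        eprintln!("about to compile {}: {}", id, cmd);
        cmd.exec()?; // same behaviour as the default implementation
        Ok(())
    }
}
```

A caller would then pass `&Arc::new(LoggingExecutor)` (as an `Arc<Executor>`) to `compile_targets` in place of `DefaultExecutor`.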
+ queue.execute(&mut cx)?; + + for unit in units.iter() { + for &(ref dst, ref link_dst, file_type) in cx.target_filenames(unit)?.iter() { + if file_type == TargetFileType::DebugInfo { + continue; + } + + let bindst = match *link_dst { + Some(ref link_dst) => link_dst, + None => dst, + }; + + if unit.profile.test { + cx.compilation.tests.push(( + unit.pkg.clone(), + unit.target.kind().clone(), + unit.target.name().to_string(), + dst.clone(), + )); + } else if unit.target.is_bin() || unit.target.is_example() { + cx.compilation.binaries.push(bindst.clone()); + } else if unit.target.is_lib() { + let pkgid = unit.pkg.package_id().clone(); + cx.compilation + .libraries + .entry(pkgid) + .or_insert_with(HashSet::new) + .insert((unit.target.clone(), dst.clone())); + } + } + + for dep in cx.dep_targets(unit).iter() { + if !unit.target.is_lib() { + continue; + } + + if dep.profile.run_custom_build { + let out_dir = cx.build_script_out_dir(dep).display().to_string(); + cx.compilation + .extra_env + .entry(dep.pkg.package_id().clone()) + .or_insert_with(Vec::new) + .push(("OUT_DIR".to_string(), out_dir)); + } + + if !dep.target.is_lib() { + continue; + } + if dep.profile.doc { + continue; + } + + let v = cx.target_filenames(dep)?; + cx.compilation + .libraries + .entry(unit.pkg.package_id().clone()) + .or_insert_with(HashSet::new) + .extend( + v.iter() + .map(|&(ref f, _, _)| (dep.target.clone(), f.clone())), + ); + } + + let feats = cx.resolve.features(unit.pkg.package_id()); + if !feats.is_empty() { + cx.compilation + .cfgs + .entry(unit.pkg.package_id().clone()) + .or_insert_with(|| { + feats + .iter() + .map(|feat| format!("feature=\"{}\"", feat)) + .collect() + }); + } + let rustdocflags = cx.rustdocflags_args(unit)?; + if !rustdocflags.is_empty() { + cx.compilation + .rustdocflags + .entry(unit.pkg.package_id().clone()) + .or_insert(rustdocflags); + } + + output_depinfo(&mut cx, unit)?; + } + + for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() { + cx.compilation + .cfgs + .entry(pkg.clone()) + .or_insert_with(HashSet::new) + .extend(output.cfgs.iter().cloned()); + + cx.compilation + .extra_env + .entry(pkg.clone()) + .or_insert_with(Vec::new) + .extend(output.env.iter().cloned()); + + for dir in output.library_paths.iter() { + cx.compilation.native_dirs.insert(dir.clone()); + } + } + cx.compilation.target = cx.target_triple().to_string(); + Ok(cx.compilation) +} + +fn compile<'a, 'cfg: 'a>( + cx: &mut Context<'a, 'cfg>, + jobs: &mut JobQueue<'a>, + unit: &Unit<'a>, + exec: &Arc, +) -> CargoResult<()> { + if !cx.compiled.insert(*unit) { + return Ok(()); + } + + // Build up the work to be done to compile this unit, enqueuing it once + // we've got everything constructed. + let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name())); + fingerprint::prepare_init(cx, unit)?; + cx.links.validate(cx.resolve, unit)?; + + let (dirty, fresh, freshness) = if unit.profile.run_custom_build { + custom_build::prepare(cx, unit)? + } else if unit.profile.doc && unit.profile.test { + // we run these targets later, so this is just a noop for now + (Work::noop(), Work::noop(), Freshness::Fresh) + } else { + let (mut freshness, dirty, fresh) = fingerprint::prepare_target(cx, unit)?; + let work = if unit.profile.doc { + rustdoc(cx, unit)? + } else { + rustc(cx, unit, exec)? 
+ }; + // Need to link targets on both the dirty and fresh + let dirty = work.then(link_targets(cx, unit, false)?).then(dirty); + let fresh = link_targets(cx, unit, true)?.then(fresh); + + if exec.force_rebuild(unit) { + freshness = Freshness::Dirty; + } + + (dirty, fresh, freshness) + }; + jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness)?; + drop(p); + + // Be sure to compile all dependencies of this target as well. + for unit in cx.dep_targets(unit).iter() { + compile(cx, jobs, unit, exec)?; + } + + Ok(()) +} + +fn rustc<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, + exec: &Arc, +) -> CargoResult { + let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?; + + let name = unit.pkg.name().to_string(); + + // If this is an upstream dep we don't want warnings from, turn off all + // lints. + if !cx.show_warnings(unit.pkg.package_id()) { + rustc.arg("--cap-lints").arg("allow"); + + // If this is an upstream dep but we *do* want warnings, make sure that they + // don't fail compilation. + } else if !unit.pkg.package_id().source_id().is_path() { + rustc.arg("--cap-lints").arg("warn"); + } + + let filenames = cx.target_filenames(unit)?; + let root = cx.out_dir(unit); + let kind = unit.kind; + + // Prepare the native lib state (extra -L and -l flags) + let build_state = cx.build_state.clone(); + let current_id = unit.pkg.package_id().clone(); + let build_deps = load_build_deps(cx, unit); + + // If we are a binary and the package also contains a library, then we + // don't pass the `-l` flags. + let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); + let do_rename = unit.target.allows_underscores() && !unit.profile.test; + let real_name = unit.target.name().to_string(); + let crate_name = unit.target.crate_name(); + + // XXX(Rely on target_filenames iterator as source of truth rather than rederiving filestem) + let rustc_dep_info_loc = if do_rename && cx.target_metadata(unit).is_none() { + root.join(&crate_name) + } else { + root.join(&cx.file_stem(unit)) + }.with_extension("d"); + let dep_info_loc = fingerprint::dep_info_loc(cx, unit); + + rustc.args(&cx.rustflags_args(unit)?); + let json_messages = cx.build_config.json_messages; + let package_id = unit.pkg.package_id().clone(); + let target = unit.target.clone(); + + exec.init(cx, unit); + let exec = exec.clone(); + + let root_output = cx.target_root().to_path_buf(); + let pkg_root = unit.pkg.root().to_path_buf(); + let cwd = rustc + .get_cwd() + .unwrap_or_else(|| cx.config.cwd()) + .to_path_buf(); + + return Ok(Work::new(move |state| { + // Only at runtime have we discovered what the extra -L and -l + // arguments are for native libraries, so we process those here. We + // also need to be sure to add any -L paths for our plugins to the + // dynamic library load path as a plugin's dynamic library may be + // located somewhere in there. + // Finally, if custom environment variables have been produced by + // previous build scripts, we include them in the rustc invocation. + if let Some(build_deps) = build_deps { + let build_state = build_state.outputs.lock().unwrap(); + add_native_deps( + &mut rustc, + &build_state, + &build_deps, + pass_l_flag, + ¤t_id, + )?; + add_plugin_deps(&mut rustc, &build_state, &build_deps, &root_output)?; + add_custom_env(&mut rustc, &build_state, ¤t_id, kind)?; + } + + for &(ref filename, ref _link_dst, _linkable) in filenames.iter() { + // If there is both an rmeta and rlib, rustc will prefer to use the + // rlib, even if it is older. 
Therefore, we must delete the rlib to + // force using the new rmeta. + if filename.extension() == Some(OsStr::new("rmeta")) { + let dst = root.join(filename).with_extension("rlib"); + if dst.exists() { + paths::remove_file(&dst)?; + } + } + } + + state.running(&rustc); + if json_messages { + exec.exec_json( + rustc, + &package_id, + &target, + &mut |line| { + if !line.is_empty() { + Err(internal(&format!( + "compiler stdout is not empty: `{}`", + line + ))) + } else { + Ok(()) + } + }, + &mut |line| { + // stderr from rustc can have a mix of JSON and non-JSON output + if line.starts_with('{') { + // Handle JSON lines + let compiler_message = serde_json::from_str(line).map_err(|_| { + internal(&format!("compiler produced invalid json: `{}`", line)) + })?; + + machine_message::emit(&machine_message::FromCompiler { + package_id: &package_id, + target: &target, + message: compiler_message, + }); + } else { + // Forward non-JSON to stderr + writeln!(io::stderr(), "{}", line)?; + } + Ok(()) + }, + ).chain_err(|| format!("Could not compile `{}`.", name))?; + } else { + exec.exec(rustc, &package_id, &target) + .map_err(Internal::new) + .chain_err(|| format!("Could not compile `{}`.", name))?; + } + + if do_rename && real_name != crate_name { + let dst = &filenames[0].0; + let src = dst.with_file_name( + dst.file_name() + .unwrap() + .to_str() + .unwrap() + .replace(&real_name, &crate_name), + ); + if src.exists() && src.file_name() != dst.file_name() { + fs::rename(&src, &dst) + .chain_err(|| internal(format!("could not rename crate {:?}", src)))?; + } + } + + if rustc_dep_info_loc.exists() { + fingerprint::translate_dep_info(&rustc_dep_info_loc, &dep_info_loc, &pkg_root, &cwd) + .chain_err(|| { + internal(format!( + "could not parse/generate dep info at: {}", + rustc_dep_info_loc.display() + )) + })?; + } + + Ok(()) + })); + + // Add all relevant -L and -l flags from dependencies (now calculated and + // present in `state`) to the command provided + fn add_native_deps( + rustc: &mut ProcessBuilder, + build_state: &BuildMap, + build_scripts: &BuildScripts, + pass_l_flag: bool, + current_id: &PackageId, + ) -> CargoResult<()> { + for key in build_scripts.to_link.iter() { + let output = build_state.get(key).ok_or_else(|| { + internal(format!( + "couldn't find build state for {}/{:?}", + key.0, key.1 + )) + })?; + for path in output.library_paths.iter() { + rustc.arg("-L").arg(path); + } + if key.0 == *current_id { + for cfg in &output.cfgs { + rustc.arg("--cfg").arg(cfg); + } + if pass_l_flag { + for name in output.library_links.iter() { + rustc.arg("-l").arg(name); + } + } + } + } + Ok(()) + } + + // Add all custom environment variables present in `state` (after they've + // been put there by one of the `build_scripts`) to the command provided. + fn add_custom_env( + rustc: &mut ProcessBuilder, + build_state: &BuildMap, + current_id: &PackageId, + kind: Kind, + ) -> CargoResult<()> { + let key = (current_id.clone(), kind); + if let Some(output) = build_state.get(&key) { + for &(ref name, ref value) in output.env.iter() { + rustc.env(name, value); + } + } + Ok(()) + } +} + +/// Link the compiled target (often of form `foo-{metadata_hash}`) to the +/// final target. 
This must happen during both "Fresh" and "Compile" +fn link_targets<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, + fresh: bool, +) -> CargoResult { + let filenames = cx.target_filenames(unit)?; + let package_id = unit.pkg.package_id().clone(); + let target = unit.target.clone(); + let profile = unit.profile.clone(); + let features = cx.resolve + .features_sorted(&package_id) + .into_iter() + .map(|s| s.to_owned()) + .collect(); + let json_messages = cx.build_config.json_messages; + + Ok(Work::new(move |_| { + // If we're a "root crate", e.g. the target of this compilation, then we + // hard link our outputs out of the `deps` directory into the directory + // above. This means that `cargo build` will produce binaries in + // `target/debug` which one probably expects. + let mut destinations = vec![]; + for &(ref src, ref link_dst, _file_type) in filenames.iter() { + // This may have been a `cargo rustc` command which changes the + // output, so the source may not actually exist. + if !src.exists() { + continue; + } + let dst = match link_dst.as_ref() { + Some(dst) => dst, + None => { + destinations.push(src.display().to_string()); + continue; + } + }; + destinations.push(dst.display().to_string()); + + debug!("linking {} to {}", src.display(), dst.display()); + if is_same_file(src, dst).unwrap_or(false) { + continue; + } + if dst.exists() { + paths::remove_file(&dst)?; + } + + let link_result = if src.is_dir() { + #[cfg(unix)] + use std::os::unix::fs::symlink; + #[cfg(target_os = "redox")] + use std::os::redox::fs::symlink; + #[cfg(windows)] + use std::os::windows::fs::symlink_dir as symlink; + + let dst_dir = dst.parent().unwrap(); + assert!(src.starts_with(dst_dir)); + symlink(src.strip_prefix(dst_dir).unwrap(), dst) + } else { + fs::hard_link(src, dst) + }; + link_result + .or_else(|err| { + debug!("link failed {}. falling back to fs::copy", err); + fs::copy(src, dst).map(|_| ()) + }) + .chain_err(|| { + format!( + "failed to link or copy `{}` to `{}`", + src.display(), + dst.display() + ) + })?; + } + + if json_messages { + machine_message::emit(&machine_message::Artifact { + package_id: &package_id, + target: &target, + profile: &profile, + features, + filenames: destinations, + fresh, + }); + } + Ok(()) + })) +} + +fn load_build_deps(cx: &Context, unit: &Unit) -> Option> { + cx.build_scripts.get(unit).cloned() +} + +// For all plugin dependencies, add their -L paths (now calculated and +// present in `state`) to the dynamic library load path for the command to +// execute. 
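The hard-link-then-copy fallback used by `link_targets` above is a generally useful pattern: hard links are cheap but fail across filesystems. A self-contained sketch of the same pattern using only the standard library (the function name is mine):

```rust
use std::fs;
use std::io;
use std::path::Path;

// Try a cheap hard link first; fall back to a full copy, e.g. when `src`
// and `dst` live on different filesystems.
fn link_or_copy(src: &Path, dst: &Path) -> io::Result<()> {
    if dst.exists() {
        fs::remove_file(dst)?;
    }
    fs::hard_link(src, dst).or_else(|err| {
        eprintln!("hard link failed ({}), copying instead", err);
        fs::copy(src, dst).map(|_| ())
    })
}
```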
+fn add_plugin_deps(
+    rustc: &mut ProcessBuilder,
+    build_state: &BuildMap,
+    build_scripts: &BuildScripts,
+    root_output: &PathBuf,
+) -> CargoResult<()> {
+    let var = util::dylib_path_envvar();
+    let search_path = rustc.get_env(var).unwrap_or_default();
+    let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
+    for id in build_scripts.plugins.iter() {
+        let key = (id.clone(), Kind::Host);
+        let output = build_state
+            .get(&key)
+            .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", id)))?;
+        search_path.append(&mut filter_dynamic_search_path(
+            output.library_paths.iter(),
+            root_output,
+        ));
+    }
+    let search_path = join_paths(&search_path, var)?;
+    rustc.env(var, &search_path);
+    Ok(())
+}
+
+// Determine paths to add to the dynamic search path from -L entries
+//
+// Strip off prefixes like "native=" or "framework=" and filter out directories
+// *not* inside our output directory since they are likely spurious and can cause
+// clashes with system shared libraries (issue #3366).
+fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &PathBuf) -> Vec<PathBuf>
+where
+    I: Iterator<Item = &'a PathBuf>,
+{
+    let mut search_path = vec![];
+    for dir in paths {
+        let dir = match dir.to_str() {
+            Some(s) => {
+                let mut parts = s.splitn(2, '=');
+                match (parts.next(), parts.next()) {
+                    (Some("native"), Some(path))
+                    | (Some("crate"), Some(path))
+                    | (Some("dependency"), Some(path))
+                    | (Some("framework"), Some(path))
+                    | (Some("all"), Some(path)) => path.into(),
+                    _ => dir.clone(),
+                }
+            }
+            None => dir.clone(),
+        };
+        if dir.starts_with(&root_output) {
+            search_path.push(dir);
+        } else {
+            debug!(
+                "Not including path {} in runtime library search path because it is \
+                 outside target root {}",
+                dir.display(),
+                root_output.display()
+            );
+        }
+    }
+    search_path
+}
+
+fn prepare_rustc<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    crate_types: &[&str],
+    unit: &Unit<'a>,
+) -> CargoResult<ProcessBuilder> {
+    let mut base = cx.compilation.rustc_process(unit.pkg)?;
+    base.inherit_jobserver(&cx.jobserver);
+    build_base_args(cx, &mut base, unit, crate_types)?;
+    build_deps_args(&mut base, cx, unit)?;
+    Ok(base)
+}
+
+fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Work> {
+    let mut rustdoc = cx.compilation.rustdoc_process(unit.pkg)?;
+    rustdoc.inherit_jobserver(&cx.jobserver);
+    rustdoc.arg("--crate-name").arg(&unit.target.crate_name());
+    add_path_args(cx, unit, &mut rustdoc);
+
+    if unit.kind != Kind::Host {
+        if let Some(target) = cx.requested_target() {
+            rustdoc.arg("--target").arg(target);
+        }
+    }
+
+    let doc_dir = cx.out_dir(unit);
+
+    // Create the documentation directory ahead of time as rustdoc currently has
+    // a bug where concurrent invocations will race to create this directory if
+    // it doesn't already exist.
+    fs::create_dir_all(&doc_dir)?;
+
+    rustdoc.arg("-o").arg(doc_dir);
+
+    for feat in cx.resolve.features_sorted(unit.pkg.package_id()) {
+        rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
+    }
+
+    if let Some(ref args) = unit.profile.rustdoc_args {
+        rustdoc.args(args);
+    }
+
+    build_deps_args(&mut rustdoc, cx, unit)?;
+
+    rustdoc.args(&cx.rustdocflags_args(unit)?);
+
+    let name = unit.pkg.name().to_string();
+    let build_state = cx.build_state.clone();
+    let key = (unit.pkg.package_id().clone(), unit.kind);
+
+    Ok(Work::new(move |state| {
+        if let Some(output) = build_state.outputs.lock().unwrap().get(&key) {
+            for cfg in output.cfgs.iter() {
+                rustdoc.arg("--cfg").arg(cfg);
+            }
+            for &(ref name, ref value) in output.env.iter() {
+                rustdoc.env(name, value);
+            }
+        }
+        state.running(&rustdoc);
+        rustdoc
+            .exec()
+            .chain_err(|| format!("Could not document `{}`.", name))?;
+        Ok(())
+    }))
+}
+
+// The path that we pass to rustc is actually fairly important because it will
+// show up in error messages (important for readability), debug information
+// (important for caching), etc. As a result we need to be pretty careful how we
+// actually invoke rustc.
+//
+// In general users don't expect `cargo build` to cause rebuilds if you change
+// directories. That could be the case if you just change directories within the
+// project, or if you move the whole project wholesale to a new directory. As a
+// result we mostly don't factor in `cwd` to this calculation. Instead we try to
+// track the workspace as much as possible and we update the current directory
+// of rustc/rustdoc where appropriate.
+//
+// The first returned value here is the argument to pass to rustc, and the
+// second is the cwd that rustc should operate in.
+fn path_args(cx: &Context, unit: &Unit) -> (PathBuf, PathBuf) {
+    let ws_root = cx.ws.root();
+    let src = unit.target.src_path();
+    assert!(src.is_absolute());
+    match src.strip_prefix(ws_root) {
+        Ok(path) => (path.to_path_buf(), ws_root.to_path_buf()),
+        Err(_) => (src.to_path_buf(), unit.pkg.root().to_path_buf()),
+    }
+}
+
+fn add_path_args(cx: &Context, unit: &Unit, cmd: &mut ProcessBuilder) {
+    let (arg, cwd) = path_args(cx, unit);
+    cmd.arg(arg);
+    cmd.cwd(cwd);
+}
+
+fn build_base_args<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    cmd: &mut ProcessBuilder,
+    unit: &Unit<'a>,
+    crate_types: &[&str],
+) -> CargoResult<()> {
+    let Profile {
+        ref opt_level,
+        ref lto,
+        codegen_units,
+        ref rustc_args,
+        debuginfo,
+        debug_assertions,
+        overflow_checks,
+        rpath,
+        test,
+        doc: _doc,
+        run_custom_build,
+        ref panic,
+        check,
+        ..
+ } = *unit.profile; + assert!(!run_custom_build); + + cmd.arg("--crate-name").arg(&unit.target.crate_name()); + + add_path_args(cx, unit, cmd); + + match cx.config.shell().color_choice() { + ColorChoice::Always => { + cmd.arg("--color").arg("always"); + } + ColorChoice::Never => { + cmd.arg("--color").arg("never"); + } + ColorChoice::CargoAuto => {} + } + + if cx.build_config.json_messages { + cmd.arg("--error-format").arg("json"); + } + + if !test { + for crate_type in crate_types.iter() { + cmd.arg("--crate-type").arg(crate_type); + } + } + + if check { + cmd.arg("--emit=dep-info,metadata"); + } else { + cmd.arg("--emit=dep-info,link"); + } + + let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build()) + || (crate_types.contains(&"dylib") && cx.ws.members().any(|p| p != unit.pkg)); + if prefer_dynamic { + cmd.arg("-C").arg("prefer-dynamic"); + } + + if opt_level != "0" { + cmd.arg("-C").arg(&format!("opt-level={}", opt_level)); + } + + // If a panic mode was configured *and* we're not ever going to be used in a + // plugin, then we can compile with that panic mode. + // + // If we're used in a plugin then we'll eventually be linked to libsyntax + // most likely which isn't compiled with a custom panic mode, so we'll just + // get an error if we actually compile with that. This fixes `panic=abort` + // crates which have plugin dependencies, but unfortunately means that + // dependencies shared between the main application and plugins must be + // compiled without `panic=abort`. This isn't so bad, though, as the main + // application will still be compiled with `panic=abort`. + if let Some(panic) = panic.as_ref() { + if !cx.used_in_plugin.contains(unit) { + cmd.arg("-C").arg(format!("panic={}", panic)); + } + } + let manifest = unit.pkg.manifest(); + + if manifest.features().is_enabled(Feature::epoch()) { + cmd.arg(format!("-Zepoch={}", manifest.epoch())); + } + + // Disable LTO for host builds as prefer_dynamic and it are mutually + // exclusive. + if unit.target.can_lto() && !unit.target.for_host() { + match *lto { + Lto::Bool(false) => {} + Lto::Bool(true) => { + cmd.args(&["-C", "lto"]); + } + Lto::Named(ref s) => { + cmd.arg("-C").arg(format!("lto={}", s)); + } + } + } + + if let Some(n) = codegen_units { + // There are some restrictions with LTO and codegen-units, so we + // only add codegen units when LTO is not used. + cmd.arg("-C").arg(&format!("codegen-units={}", n)); + } + + if let Some(debuginfo) = debuginfo { + cmd.arg("-C").arg(format!("debuginfo={}", debuginfo)); + } + + if let Some(ref args) = *rustc_args { + cmd.args(args); + } + + // -C overflow-checks is implied by the setting of -C debug-assertions, + // so we only need to provide -C overflow-checks if it differs from + // the value of -C debug-assertions we would provide. 
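The `if` chain just below implements that rule. As a cross-check, here is the same mapping extracted into a free function (a sketch derived from the code that follows, not a separate specification):

```rust
// Given the effective profile settings, return the -C flags Cargo must pass.
// Default behaviour: debug-assertions is on at opt-level 0 and off otherwise,
// and overflow-checks follows debug-assertions unless overridden.
fn implied_flags(
    opt_level: &str,
    debug_assertions: bool,
    overflow_checks: bool,
) -> Vec<&'static str> {
    let mut flags = Vec::new();
    if opt_level != "0" {
        if debug_assertions {
            flags.push("-C debug-assertions=on");
            if !overflow_checks {
                flags.push("-C overflow-checks=off");
            }
        } else if overflow_checks {
            flags.push("-C overflow-checks=on");
        }
    } else if !debug_assertions {
        flags.push("-C debug-assertions=off");
        if overflow_checks {
            flags.push("-C overflow-checks=on");
        }
    } else if !overflow_checks {
        flags.push("-C overflow-checks=off");
    }
    flags
}
```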
+    if opt_level != "0" {
+        if debug_assertions {
+            cmd.args(&["-C", "debug-assertions=on"]);
+            if !overflow_checks {
+                cmd.args(&["-C", "overflow-checks=off"]);
+            }
+        } else if overflow_checks {
+            cmd.args(&["-C", "overflow-checks=on"]);
+        }
+    } else if !debug_assertions {
+        cmd.args(&["-C", "debug-assertions=off"]);
+        if overflow_checks {
+            cmd.args(&["-C", "overflow-checks=on"]);
+        }
+    } else if !overflow_checks {
+        cmd.args(&["-C", "overflow-checks=off"]);
+    }
+
+    if test && unit.target.harness() {
+        cmd.arg("--test");
+    } else if test {
+        cmd.arg("--cfg").arg("test");
+    }
+
+    // We ideally want deterministic invocations of rustc to ensure that
+    // rustc-caching strategies like sccache are able to cache more, so sort the
+    // feature list here.
+    for feat in cx.resolve.features_sorted(unit.pkg.package_id()) {
+        cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
+    }
+
+    match cx.target_metadata(unit) {
+        Some(m) => {
+            cmd.arg("-C").arg(&format!("metadata={}", m));
+            cmd.arg("-C").arg(&format!("extra-filename=-{}", m));
+        }
+        None => {
+            cmd.arg("-C")
+                .arg(&format!("metadata={}", cx.target_short_hash(unit)));
+        }
+    }
+
+    if rpath {
+        cmd.arg("-C").arg("rpath");
+    }
+
+    cmd.arg("--out-dir").arg(&cx.out_dir(unit));
+
+    fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) {
+        if let Some(val) = val {
+            let mut joined = OsString::from(prefix);
+            joined.push(val);
+            cmd.arg(key).arg(joined);
+        }
+    }
+
+    if unit.kind == Kind::Target {
+        opt(
+            cmd,
+            "--target",
+            "",
+            cx.requested_target().map(|s| s.as_ref()),
+        );
+    }
+
+    opt(cmd, "-C", "ar=", cx.ar(unit.kind).map(|s| s.as_ref()));
+    opt(
+        cmd,
+        "-C",
+        "linker=",
+        cx.linker(unit.kind).map(|s| s.as_ref()),
+    );
+    cmd.args(&cx.incremental_args(unit)?);
+
+    Ok(())
+}
+
+fn build_deps_args<'a, 'cfg>(
+    cmd: &mut ProcessBuilder,
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+) -> CargoResult<()> {
+    cmd.arg("-L").arg(&{
+        let mut deps = OsString::from("dependency=");
+        deps.push(cx.deps_dir(unit));
+        deps
+    });
+
+    // Be sure that the host path is also listed. This'll ensure that proc-macro
+    // dependencies are correctly found (for reexported macros).
+    if let Kind::Target = unit.kind {
+        cmd.arg("-L").arg(&{
+            let mut deps = OsString::from("dependency=");
+            deps.push(cx.host_deps());
+            deps
+        });
+    }
+
+    let dep_targets = cx.dep_targets(unit);
+
+    // If a dependency should provide a linkable target but doesn't, rustc will
+    // fail later on when it sees the `extern crate` for it. This may turn into
+    // a hard error in the future, see PR #4797.
+    if !dep_targets
+        .iter()
+        .any(|u| !u.profile.doc && u.target.linkable())
+    {
+        if let Some(u) = dep_targets
+            .iter()
+            .find(|u| !u.profile.doc && u.target.is_lib())
+        {
+            cx.config.shell().warn(format!(
+                "The package `{}` \
+                 provides no linkable target. The compiler might raise an error while compiling \
+                 `{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
+                 Cargo.toml. \
+                 This warning might turn into a hard error in the future.",
+                u.target.crate_name(),
+                unit.target.crate_name(),
+                u.target.crate_name()
+            ))?;
+        }
+    }
+
+    for dep in dep_targets {
+        if dep.profile.run_custom_build {
+            cmd.env("OUT_DIR", &cx.build_script_out_dir(&dep));
+        }
+        if dep.target.linkable() && !dep.profile.doc {
+            link_to(cmd, cx, unit, &dep)?;
+        }
+    }
+
+    return Ok(());
+
+    fn link_to<'a, 'cfg>(
+        cmd: &mut ProcessBuilder,
+        cx: &mut Context<'a, 'cfg>,
+        current: &Unit<'a>,
+        dep: &Unit<'a>,
+    ) -> CargoResult<()> {
+        for &(ref dst, _, file_type) in cx.target_filenames(dep)?.iter() {
+            if file_type != TargetFileType::Linkable {
+                continue;
+            }
+            let mut v = OsString::new();
+
+            // Unfortunately right now Cargo doesn't have a great way to get a
+            // 1:1 mapping of entries in `dependencies()` to the actual crate
+            // we're depending on. Instead we're left to do some guesswork here
+            // to figure out what `Dependency` the `dep` unit corresponds to in
+            // `current` to see if we're renaming it.
+            //
+            // This I believe mostly works out for now, but we'll likely want
+            // to tighten this up in the future.
+            let name = current
+                .pkg
+                .dependencies()
+                .iter()
+                .filter(|d| d.matches_ignoring_source(dep.pkg.package_id()))
+                .filter_map(|d| d.rename())
+                .next();
+
+            v.push(name.unwrap_or(&dep.target.crate_name()));
+            v.push("=");
+            v.push(cx.out_dir(dep));
+            v.push(&path::MAIN_SEPARATOR.to_string());
+            v.push(&dst.file_name().unwrap());
+            cmd.arg("--extern").arg(&v);
+        }
+        Ok(())
+    }
+}
+
+fn envify(s: &str) -> String {
+    s.chars()
+        .flat_map(|c| c.to_uppercase())
+        .map(|c| if c == '-' { '_' } else { c })
+        .collect()
+}
+
+impl Kind {
+    fn for_target(&self, target: &Target) -> Kind {
+        // Once we start compiling for the `Host` kind we continue doing so, but
+        // if we are a `Target` kind and then we start compiling for a target
+        // that needs to be on the host we lift ourselves up to `Host`.
+        match *self {
+            Kind::Host => Kind::Host,
+            Kind::Target if target.for_host() => Kind::Host,
+            Kind::Target => Kind::Target,
+        }
+    }
+}
diff --git a/src/cargo/ops/cargo_rustc/output_depinfo.rs b/src/cargo/ops/cargo_rustc/output_depinfo.rs
new file mode 100644
index 000000000..a8a83425e
--- /dev/null
+++ b/src/cargo/ops/cargo_rustc/output_depinfo.rs
@@ -0,0 +1,125 @@
+use std::collections::{BTreeSet, HashSet};
+use std::io::{BufWriter, Write};
+use std::fs::File;
+use std::path::{Path, PathBuf};
+
+use ops::{Context, Unit};
+use util::{internal, CargoResult};
+use util::paths;
+use ops::cargo_rustc::fingerprint;
+
+fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResult<String> {
+    let path = path.as_ref();
+    let relpath = match basedir {
+        None => path,
+        Some(base) => match path.strip_prefix(base) {
+            Ok(relpath) => relpath,
+            _ => path,
+        },
+    };
+    relpath
+        .to_str()
+        .ok_or_else(|| internal("path not utf-8"))
+        .map(|f| f.replace(" ", "\\ "))
+}
+
+fn add_deps_for_unit<'a, 'b>(
+    deps: &mut BTreeSet<PathBuf>,
+    context: &mut Context<'a, 'b>,
+    unit: &Unit<'a>,
+    visited: &mut HashSet<Unit<'a>>,
+) -> CargoResult<()> {
+    if !visited.insert(*unit) {
+        return Ok(());
+    }
+
+    // Units representing the execution of a build script don't actually
+    // generate a dep-info file, so we just keep on going below.
+    if !unit.profile.run_custom_build {
+        // Add dependencies from rustc dep-info output (stored in the fingerprint directory)
+        let dep_info_loc = fingerprint::dep_info_loc(context, unit);
+        if let Some(paths) = fingerprint::parse_dep_info(unit.pkg, &dep_info_loc)? {
+            for path in paths {
+                deps.insert(path);
+            }
+        } else {
+            debug!(
+                "can't find dep_info for {:?} {:?}",
+                unit.pkg.package_id(),
+                unit.profile
+            );
+            return Err(internal("dep_info missing"));
+        }
+    }
+
+    // Add rerun-if-changed dependencies
+    let key = (unit.pkg.package_id().clone(), unit.kind);
+    if let Some(output) = context.build_state.outputs.lock().unwrap().get(&key) {
+        for path in &output.rerun_if_changed {
+            deps.insert(path.into());
+        }
+    }
+
+    // Recursively traverse all transitive dependencies
+    for dep_unit in context.dep_targets(unit).iter() {
+        let source_id = dep_unit.pkg.package_id().source_id();
+        if source_id.is_path() {
+            add_deps_for_unit(deps, context, dep_unit, visited)?;
+        }
+    }
+    Ok(())
+}
+
+pub fn output_depinfo<'a, 'b>(context: &mut Context<'a, 'b>, unit: &Unit<'a>) -> CargoResult<()> {
+    let mut deps = BTreeSet::new();
+    let mut visited = HashSet::new();
+    let success = add_deps_for_unit(&mut deps, context, unit, &mut visited).is_ok();
+    let basedir_string;
+    let basedir = match context.config.get_path("build.dep-info-basedir")? {
+        Some(value) => {
+            basedir_string = value
+                .val
+                .as_os_str()
+                .to_str()
+                .ok_or_else(|| internal("build.dep-info-basedir path not utf-8"))?
+                .to_string();
+            Some(basedir_string.as_str())
+        }
+        None => None,
+    };
+    let deps = deps.iter()
+        .map(|f| render_filename(f, basedir))
+        .collect::<CargoResult<Vec<_>>>()?;
+
+    for &(_, ref link_dst, _) in context.target_filenames(unit)?.iter() {
+        if let Some(ref link_dst) = *link_dst {
+            let output_path = link_dst.with_extension("d");
+            if success {
+                let target_fn = render_filename(link_dst, basedir)?;
+
+                // If nothing changed don't recreate the file which could alter
+                // its mtime
+                if let Ok(previous) = fingerprint::parse_rustc_dep_info(&output_path) {
+                    if previous.len() == 1 && previous[0].0 == target_fn && previous[0].1 == deps {
+                        continue;
+                    }
+                }
+
+                // Otherwise write it all out
+                let mut outfile = BufWriter::new(File::create(output_path)?);
+                write!(outfile, "{}:", target_fn)?;
+                for dep in &deps {
+                    write!(outfile, " {}", dep)?;
+                }
+                writeln!(outfile, "")?;
+
+            // dep-info generation failed, so delete output file. This will
+            // usually cause the build system to always rerun the build
+            // rule, which is correct if inefficient.
+            } else if output_path.exists() {
+                paths::remove_file(output_path)?;
+            }
+        }
+    }
+    Ok(())
+}
diff --git a/src/cargo/ops/cargo_test.rs b/src/cargo/ops/cargo_test.rs
new file mode 100644
index 000000000..8920840c9
--- /dev/null
+++ b/src/cargo/ops/cargo_test.rs
@@ -0,0 +1,240 @@
+use std::ffi::{OsStr, OsString};
+
+use ops::{self, Compilation};
+use util::{self, CargoTestError, ProcessError, Test};
+use util::errors::CargoResult;
+use core::Workspace;
+
+pub struct TestOptions<'a> {
+    pub compile_opts: ops::CompileOptions<'a>,
+    pub no_run: bool,
+    pub no_fail_fast: bool,
+    pub only_doc: bool,
+}
+
+pub fn run_tests(
+    ws: &Workspace,
+    options: &TestOptions,
+    test_args: &[String],
+) -> CargoResult<Option<CargoTestError>> {
+    let compilation = compile_tests(ws, options)?;
+
+    if options.no_run {
+        return Ok(None);
+    }
+    let (test, mut errors) = if options.only_doc {
+        assert!(options.compile_opts.filter.is_specific());
+        run_doc_tests(options, test_args, &compilation)?
+    } else {
+        run_unit_tests(options, test_args, &compilation)?
+ }; + + // If we have an error and want to fail fast, return + if !errors.is_empty() && !options.no_fail_fast { + return Ok(Some(CargoTestError::new(test, errors))); + } + + // If a specific test was requested or we're not running any tests at all, + // don't run any doc tests. + if options.compile_opts.filter.is_specific() { + match errors.len() { + 0 => return Ok(None), + _ => return Ok(Some(CargoTestError::new(test, errors))), + } + } + + let (doctest, docerrors) = run_doc_tests(options, test_args, &compilation)?; + let test = if docerrors.is_empty() { test } else { doctest }; + errors.extend(docerrors); + if errors.is_empty() { + Ok(None) + } else { + Ok(Some(CargoTestError::new(test, errors))) + } +} + +pub fn run_benches( + ws: &Workspace, + options: &TestOptions, + args: &[String], +) -> CargoResult> { + let mut args = args.to_vec(); + args.push("--bench".to_string()); + let compilation = compile_tests(ws, options)?; + + if options.no_run { + return Ok(None); + } + let (test, errors) = run_unit_tests(options, &args, &compilation)?; + match errors.len() { + 0 => Ok(None), + _ => Ok(Some(CargoTestError::new(test, errors))), + } +} + +fn compile_tests<'a>( + ws: &Workspace<'a>, + options: &TestOptions<'a>, +) -> CargoResult> { + let mut compilation = ops::compile(ws, &options.compile_opts)?; + compilation + .tests + .sort_by(|a, b| (a.0.package_id(), &a.1, &a.2).cmp(&(b.0.package_id(), &b.1, &b.2))); + Ok(compilation) +} + +/// Run the unit and integration tests of a project. +fn run_unit_tests( + options: &TestOptions, + test_args: &[String], + compilation: &Compilation, +) -> CargoResult<(Test, Vec)> { + let config = options.compile_opts.config; + let cwd = options.compile_opts.config.cwd(); + + let mut errors = Vec::new(); + + for &(ref pkg, ref kind, ref test, ref exe) in &compilation.tests { + let to_display = match util::without_prefix(exe, cwd) { + Some(path) => path, + None => &**exe, + }; + let mut cmd = compilation.target_process(exe, pkg)?; + cmd.args(test_args); + config + .shell() + .concise(|shell| shell.status("Running", to_display.display().to_string()))?; + config + .shell() + .verbose(|shell| shell.status("Running", cmd.to_string()))?; + + let result = cmd.exec(); + + match result { + Err(e) => { + let e = e.downcast::()?; + errors.push((kind.clone(), test.clone(), pkg.name().to_string(), e)); + if !options.no_fail_fast { + break; + } + } + Ok(()) => {} + } + } + + if errors.len() == 1 { + let (kind, name, pkg_name, e) = errors.pop().unwrap(); + Ok(( + Test::UnitTest { + kind, + name, + pkg_name, + }, + vec![e], + )) + } else { + Ok(( + Test::Multiple, + errors.into_iter().map(|(_, _, _, e)| e).collect(), + )) + } +} + +fn run_doc_tests( + options: &TestOptions, + test_args: &[String], + compilation: &Compilation, +) -> CargoResult<(Test, Vec)> { + let mut errors = Vec::new(); + let config = options.compile_opts.config; + + // We don't build/rust doctests if target != host + if config.rustc()?.host != compilation.target { + return Ok((Test::Doc, errors)); + } + + let libs = compilation.to_doc_test.iter().map(|package| { + ( + package, + package + .targets() + .iter() + .filter(|t| t.doctested()) + .map(|t| (t.src_path(), t.name(), t.crate_name())), + ) + }); + + for (package, tests) in libs { + for (lib, name, crate_name) in tests { + config.shell().status("Doc-tests", name)?; + let mut p = compilation.rustdoc_process(package)?; + p.arg("--test") + .arg(lib) + .arg("--crate-name") + .arg(&crate_name); + + for &rust_dep in &[&compilation.deps_output] { + let mut arg = 
OsString::from("dependency="); + arg.push(rust_dep); + p.arg("-L").arg(arg); + } + + for native_dep in compilation.native_dirs.iter() { + p.arg("-L").arg(native_dep); + } + + for &host_rust_dep in &[&compilation.host_deps_output] { + let mut arg = OsString::from("dependency="); + arg.push(host_rust_dep); + p.arg("-L").arg(arg); + } + + for arg in test_args { + p.arg("--test-args").arg(arg); + } + + if let Some(cfgs) = compilation.cfgs.get(package.package_id()) { + for cfg in cfgs.iter() { + p.arg("--cfg").arg(cfg); + } + } + + let libs = &compilation.libraries[package.package_id()]; + for &(ref target, ref lib) in libs.iter() { + // Note that we can *only* doctest rlib outputs here. A + // staticlib output cannot be linked by the compiler (it just + // doesn't do that). A dylib output, however, can be linked by + // the compiler, but will always fail. Currently all dylibs are + // built as "static dylibs" where the standard library is + // statically linked into the dylib. The doc tests fail, + // however, for now as they try to link the standard library + // dynamically as well, causing problems. As a result we only + // pass `--extern` for rlib deps and skip out on all other + // artifacts. + if lib.extension() != Some(OsStr::new("rlib")) && !target.for_host() { + continue; + } + let mut arg = OsString::from(target.crate_name()); + arg.push("="); + arg.push(lib); + p.arg("--extern").arg(&arg); + } + + if let Some(flags) = compilation.rustdocflags.get(package.package_id()) { + p.args(flags); + } + + config + .shell() + .verbose(|shell| shell.status("Running", p.to_string()))?; + if let Err(e) = p.exec() { + let e = e.downcast::()?; + errors.push(e); + if !options.no_fail_fast { + return Ok((Test::Doc, errors)); + } + } + } + } + Ok((Test::Doc, errors)) +} diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs new file mode 100644 index 000000000..d90e5f408 --- /dev/null +++ b/src/cargo/ops/lockfile.rs @@ -0,0 +1,161 @@ +use std::io::prelude::*; + +use toml; + +use core::{resolver, Resolve, Workspace}; +use core::resolver::WorkspaceResolve; +use util::Filesystem; +use util::errors::{CargoResult, CargoResultExt}; +use util::toml as cargo_toml; + +pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult> { + if !ws.root().join("Cargo.lock").exists() { + return Ok(None); + } + + let root = Filesystem::new(ws.root().to_path_buf()); + let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?; + + let mut s = String::new(); + f.read_to_string(&mut s) + .chain_err(|| format!("failed to read file: {}", f.path().display()))?; + + let resolve = + (|| -> CargoResult> { + let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?; + let v: resolver::EncodableResolve = resolve.try_into()?; + Ok(Some(v.into_resolve(ws)?)) + })() + .chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?; + Ok(resolve) +} + +pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> { + // Load the original lockfile if it exists. 
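+    // (When it can be read it is compared against the freshly rendered
+    // lockfile further below, so an unchanged file is never rewritten; this
+    // keeps builds working on read-only filesystems.)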
+    let ws_root = Filesystem::new(ws.root().to_path_buf());
+    let orig = ws_root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file");
+    let orig = orig.and_then(|mut f| {
+        let mut s = String::new();
+        f.read_to_string(&mut s)?;
+        Ok(s)
+    });
+
+    let toml = toml::Value::try_from(WorkspaceResolve { ws, resolve }).unwrap();
+
+    let mut out = String::new();
+
+    let deps = toml["package"].as_array().unwrap();
+    for dep in deps.iter() {
+        let dep = dep.as_table().unwrap();
+
+        out.push_str("[[package]]\n");
+        emit_package(dep, &mut out);
+    }
+
+    if let Some(patch) = toml.get("patch") {
+        let list = patch["unused"].as_array().unwrap();
+        for entry in list {
+            out.push_str("[[patch.unused]]\n");
+            emit_package(entry.as_table().unwrap(), &mut out);
+            out.push_str("\n");
+        }
+    }
+
+    if let Some(meta) = toml.get("metadata") {
+        out.push_str("[metadata]\n");
+        out.push_str(&meta.to_string());
+    }
+
+    // If the lockfile contents haven't changed, don't rewrite it. This is
+    // helpful on read-only filesystems.
+    if let Ok(orig) = orig {
+        if are_equal_lockfiles(orig, &out, ws) {
+            return Ok(());
+        }
+    }
+
+    if !ws.config().lock_update_allowed() {
+        if ws.config().cli_unstable().offline {
+            bail!("can't update in the offline mode");
+        }
+
+        let flag = if ws.config().network_allowed() {
+            "--locked"
+        } else {
+            "--frozen"
+        };
+        bail!(
+            "the lock file needs to be updated but {} was passed to \
+             prevent this",
+            flag
+        );
+    }
+
+    // Ok, if that didn't work just write it out
+    ws_root
+        .open_rw("Cargo.lock", ws.config(), "Cargo.lock file")
+        .and_then(|mut f| {
+            f.file().set_len(0)?;
+            f.write_all(out.as_bytes())?;
+            Ok(())
+        })
+        .chain_err(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?;
+    Ok(())
+}
+
+fn are_equal_lockfiles(mut orig: String, current: &str, ws: &Workspace) -> bool {
+    if has_crlf_line_endings(&orig) {
+        orig = orig.replace("\r\n", "\n");
+    }
+
+    // If we want to try and avoid updating the lockfile, parse both and
+    // compare them; since this is somewhat expensive, don't do it in the
+    // common case where we can update lockfiles.
+    if !ws.config().lock_update_allowed() {
+        let res: CargoResult<bool> = (|| {
+            let old: resolver::EncodableResolve = toml::from_str(&orig)?;
+            let new: resolver::EncodableResolve = toml::from_str(current)?;
+            Ok(old.into_resolve(ws)? == new.into_resolve(ws)?)
+        })();
+        if let Ok(true) = res {
+            return true;
+        }
+    }
+
+    current == orig
+}
+
+fn has_crlf_line_endings(s: &str) -> bool {
+    // Only check the first line.
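+    // e.g. "foo\r\nbar" -> true, "foo\nbar" -> false, "no newline" -> false.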
+ if let Some(lf) = s.find('\n') { + s[..lf].ends_with('\r') + } else { + false + } +} + +fn emit_package(dep: &toml::value::Table, out: &mut String) { + out.push_str(&format!("name = {}\n", &dep["name"])); + out.push_str(&format!("version = {}\n", &dep["version"])); + + if dep.contains_key("source") { + out.push_str(&format!("source = {}\n", &dep["source"])); + } + + if let Some(s) = dep.get("dependencies") { + let slice = s.as_array().unwrap(); + + if !slice.is_empty() { + out.push_str("dependencies = [\n"); + + for child in slice.iter() { + out.push_str(&format!(" {},\n", child)); + } + + out.push_str("]\n"); + } + out.push_str("\n"); + } else if dep.contains_key("replace") { + out.push_str(&format!("replace = {}\n\n", &dep["replace"])); + } +} diff --git a/src/cargo/ops/mod.rs b/src/cargo/ops/mod.rs new file mode 100644 index 000000000..86822b3d1 --- /dev/null +++ b/src/cargo/ops/mod.rs @@ -0,0 +1,45 @@ +pub use self::cargo_clean::{clean, CleanOptions}; +pub use self::cargo_compile::{compile, compile_with_exec, compile_ws, CompileOptions}; +pub use self::cargo_compile::{CompileFilter, CompileMode, FilterRule, MessageFormat, Packages}; +pub use self::cargo_read_manifest::{read_package, read_packages}; +pub use self::cargo_rustc::{compile_targets, Compilation, Kind, Unit}; +pub use self::cargo_rustc::{is_bad_artifact_name, Context}; +pub use self::cargo_rustc::{BuildConfig, BuildOutput, TargetConfig}; +pub use self::cargo_rustc::{DefaultExecutor, Executor}; +pub use self::cargo_run::run; +pub use self::cargo_install::{install, install_list, uninstall}; +pub use self::cargo_new::{init, new, NewOptions, VersionControl}; +pub use self::cargo_doc::{doc, DocOptions}; +pub use self::cargo_generate_lockfile::generate_lockfile; +pub use self::cargo_generate_lockfile::update_lockfile; +pub use self::cargo_generate_lockfile::UpdateOptions; +pub use self::lockfile::{load_pkg_lockfile, write_pkg_lockfile}; +pub use self::cargo_test::{run_benches, run_tests, TestOptions}; +pub use self::cargo_package::{package, PackageOpts}; +pub use self::registry::{publish, registry_configuration, RegistryConfig}; +pub use self::registry::{http_handle, needs_custom_http_transport, registry_login, search}; +pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts}; +pub use self::registry::configure_http_handle; +pub use self::cargo_fetch::fetch; +pub use self::cargo_pkgid::pkgid; +pub use self::resolve::{resolve_with_previous, resolve_ws, resolve_ws_precisely, + resolve_ws_with_method}; +pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions}; + +mod cargo_clean; +mod cargo_compile; +mod cargo_doc; +mod cargo_fetch; +mod cargo_generate_lockfile; +mod cargo_install; +mod cargo_new; +mod cargo_output_metadata; +mod cargo_package; +mod cargo_pkgid; +mod cargo_read_manifest; +mod cargo_run; +mod cargo_rustc; +mod cargo_test; +mod lockfile; +mod registry; +mod resolve; diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs new file mode 100644 index 000000000..ed997ccb2 --- /dev/null +++ b/src/cargo/ops/registry.rs @@ -0,0 +1,609 @@ +use std::{cmp, env}; +use std::fs::{self, File}; +use std::iter::repeat; +use std::time::Duration; + +use curl::easy::{Easy, SslOpt}; +use git2; +use registry::{NewCrate, NewCrateDependency, Registry}; + +use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET}; + +use version; +use core::source::Source; +use core::{Package, SourceId, Workspace}; +use core::dependency::Kind; +use core::manifest::ManifestMetadata; +use ops; 
+use sources::RegistrySource; +use util::config::{self, Config}; +use util::paths; +use util::ToUrl; +use util::errors::{CargoResult, CargoResultExt}; +use util::important_paths::find_root_manifest_for_wd; + +pub struct RegistryConfig { + pub index: Option, + pub token: Option, +} + +pub struct PublishOpts<'cfg> { + pub config: &'cfg Config, + pub token: Option, + pub index: Option, + pub verify: bool, + pub allow_dirty: bool, + pub jobs: Option, + pub target: Option, + pub dry_run: bool, + pub registry: Option, +} + +pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> { + let pkg = ws.current()?; + + // Allow publishing if a registry has been provided, or if there are no nightly + // features enabled. + if opts.registry.is_none() && !pkg.manifest().features().activated().is_empty() { + bail!("cannot publish crates which activate nightly-only cargo features to crates.io") + } + + if let Some(ref allowed_registries) = *pkg.publish() { + if !match opts.registry { + Some(ref registry) => allowed_registries.contains(registry), + None => false, + } { + bail!( + "some crates cannot be published.\n\ + `{}` is marked as unpublishable", + pkg.name() + ); + } + } + + if !pkg.manifest().patch().is_empty() { + bail!("published crates cannot contain [patch] sections"); + } + + let (mut registry, reg_id) = registry( + opts.config, + opts.token.clone(), + opts.index.clone(), + opts.registry.clone(), + )?; + verify_dependencies(pkg, ®_id)?; + + // Prepare a tarball, with a non-surpressable warning if metadata + // is missing since this is being put online. + let tarball = ops::package( + ws, + &ops::PackageOpts { + config: opts.config, + verify: opts.verify, + list: false, + check_metadata: true, + allow_dirty: opts.allow_dirty, + target: opts.target.clone(), + jobs: opts.jobs, + registry: opts.registry.clone(), + }, + )?.unwrap(); + + // Upload said tarball to the specified destination + opts.config + .shell() + .status("Uploading", pkg.package_id().to_string())?; + transmit( + opts.config, + pkg, + tarball.file(), + &mut registry, + ®_id, + opts.dry_run, + )?; + + Ok(()) +} + +fn verify_dependencies(pkg: &Package, registry_src: &SourceId) -> CargoResult<()> { + for dep in pkg.dependencies().iter() { + if dep.source_id().is_path() { + if !dep.specified_req() { + bail!( + "all path dependencies must have a version specified \ + when publishing.\ndependency `{}` does not specify \ + a version", + dep.name() + ) + } + } else if dep.source_id() != registry_src { + if dep.source_id().is_registry() { + // Block requests to send to a registry if it is not an alternative + // registry + if !registry_src.is_alt_registry() { + bail!("crates cannot be published to crates.io with dependencies sourced from other\n\ + registries either publish `{}` on crates.io or pull it into this repository\n\ + and specify it with a path and version\n\ + (crate `{}` is pulled from {})", dep.name(), dep.name(), dep.source_id()); + } + } else { + bail!( + "crates cannot be published to crates.io with dependencies sourced from \ + a repository\neither publish `{}` as its own crate on crates.io and \ + specify a crates.io version as a dependency or pull it into this \ + repository and specify it with a path and version\n(crate `{}` has \ + repository path `{}`)", + dep.name(), + dep.name(), + dep.source_id() + ); + } + } + } + Ok(()) +} + +fn transmit( + config: &Config, + pkg: &Package, + tarball: &File, + registry: &mut Registry, + registry_id: &SourceId, + dry_run: bool, +) -> CargoResult<()> { + let deps = 
pkg.dependencies() + .iter() + .map(|dep| { + // If the dependency is from a different registry, then include the + // registry in the dependency. + let dep_registry_id = match dep.registry_id() { + Some(id) => id, + None => bail!("dependency missing registry ID"), + }; + let dep_registry = if dep_registry_id != registry_id { + Some(dep_registry_id.url().to_string()) + } else { + None + }; + + Ok(NewCrateDependency { + optional: dep.is_optional(), + default_features: dep.uses_default_features(), + name: dep.name().to_string(), + features: dep.features().to_vec(), + version_req: dep.version_req().to_string(), + target: dep.platform().map(|s| s.to_string()), + kind: match dep.kind() { + Kind::Normal => "normal", + Kind::Build => "build", + Kind::Development => "dev", + }.to_string(), + registry: dep_registry, + }) + }) + .collect::>>()?; + let manifest = pkg.manifest(); + let ManifestMetadata { + ref authors, + ref description, + ref homepage, + ref documentation, + ref keywords, + ref readme, + ref repository, + ref license, + ref license_file, + ref categories, + ref badges, + ref links, + } = *manifest.metadata(); + let readme_content = match *readme { + Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?), + None => None, + }; + if let Some(ref file) = *license_file { + if fs::metadata(&pkg.root().join(file)).is_err() { + bail!("the license file `{}` does not exist", file) + } + } + + // Do not upload if performing a dry run + if dry_run { + config.shell().warn("aborting upload due to dry run")?; + return Ok(()); + } + + let publish = registry.publish( + &NewCrate { + name: pkg.name().to_string(), + vers: pkg.version().to_string(), + deps, + features: pkg.summary().features().clone(), + authors: authors.clone(), + description: description.clone(), + homepage: homepage.clone(), + documentation: documentation.clone(), + keywords: keywords.clone(), + categories: categories.clone(), + readme: readme_content, + readme_file: readme.clone(), + repository: repository.clone(), + license: license.clone(), + license_file: license_file.clone(), + badges: badges.clone(), + links: links.clone(), + }, + tarball, + ); + + match publish { + Ok(warnings) => { + if !warnings.invalid_categories.is_empty() { + let msg = format!( + "\ + the following are not valid category slugs and were \ + ignored: {}. Please see https://crates.io/category_slugs \ + for the list of all category slugs. \ + ", + warnings.invalid_categories.join(", ") + ); + config.shell().warn(&msg)?; + } + + if !warnings.invalid_badges.is_empty() { + let msg = format!( + "\ + the following are not valid badges and were ignored: {}. \ + Either the badge type specified is unknown or a required \ + attribute is missing. Please see \ + http://doc.crates.io/manifest.html#package-metadata \ + for valid badge types and their required attributes.", + warnings.invalid_badges.join(", ") + ); + config.shell().warn(&msg)?; + } + + Ok(()) + } + Err(e) => Err(e), + } +} + +pub fn registry_configuration( + config: &Config, + registry: Option, +) -> CargoResult { + let (index, token) = match registry { + Some(registry) => ( + Some(config.get_registry_index(®istry)?.to_string()), + config + .get_string(&format!("registries.{}.token", registry))? 
+ .map(|p| p.val), + ), + None => { + // Checking out for default index and token + ( + config.get_string("registry.index")?.map(|p| p.val), + config.get_string("registry.token")?.map(|p| p.val), + ) + } + }; + + Ok(RegistryConfig { index, token }) +} + +pub fn registry( + config: &Config, + token: Option, + index: Option, + registry: Option, +) -> CargoResult<(Registry, SourceId)> { + // Parse all configuration options + let RegistryConfig { + token: token_config, + index: index_config, + } = registry_configuration(config, registry.clone())?; + let token = token.or(token_config); + let sid = match (index_config, index, registry) { + (_, _, Some(registry)) => SourceId::alt_registry(config, ®istry)?, + (Some(index), _, _) | (None, Some(index), _) => SourceId::for_registry(&index.to_url()?)?, + (None, None, _) => SourceId::crates_io(config)?, + }; + let api_host = { + let mut src = RegistrySource::remote(&sid, config); + src.update() + .chain_err(|| format!("failed to update {}", sid))?; + (src.config()?).unwrap().api.unwrap() + }; + let handle = http_handle(config)?; + Ok((Registry::new_handle(api_host, token, handle), sid)) +} + +/// Create a new HTTP handle with appropriate global configuration for cargo. +pub fn http_handle(config: &Config) -> CargoResult { + if config.frozen() { + bail!( + "attempting to make an HTTP request, but --frozen was \ + specified" + ) + } + if !config.network_allowed() { + bail!("can't make HTTP request in the offline mode") + } + + // The timeout option for libcurl by default times out the entire transfer, + // but we probably don't want this. Instead we only set timeouts for the + // connect phase as well as a "low speed" timeout so if we don't receive + // many bytes in a large-ish period of time then we time out. + let mut handle = Easy::new(); + configure_http_handle(config, &mut handle)?; + Ok(handle) +} + +pub fn needs_custom_http_transport(config: &Config) -> CargoResult { + let proxy_exists = http_proxy_exists(config)?; + let timeout = http_timeout(config)?; + let cainfo = config.get_path("http.cainfo")?; + let check_revoke = config.get_bool("http.check-revoke")?; + + Ok(proxy_exists || timeout.is_some() || cainfo.is_some() || check_revoke.is_some()) +} + +/// Configure a libcurl http handle with the defaults options for Cargo +pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<()> { + // The timeout option for libcurl by default times out the entire transfer, + // but we probably don't want this. Instead we only set timeouts for the + // connect phase as well as a "low speed" timeout so if we don't receive + // many bytes in a large-ish period of time then we time out. + handle.connect_timeout(Duration::new(30, 0))?; + handle.low_speed_limit(10 /* bytes per second */)?; + handle.low_speed_time(Duration::new(30, 0))?; + handle.useragent(&version().to_string())?; + if let Some(proxy) = http_proxy(config)? { + handle.proxy(&proxy)?; + } + if let Some(cainfo) = config.get_path("http.cainfo")? { + handle.cainfo(&cainfo.val)?; + } + if let Some(check) = config.get_bool("http.check-revoke")? { + handle.ssl_options(SslOpt::new().no_revoke(!check.val))?; + } + if let Some(timeout) = http_timeout(config)? { + handle.connect_timeout(Duration::new(timeout as u64, 0))?; + handle.low_speed_time(Duration::new(timeout as u64, 0))?; + } + Ok(()) +} + +/// Find an explicit HTTP proxy if one is available. +/// +/// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified +/// via environment variables are picked up by libcurl. 
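+///
+/// For example, a proxy configured in `.cargo/config` as:
+///
+/// ```toml
+/// [http]
+/// proxy = "host:port"
+/// ```
+///
+/// wins over a `[http] proxy` entry in the user's git configuration.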
+fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
+    if let Some(s) = config.get_string("http.proxy")? {
+        return Ok(Some(s.val));
+    }
+    if let Ok(cfg) = git2::Config::open_default() {
+        if let Ok(s) = cfg.get_str("http.proxy") {
+            return Ok(Some(s.to_string()));
+        }
+    }
+    Ok(None)
+}
+
+/// Determine if an http proxy exists.
+///
+/// Checks the following for existence, in order:
+///
+/// * cargo's `http.proxy`
+/// * git's `http.proxy`
+/// * `http_proxy` env var
+/// * `HTTP_PROXY` env var
+/// * `https_proxy` env var
+/// * `HTTPS_PROXY` env var
+fn http_proxy_exists(config: &Config) -> CargoResult<bool> {
+    if http_proxy(config)?.is_some() {
+        Ok(true)
+    } else {
+        Ok(["http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"]
+            .iter()
+            .any(|v| env::var(v).is_ok()))
+    }
+}
+
+fn http_timeout(config: &Config) -> CargoResult<Option<i64>> {
+    if let Some(s) = config.get_i64("http.timeout")? {
+        return Ok(Some(s.val));
+    }
+    Ok(env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok()))
+}
+
+pub fn registry_login(config: &Config, token: String, registry: Option<String>) -> CargoResult<()> {
+    let RegistryConfig {
+        token: old_token, ..
+    } = registry_configuration(config, registry.clone())?;
+
+    if let Some(old_token) = old_token {
+        if old_token == token {
+            return Ok(());
+        }
+    }
+
+    config::save_credentials(config, token, registry)
+}
+
+pub struct OwnersOptions {
+    pub krate: Option<String>,
+    pub token: Option<String>,
+    pub index: Option<String>,
+    pub to_add: Option<Vec<String>>,
+    pub to_remove: Option<Vec<String>>,
+    pub list: bool,
+    pub registry: Option<String>,
+}
+
+pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
+    let name = match opts.krate {
+        Some(ref name) => name.clone(),
+        None => {
+            let manifest_path = find_root_manifest_for_wd(config.cwd())?;
+            let ws = Workspace::new(&manifest_path, config)?;
+            ws.current()?.package_id().name().to_string()
+        }
+    };
+
+    let (mut registry, _) = registry(
+        config,
+        opts.token.clone(),
+        opts.index.clone(),
+        opts.registry.clone(),
+    )?;
+
+    if let Some(ref v) = opts.to_add {
+        let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
+        let msg = registry
+            .add_owners(&name, &v)
+            .map_err(|e| format_err!("failed to invite owners to crate {}: {}", name, e))?;
+
+        config.shell().status("Owner", msg)?;
+    }
+
+    if let Some(ref v) = opts.to_remove {
+        let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
+        config
+            .shell()
+            .status("Owner", format!("removing {:?} from crate {}", v, name))?;
+        registry
+            .remove_owners(&name, &v)
+            .chain_err(|| format!("failed to remove owners from crate {}", name))?;
+    }
+
+    if opts.list {
+        let owners = registry
+            .list_owners(&name)
+            .chain_err(|| format!("failed to list owners of crate {}", name))?;
+        for owner in owners.iter() {
+            print!("{}", owner.login);
+            match (owner.name.as_ref(), owner.email.as_ref()) {
+                (Some(name), Some(email)) => println!(" ({} <{}>)", name, email),
+                (Some(s), None) | (None, Some(s)) => println!(" ({})", s),
+                (None, None) => println!(),
+            }
+        }
+    }
+
+    Ok(())
+}
+
+pub fn yank(
+    config: &Config,
+    krate: Option<String>,
+    version: Option<String>,
+    token: Option<String>,
+    index: Option<String>,
+    undo: bool,
+    reg: Option<String>,
+) -> CargoResult<()> {
+    let name = match krate {
+        Some(name) => name,
+        None => {
+            let manifest_path = find_root_manifest_for_wd(config.cwd())?;
+            let ws = Workspace::new(&manifest_path, config)?;
+            ws.current()?.package_id().name().to_string()
+        }
+    };
+    let version = match version {
+        Some(v) => v,
+        None => bail!("a version must be specified to yank"),
+    };
+
+    let (mut registry, _) = registry(config, token, index, reg)?;
+
+    if
undo { + config + .shell() + .status("Unyank", format!("{}:{}", name, version))?; + registry + .unyank(&name, &version) + .chain_err(|| "failed to undo a yank")?; + } else { + config + .shell() + .status("Yank", format!("{}:{}", name, version))?; + registry + .yank(&name, &version) + .chain_err(|| "failed to yank")?; + } + + Ok(()) +} + +pub fn search( + query: &str, + config: &Config, + index: Option, + limit: u32, + reg: Option, +) -> CargoResult<()> { + fn truncate_with_ellipsis(s: &str, max_width: usize) -> String { + // We should truncate at grapheme-boundary and compute character-widths, + // yet the dependencies on unicode-segmentation and unicode-width are + // not worth it. + let mut chars = s.chars(); + let mut prefix = (&mut chars).take(max_width - 1).collect::(); + if chars.next().is_some() { + prefix.push('…'); + } + prefix + } + + let (mut registry, _) = registry(config, None, index, reg)?; + let (crates, total_crates) = registry + .search(query, limit) + .chain_err(|| "failed to retrieve search results from the registry")?; + + let names = crates + .iter() + .map(|krate| format!("{} = \"{}\"", krate.name, krate.max_version)) + .collect::>(); + + let description_margin = names.iter().map(|s| s.len() + 4).max().unwrap_or_default(); + + let description_length = cmp::max(80, 128 - description_margin); + + let descriptions = crates.iter().map(|krate| { + krate + .description + .as_ref() + .map(|desc| truncate_with_ellipsis(&desc.replace("\n", " "), description_length)) + }); + + for (name, description) in names.into_iter().zip(descriptions) { + let line = match description { + Some(desc) => { + let space = repeat(' ') + .take(description_margin - name.len()) + .collect::(); + name + &space + "# " + &desc + } + None => name, + }; + println!("{}", line); + } + + let search_max_limit = 100; + if total_crates > u32::from(limit) && limit < search_max_limit { + println!( + "... and {} crates more (use --limit N to see more)", + total_crates - u32::from(limit) + ); + } else if total_crates > u32::from(limit) && limit >= search_max_limit { + println!( + "... and {} crates more (go to http://crates.io/search?q={} to see more)", + total_crates - u32::from(limit), + percent_encode(query.as_bytes(), QUERY_ENCODE_SET) + ); + } + + Ok(()) +} diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs new file mode 100644 index 000000000..c9f1e5a3d --- /dev/null +++ b/src/cargo/ops/resolve.rs @@ -0,0 +1,515 @@ +use std::collections::HashSet; + +use core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace}; +use core::registry::PackageRegistry; +use core::resolver::{self, Method, Resolve}; +use sources::PathSource; +use ops; +use util::profile; +use util::errors::{CargoResult, CargoResultExt}; + +/// Resolve all dependencies for the workspace using the previous +/// lockfile as a guide if present. +/// +/// This function will also write the result of resolution as a new +/// lockfile. +pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> { + let mut registry = PackageRegistry::new(ws.config())?; + let resolve = resolve_with_registry(ws, &mut registry, true)?; + let packages = get_resolved_packages(&resolve, registry); + Ok((packages, resolve)) +} + +/// Resolves dependencies for some packages of the workspace, +/// taking into account `paths` overrides and activated features. 
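+///
+/// `all_features` maps to `Method::Everything`; otherwise the listed
+/// `features` are activated, plus the default features unless
+/// `no_default_features` is set.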
+pub fn resolve_ws_precisely<'a>( + ws: &Workspace<'a>, + source: Option>, + features: &[String], + all_features: bool, + no_default_features: bool, + specs: &[PackageIdSpec], +) -> CargoResult<(PackageSet<'a>, Resolve)> { + let features = Method::split_features(features); + let method = if all_features { + Method::Everything + } else { + Method::Required { + dev_deps: true, + features: &features, + all_features: false, + uses_default_features: !no_default_features, + } + }; + resolve_ws_with_method(ws, source, method, specs) +} + +pub fn resolve_ws_with_method<'a>( + ws: &Workspace<'a>, + source: Option>, + method: Method, + specs: &[PackageIdSpec], +) -> CargoResult<(PackageSet<'a>, Resolve)> { + let mut registry = PackageRegistry::new(ws.config())?; + if let Some(source) = source { + registry.add_preloaded(source); + } + let mut add_patches = true; + + let resolve = if ws.require_optional_deps() { + // First, resolve the root_package's *listed* dependencies, as well as + // downloading and updating all remotes and such. + let resolve = resolve_with_registry(ws, &mut registry, false)?; + add_patches = false; + + // Second, resolve with precisely what we're doing. Filter out + // transitive dependencies if necessary, specify features, handle + // overrides, etc. + let _p = profile::start("resolving w/ overrides..."); + + add_overrides(&mut registry, ws)?; + + for &(ref replace_spec, ref dep) in ws.root_replace() { + if !resolve + .iter() + .any(|r| replace_spec.matches(r) && !dep.matches_id(r)) + { + ws.config() + .shell() + .warn(format!("package replacement is not used: {}", replace_spec))? + } + } + + Some(resolve) + } else { + ops::load_pkg_lockfile(ws)? + }; + + let resolved_with_overrides = ops::resolve_with_previous( + &mut registry, + ws, + method, + resolve.as_ref(), + None, + specs, + add_patches, + true, + )?; + + let packages = get_resolved_packages(&resolved_with_overrides, registry); + + Ok((packages, resolved_with_overrides)) +} + +fn resolve_with_registry<'cfg>( + ws: &Workspace<'cfg>, + registry: &mut PackageRegistry<'cfg>, + warn: bool, +) -> CargoResult { + let prev = ops::load_pkg_lockfile(ws)?; + let resolve = resolve_with_previous( + registry, + ws, + Method::Everything, + prev.as_ref(), + None, + &[], + true, + warn, + )?; + + if !ws.is_ephemeral() { + ops::write_pkg_lockfile(ws, &resolve)?; + } + Ok(resolve) +} + +/// Resolve all dependencies for a package using an optional previous instance +/// of resolve to guide the resolution process. +/// +/// This also takes an optional hash set, `to_avoid`, which is a list of package +/// ids that should be avoided when consulting the previous instance of resolve +/// (often used in pairings with updates). +/// +/// The previous resolve normally comes from a lockfile. This function does not +/// read or write lockfiles from the filesystem. +pub fn resolve_with_previous<'a, 'cfg>( + registry: &mut PackageRegistry<'cfg>, + ws: &Workspace<'cfg>, + method: Method, + previous: Option<&'a Resolve>, + to_avoid: Option<&HashSet<&'a PackageId>>, + specs: &[PackageIdSpec], + register_patches: bool, + warn: bool, +) -> CargoResult { + // Here we place an artificial limitation that all non-registry sources + // cannot be locked at more than one revision. This means that if a git + // repository provides more than one package, they must all be updated in + // step when any of them are updated. 
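+    // (For example, if one git repository provides both `foo` and `bar`,
+    // `cargo update -p foo` will also re-lock `bar` to the same new revision.)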
+ // + // TODO: This seems like a hokey reason to single out the registry as being + // different + let mut to_avoid_sources = HashSet::new(); + if let Some(to_avoid) = to_avoid { + to_avoid_sources.extend( + to_avoid + .iter() + .map(|p| p.source_id()) + .filter(|s| !s.is_registry()), + ); + } + + let ref keep = |p: &&'a PackageId| { + !to_avoid_sources.contains(&p.source_id()) && match to_avoid { + Some(set) => !set.contains(p), + None => true, + } + }; + + // In the case where a previous instance of resolve is available, we + // want to lock as many packages as possible to the previous version + // without disturbing the graph structure. + let mut try_to_use = HashSet::new(); + if let Some(r) = previous { + trace!("previous: {:?}", r); + register_previous_locks(ws, registry, r, keep); + + // Everything in the previous lock file we want to keep is prioritized + // in dependency selection if it comes up, aka we want to have + // conservative updates. + try_to_use.extend(r.iter().filter(keep).inspect(|id| { + debug!("attempting to prefer {}", id); + })); + } + + if register_patches { + for (url, patches) in ws.root_patch() { + let previous = match previous { + Some(r) => r, + None => { + registry.patch(url, patches)?; + continue; + } + }; + let patches = patches + .iter() + .map(|dep| { + let unused = previous.unused_patches(); + let candidates = previous.iter().chain(unused); + match candidates.filter(keep).find(|id| dep.matches_id(id)) { + Some(id) => { + let mut dep = dep.clone(); + dep.lock_to(id); + dep + } + None => dep.clone(), + } + }) + .collect::>(); + registry.patch(url, &patches)?; + } + + registry.lock_patches(); + } + + let mut summaries = Vec::new(); + for member in ws.members() { + registry.add_sources(&[member.package_id().source_id().clone()])?; + let method_to_resolve = match method { + // When everything for a workspace we want to be sure to resolve all + // members in the workspace, so propagate the `Method::Everything`. + Method::Everything => Method::Everything, + + // If we're not resolving everything though then we're constructing the + // exact crate graph we're going to build. Here we don't necessarily + // want to keep around all workspace crates as they may not all be + // built/tested. + // + // Additionally, the `method` specified represents command line + // flags, which really only matters for the current package + // (determined by the cwd). If other packages are specified (via + // `-p`) then the command line flags like features don't apply to + // them. + // + // As a result, if this `member` is the current member of the + // workspace, then we use `method` specified. Otherwise we use a + // base method with no features specified but using default features + // for any other packages specified with `-p`. + Method::Required { dev_deps, .. 
} => { + let base = Method::Required { + dev_deps, + features: &[], + all_features: false, + uses_default_features: true, + }; + let member_id = member.package_id(); + match ws.current_opt() { + Some(current) if member_id == current.package_id() => method, + _ => { + if specs.iter().any(|spec| spec.matches(member_id)) { + base + } else { + continue; + } + } + } + } + }; + + let summary = registry.lock(member.summary().clone()); + summaries.push((summary, method_to_resolve)); + } + + let root_replace = ws.root_replace(); + + let replace = match previous { + Some(r) => root_replace + .iter() + .map(|&(ref spec, ref dep)| { + for (key, val) in r.replacements().iter() { + if spec.matches(key) && dep.matches_id(val) && keep(&val) { + let mut dep = dep.clone(); + dep.lock_to(val); + return (spec.clone(), dep); + } + } + (spec.clone(), dep.clone()) + }) + .collect::>(), + None => root_replace.to_vec(), + }; + + ws.preload(registry); + let mut resolved = resolver::resolve( + &summaries, + &replace, + registry, + &try_to_use, + Some(ws.config()), + warn, + )?; + resolved.register_used_patches(registry.patches()); + if let Some(previous) = previous { + resolved.merge_from(previous)?; + } + Ok(resolved) +} + +/// Read the `paths` configuration variable to discover all path overrides that +/// have been configured. +fn add_overrides<'a>(registry: &mut PackageRegistry<'a>, ws: &Workspace<'a>) -> CargoResult<()> { + let paths = match ws.config().get_list("paths")? { + Some(list) => list, + None => return Ok(()), + }; + + let paths = paths.val.iter().map(|&(ref s, ref p)| { + // The path listed next to the string is the config file in which the + // key was located, so we want to pop off the `.cargo/config` component + // to get the directory containing the `.cargo` folder. + (p.parent().unwrap().parent().unwrap().join(s), p) + }); + + for (path, definition) in paths { + let id = SourceId::for_path(&path)?; + let mut source = PathSource::new_recursive(&path, &id, ws.config()); + source.update().chain_err(|| { + format!( + "failed to update path override `{}` \ + (defined in `{}`)", + path.display(), + definition.display() + ) + })?; + registry.add_override(Box::new(source)); + } + Ok(()) +} + +fn get_resolved_packages<'a>(resolve: &Resolve, registry: PackageRegistry<'a>) -> PackageSet<'a> { + let ids: Vec = resolve.iter().cloned().collect(); + registry.get(&ids) +} + +/// In this function we're responsible for informing the `registry` of all +/// locked dependencies from the previous lock file we had, `resolve`. +/// +/// This gets particularly tricky for a couple of reasons. The first is that we +/// want all updates to be conservative, so we actually want to take the +/// `resolve` into account (and avoid unnecessary registry updates and such). +/// the second, however, is that we want to be resilient to updates of +/// manifests. For example if a dependency is added or a version is changed we +/// want to make sure that we properly re-resolve (conservatively) instead of +/// providing an opaque error. +/// +/// The logic here is somewhat subtle but there should be more comments below to +/// help out, and otherwise feel free to ask on IRC if there's questions! 
+/// +/// Note that this function, at the time of this writing, is basically the +/// entire fix for #4127 +fn register_previous_locks<'a>( + ws: &Workspace, + registry: &mut PackageRegistry, + resolve: &'a Resolve, + keep: &Fn(&&'a PackageId) -> bool, +) { + let path_pkg = |id: &SourceId| { + if !id.is_path() { + return None; + } + if let Ok(path) = id.url().to_file_path() { + if let Ok(pkg) = ws.load(&path.join("Cargo.toml")) { + return Some(pkg); + } + } + None + }; + + // Ok so we've been passed in a `keep` function which basically says "if I + // return true then this package wasn't listed for an update on the command + // line". AKA if we run `cargo update -p foo` then `keep(bar)` will return + // `true`, whereas `keep(foo)` will return `true` (roughly). + // + // This isn't actually quite what we want, however. Instead we want to + // further refine this `keep` function with *all transitive dependencies* of + // the packages we're not keeping. For example consider a case like this: + // + // * There's a crate `log` + // * There's a crate `serde` which depends on `log` + // + // Let's say we then run `cargo update -p serde`. This may *also* want to + // update the `log` dependency as our newer version of `serde` may have a + // new minimum version required for `log`. Now this isn't always guaranteed + // to work. What'll happen here is we *won't* lock the `log` dependency nor + // the `log` crate itself, but we will inform the registry "please prefer + // this version of `log`". That way if our newer version of serde works with + // the older version of `log`, we conservatively won't update `log`. If, + // however, nothing else in the dependency graph depends on `log` and the + // newer version of `serde` requires a new version of `log` it'll get pulled + // in (as we didn't accidentally lock it to an old version). + // + // Additionally here we process all path dependencies listed in the previous + // resolve. They can not only have their dependencies change but also + // the versions of the package change as well. If this ends up happening + // then we want to make sure we don't lock a package id node that doesn't + // actually exist. Note that we don't do transitive visits of all the + // package's dependencies here as that'll be covered below to poison those + // if they changed. + let mut avoid_locking = HashSet::new(); + for node in resolve.iter() { + if !keep(&node) { + add_deps(resolve, node, &mut avoid_locking); + } else if let Some(pkg) = path_pkg(node.source_id()) { + if pkg.package_id() != node { + avoid_locking.insert(node); + } + } + } + + // Ok but the above loop isn't the entire story! Updates to the dependency + // graph can come from two locations, the `cargo update` command or + // manifests themselves. For example a manifest on the filesystem may + // have been updated to have an updated version requirement on `serde`. In + // this case both `keep(serde)` and `keep(log)` return `true` (the `keep` + // that's an argument to this function). We, however, don't want to keep + // either of those! Otherwise we'll get obscure resolve errors about locked + // versions. + // + // To solve this problem we iterate over all packages with path sources + // (aka ones with manifests that are changing) and take a look at all of + // their dependencies. If any dependency does not match something in the + // previous lock file, then we're guaranteed that the main resolver will + // update the source of this dependency no matter what. 
Knowing this we + // poison all packages from the same source, forcing them all to get + // updated. + // + // This may seem like a heavy hammer, and it is! It means that if you change + // anything from crates.io then all of crates.io becomes unlocked. Note, + // however, that we still want conservative updates. This currently happens + // because the first candidate the resolver picks is the previously locked + // version, and only if that fails to activate to we move on and try + // a different version. (giving the guise of conservative updates) + // + // For example let's say we had `serde = "0.1"` written in our lock file. + // When we later edit this to `serde = "0.1.3"` we don't want to lock serde + // at its old version, 0.1.1. Instead we want to allow it to update to + // `0.1.3` and update its own dependencies (like above). To do this *all + // crates from crates.io* are not locked (aka added to `avoid_locking`). + // For dependencies like `log` their previous version in the lock file will + // come up first before newer version, if newer version are available. + let mut path_deps = ws.members().cloned().collect::>(); + let mut visited = HashSet::new(); + while let Some(member) = path_deps.pop() { + if !visited.insert(member.package_id().clone()) { + continue; + } + for dep in member.dependencies() { + // If this dependency didn't match anything special then we may want + // to poison the source as it may have been added. If this path + // dependencies is *not* a workspace member, however, and it's an + // optional/non-transitive dependency then it won't be necessarily + // be in our lock file. If this shows up then we avoid poisoning + // this source as otherwise we'd repeatedly update the registry. + // + // TODO: this breaks adding an optional dependency in a + // non-workspace member and then simultaneously editing the + // dependency on that crate to enable the feature. For now + // this bug is better than the always updating registry + // though... + if !ws.members().any(|pkg| pkg.package_id() == member.package_id()) && + (dep.is_optional() || !dep.is_transitive()) { + continue + } + + // If this is a path dependency then try to push it onto our + // worklist + if let Some(pkg) = path_pkg(dep.source_id()) { + path_deps.push(pkg); + continue; + } + + // If we match *anything* in the dependency graph then we consider + // ourselves A-OK and assume that we'll resolve to that. + if resolve.iter().any(|id| dep.matches_ignoring_source(id)) { + continue; + } + + // Ok if nothing matches, then we poison the source of this + // dependencies and the previous lock file. + debug!("poisoning {} because {} looks like it changed {}", + dep.source_id(), + member.package_id(), + dep.name()); + for id in resolve.iter().filter(|id| id.source_id() == dep.source_id()) { + add_deps(resolve, id, &mut avoid_locking); + } + } + } + + // Alright now that we've got our new, fresh, shiny, and refined `keep` + // function let's put it to action. Take a look at the previous lockfile, + // filter everything by this callback, and then shove everything else into + // the registry as a locked dependency. 
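+    // The refined filter below is simply the caller's `keep` with the
+    // poisoned set subtracted.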
+ let ref keep = |id: &&'a PackageId| keep(id) && !avoid_locking.contains(id); + + for node in resolve.iter().filter(keep) { + let deps = resolve + .deps_not_replaced(node) + .filter(keep) + .cloned() + .collect(); + registry.register_lock(node.clone(), deps); + } + + /// recursively add `node` and all its transitive dependencies to `set` + fn add_deps<'a>(resolve: &'a Resolve, node: &'a PackageId, set: &mut HashSet<&'a PackageId>) { + if !set.insert(node) { + return; + } + debug!("ignoring any lock pointing directly at {}", node); + for dep in resolve.deps_not_replaced(node) { + add_deps(resolve, dep, set); + } + } +} diff --git a/src/cargo/sources/config.rs b/src/cargo/sources/config.rs new file mode 100644 index 000000000..991c3c631 --- /dev/null +++ b/src/cargo/sources/config.rs @@ -0,0 +1,244 @@ +//! Implementation of configuration for various sources +//! +//! This module will parse the various `source.*` TOML configuration keys into a +//! structure usable by Cargo itself. Currently this is primarily used to map +//! sources to one another via the `replace-with` key in `.cargo/config`. + +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +use url::Url; + +use core::{GitReference, Source, SourceId}; +use sources::ReplacedSource; +use util::{Config, ToUrl}; +use util::config::ConfigValue; +use util::errors::{CargoResult, CargoResultExt}; + +#[derive(Clone)] +pub struct SourceConfigMap<'cfg> { + cfgs: HashMap, + id2name: HashMap, + config: &'cfg Config, +} + +/// Configuration for a particular source, found in TOML looking like: +/// +/// ```toml +/// [source.crates-io] +/// registry = 'https://github.com/rust-lang/crates.io-index' +/// replace-with = 'foo' # optional +/// ``` +#[derive(Clone)] +struct SourceConfig { + // id this source corresponds to, inferred from the various defined keys in + // the configuration + id: SourceId, + + // Name of the source that this source should be replaced with. This field + // is a tuple of (name, path) where path is where this configuration key was + // defined (the literal `.cargo/config` file). + replace_with: Option<(String, PathBuf)>, +} + +impl<'cfg> SourceConfigMap<'cfg> { + pub fn new(config: &'cfg Config) -> CargoResult> { + let mut base = SourceConfigMap::empty(config)?; + if let Some(table) = config.get_table("source")? 
{ + for (key, value) in table.val.iter() { + base.add_config(key, value)?; + } + } + Ok(base) + } + + pub fn empty(config: &'cfg Config) -> CargoResult> { + let mut base = SourceConfigMap { + cfgs: HashMap::new(), + id2name: HashMap::new(), + config, + }; + base.add( + "crates-io", + SourceConfig { + id: SourceId::crates_io(config)?, + replace_with: None, + }, + ); + Ok(base) + } + + pub fn config(&self) -> &'cfg Config { + self.config + } + + pub fn load(&self, id: &SourceId) -> CargoResult> { + debug!("loading: {}", id); + let mut name = match self.id2name.get(id) { + Some(name) => name, + None => return Ok(id.load(self.config)?), + }; + let mut path = Path::new("/"); + let orig_name = name; + let new_id; + loop { + let cfg = match self.cfgs.get(name) { + Some(cfg) => cfg, + None => bail!( + "could not find a configured source with the \ + name `{}` when attempting to lookup `{}` \ + (configuration in `{}`)", + name, + orig_name, + path.display() + ), + }; + match cfg.replace_with { + Some((ref s, ref p)) => { + name = s; + path = p; + } + None if *id == cfg.id => return Ok(id.load(self.config)?), + None => { + new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string())); + break; + } + } + debug!("following pointer to {}", name); + if name == orig_name { + bail!( + "detected a cycle of `replace-with` sources, the source \ + `{}` is eventually replaced with itself \ + (configuration in `{}`)", + name, + path.display() + ) + } + } + let new_src = new_id.load(self.config)?; + let old_src = id.load(self.config)?; + if !new_src.supports_checksums() && old_src.supports_checksums() { + bail!( + "\ +cannot replace `{orig}` with `{name}`, the source `{orig}` supports \ +checksums, but `{name}` does not + +a lock file compatible with `{orig}` cannot be generated in this situation +", + orig = orig_name, + name = name + ); + } + + if old_src.requires_precise() && id.precise().is_none() { + bail!( + "\ +the source {orig} requires a lock file to be present first before it can be +used against vendored source code + +remove the source replacement configuration, generate a lock file, and then +restore the source replacement configuration to continue the build +", + orig = orig_name + ); + } + + Ok(Box::new(ReplacedSource::new(id, &new_id, new_src))) + } + + fn add(&mut self, name: &str, cfg: SourceConfig) { + self.id2name.insert(cfg.id.clone(), name.to_string()); + self.cfgs.insert(name.to_string(), cfg); + } + + fn add_config(&mut self, name: &str, cfg: &ConfigValue) -> CargoResult<()> { + let (table, _path) = cfg.table(&format!("source.{}", name))?; + let mut srcs = Vec::new(); + if let Some(val) = table.get("registry") { + let url = url(val, &format!("source.{}.registry", name))?; + srcs.push(SourceId::for_registry(&url)?); + } + if let Some(val) = table.get("local-registry") { + let (s, path) = val.string(&format!("source.{}.local-registry", name))?; + let mut path = path.to_path_buf(); + path.pop(); + path.pop(); + path.push(s); + srcs.push(SourceId::for_local_registry(&path)?); + } + if let Some(val) = table.get("directory") { + let (s, path) = val.string(&format!("source.{}.directory", name))?; + let mut path = path.to_path_buf(); + path.pop(); + path.pop(); + path.push(s); + srcs.push(SourceId::for_directory(&path)?); + } + if let Some(val) = table.get("git") { + let url = url(val, &format!("source.{}.git", name))?; + let try = |s: &str| { + let val = match table.get(s) { + Some(s) => s, + None => return Ok(None), + }; + let key = format!("source.{}.{}", name, s); + 
val.string(&key).map(Some) + }; + let reference = match try("branch")? { + Some(b) => GitReference::Branch(b.0.to_string()), + None => match try("tag")? { + Some(b) => GitReference::Tag(b.0.to_string()), + None => match try("rev")? { + Some(b) => GitReference::Rev(b.0.to_string()), + None => GitReference::Branch("master".to_string()), + }, + }, + }; + srcs.push(SourceId::for_git(&url, reference)?); + } + if name == "crates-io" && srcs.is_empty() { + srcs.push(SourceId::crates_io(self.config)?); + } + + let mut srcs = srcs.into_iter(); + let src = srcs.next().ok_or_else(|| { + format_err!( + "no source URL specified for `source.{}`, need \ + either `registry` or `local-registry` defined", + name + ) + })?; + if srcs.next().is_some() { + bail!("more than one source URL specified for `source.{}`", name) + } + + let mut replace_with = None; + if let Some(val) = table.get("replace-with") { + let (s, path) = val.string(&format!("source.{}.replace-with", name))?; + replace_with = Some((s.to_string(), path.to_path_buf())); + } + + self.add( + name, + SourceConfig { + id: src, + replace_with, + }, + ); + + return Ok(()); + + fn url(cfg: &ConfigValue, key: &str) -> CargoResult { + let (url, path) = cfg.string(key)?; + let url = url.to_url().chain_err(|| { + format!( + "configuration key `{}` specified an invalid \ + URL (in {})", + key, + path.display() + ) + })?; + Ok(url) + } + } +} diff --git a/src/cargo/sources/directory.rs b/src/cargo/sources/directory.rs new file mode 100644 index 000000000..a4d36ead4 --- /dev/null +++ b/src/cargo/sources/directory.rs @@ -0,0 +1,203 @@ +use std::collections::HashMap; +use std::fmt::{self, Debug, Formatter}; +use std::fs::File; +use std::io::Read; +use std::path::{Path, PathBuf}; + +use hex; + +use serde_json; + +use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary}; +use sources::PathSource; +use util::{Config, Sha256}; +use util::errors::{CargoResult, CargoResultExt}; +use util::paths; + +pub struct DirectorySource<'cfg> { + source_id: SourceId, + root: PathBuf, + packages: HashMap, + config: &'cfg Config, +} + +#[derive(Deserialize)] +struct Checksum { + package: Option, + files: HashMap, +} + +impl<'cfg> DirectorySource<'cfg> { + pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> DirectorySource<'cfg> { + DirectorySource { + source_id: id.clone(), + root: path.to_path_buf(), + config, + packages: HashMap::new(), + } + } +} + +impl<'cfg> Debug for DirectorySource<'cfg> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + write!(f, "DirectorySource {{ root: {:?} }}", self.root) + } +} + +impl<'cfg> Registry for DirectorySource<'cfg> { + fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { + let packages = self.packages.values().map(|p| &p.0); + let matches = packages.filter(|pkg| dep.matches(pkg.summary())); + for summary in matches.map(|pkg| pkg.summary().clone()) { + f(summary); + } + Ok(()) + } + + fn supports_checksums(&self) -> bool { + true + } + + fn requires_precise(&self) -> bool { + true + } +} + +impl<'cfg> Source for DirectorySource<'cfg> { + fn source_id(&self) -> &SourceId { + &self.source_id + } + + fn update(&mut self) -> CargoResult<()> { + self.packages.clear(); + let entries = self.root.read_dir().chain_err(|| { + format!( + "failed to read root of directory source: {}", + self.root.display() + ) + })?; + + for entry in entries { + let entry = entry?; + let path = entry.path(); + + // Ignore hidden/dot directories as they typically don't contain + // crates and 
otherwise may conflict with a VCS + // (rust-lang/cargo#3414). + if let Some(s) = path.file_name().and_then(|s| s.to_str()) { + if s.starts_with('.') { + continue; + } + } + + // Vendor directories are often checked into a VCS, but throughout + // the lifetime of a vendor dir crates are often added and deleted. + // Some VCS implementations don't always fully delete the directory + // when a dir is removed from a different checkout. Sometimes a + // mostly-empty dir is left behind. + // + // Additionally vendor directories are sometimes accompanied with + // readme files and other auxiliary information not too interesting + // to Cargo. + // + // To help handle all this we only try processing folders with a + // `Cargo.toml` in them. This has the upside of being pretty + // flexible with the contents of vendor directories but has the + // downside of accidentally misconfigured vendor directories + // silently returning less crates. + if !path.join("Cargo.toml").exists() { + continue; + } + + let mut src = PathSource::new(&path, &self.source_id, self.config); + src.update()?; + let pkg = src.root_package()?; + + let cksum_file = path.join(".cargo-checksum.json"); + let cksum = paths::read(&path.join(cksum_file)).chain_err(|| { + format!( + "failed to load checksum `.cargo-checksum.json` \ + of {} v{}", + pkg.package_id().name(), + pkg.package_id().version() + ) + })?; + let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| { + format!( + "failed to decode `.cargo-checksum.json` of \ + {} v{}", + pkg.package_id().name(), + pkg.package_id().version() + ) + })?; + + let mut manifest = pkg.manifest().clone(); + let mut summary = manifest.summary().clone(); + if let Some(ref package) = cksum.package { + summary = summary.set_checksum(package.clone()); + } + manifest.set_summary(summary); + let pkg = Package::new(manifest, pkg.manifest_path()); + self.packages.insert(pkg.package_id().clone(), (pkg, cksum)); + } + + Ok(()) + } + + fn download(&mut self, id: &PackageId) -> CargoResult { + self.packages + .get(id) + .map(|p| &p.0) + .cloned() + .ok_or_else(|| format_err!("failed to find package with id: {}", id)) + } + + fn fingerprint(&self, pkg: &Package) -> CargoResult { + Ok(pkg.package_id().version().to_string()) + } + + fn verify(&self, id: &PackageId) -> CargoResult<()> { + let (pkg, cksum) = match self.packages.get(id) { + Some(&(ref pkg, ref cksum)) => (pkg, cksum), + None => bail!("failed to find entry for `{}` in directory source", id), + }; + + let mut buf = [0; 16 * 1024]; + for (file, cksum) in cksum.files.iter() { + let mut h = Sha256::new(); + let file = pkg.root().join(file); + + (|| -> CargoResult<()> { + let mut f = File::open(&file)?; + loop { + match f.read(&mut buf)? 
{ + 0 => return Ok(()), + n => h.update(&buf[..n]), + } + } + })() + .chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?; + + let actual = hex::encode(h.finish()); + if &*actual != cksum { + bail!( + "\ + the listed checksum of `{}` has changed:\n\ + expected: {}\n\ + actual: {}\n\ + \n\ + directory sources are not intended to be edited, if \ + modifications are required then it is recommended \ + that [replace] is used with a forked copy of the \ + source\ + ", + file.display(), + cksum, + actual + ); + } + } + + Ok(()) + } +} diff --git a/src/cargo/sources/git/mod.rs b/src/cargo/sources/git/mod.rs new file mode 100644 index 000000000..0b4378654 --- /dev/null +++ b/src/cargo/sources/git/mod.rs @@ -0,0 +1,4 @@ +pub use self::utils::{fetch, GitCheckout, GitDatabase, GitRemote, GitRevision}; +pub use self::source::{canonicalize_url, GitSource}; +mod utils; +mod source; diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs new file mode 100644 index 000000000..75573d38b --- /dev/null +++ b/src/cargo/sources/git/source.rs @@ -0,0 +1,282 @@ +use std::fmt::{self, Debug, Formatter}; + +use url::Url; + +use core::source::{Source, SourceId}; +use core::GitReference; +use core::{Dependency, Package, PackageId, Registry, Summary}; +use util::Config; +use util::errors::CargoResult; +use util::hex::short_hash; +use sources::PathSource; +use sources::git::utils::{GitRemote, GitRevision}; + +/* TODO: Refactor GitSource to delegate to a PathSource + */ +pub struct GitSource<'cfg> { + remote: GitRemote, + reference: GitReference, + source_id: SourceId, + path_source: Option>, + rev: Option, + ident: String, + config: &'cfg Config, +} + +impl<'cfg> GitSource<'cfg> { + pub fn new(source_id: &SourceId, config: &'cfg Config) -> CargoResult> { + assert!(source_id.is_git(), "id is not git, id={}", source_id); + + let remote = GitRemote::new(source_id.url()); + let ident = ident(source_id.url())?; + + let reference = match source_id.precise() { + Some(s) => GitReference::Rev(s.to_string()), + None => source_id.git_reference().unwrap().clone(), + }; + + let source = GitSource { + remote, + reference, + source_id: source_id.clone(), + path_source: None, + rev: None, + ident, + config, + }; + + Ok(source) + } + + pub fn url(&self) -> &Url { + self.remote.url() + } + + pub fn read_packages(&mut self) -> CargoResult> { + if self.path_source.is_none() { + self.update()?; + } + self.path_source.as_mut().unwrap().read_packages() + } +} + +fn ident(url: &Url) -> CargoResult { + let url = canonicalize_url(url)?; + let ident = url.path_segments() + .and_then(|mut s| s.next_back()) + .unwrap_or(""); + + let ident = if ident == "" { "_empty" } else { ident }; + + Ok(format!("{}-{}", ident, short_hash(&url))) +} + +// Some hacks and heuristics for making equivalent URLs hash the same +pub fn canonicalize_url(url: &Url) -> CargoResult { + let mut url = url.clone(); + + // cannot-be-a-base-urls are not supported + // eg. github.com:rust-lang-nursery/rustfmt.git + if url.cannot_be_a_base() { + bail!( + "invalid url `{}`: cannot-be-a-base-URLs are not supported", + url + ) + } + + // Strip a trailing slash + if url.path().ends_with('/') { + url.path_segments_mut().unwrap().pop_if_empty(); + } + + // HACKHACK: For github URL's specifically just lowercase + // everything. GitHub treats both the same, but they hash + // differently, and we're gonna be hashing them. 
This wants a more + // general solution, and also we're almost certainly not using the + // same case conversion rules that GitHub does. (#84) + if url.host_str() == Some("github.com") { + url.set_scheme("https").unwrap(); + let path = url.path().to_lowercase(); + url.set_path(&path); + } + + // Repos generally can be accessed with or w/o '.git' + let needs_chopping = url.path().ends_with(".git"); + if needs_chopping { + let last = { + let last = url.path_segments().unwrap().next_back().unwrap(); + last[..last.len() - 4].to_owned() + }; + url.path_segments_mut().unwrap().pop().push(&last); + } + + Ok(url) +} + +impl<'cfg> Debug for GitSource<'cfg> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + write!(f, "git repo at {}", self.remote.url())?; + + match self.reference.pretty_ref() { + Some(s) => write!(f, " ({})", s), + None => Ok(()), + } + } +} + +impl<'cfg> Registry for GitSource<'cfg> { + fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { + let src = self.path_source + .as_mut() + .expect("BUG: update() must be called before query()"); + src.query(dep, f) + } + + fn supports_checksums(&self) -> bool { + false + } + + fn requires_precise(&self) -> bool { + true + } +} + +impl<'cfg> Source for GitSource<'cfg> { + fn source_id(&self) -> &SourceId { + &self.source_id + } + + fn update(&mut self) -> CargoResult<()> { + let lock = + self.config + .git_path() + .open_rw(".cargo-lock-git", self.config, "the git checkouts")?; + + let db_path = lock.parent().join("db").join(&self.ident); + + if self.config.cli_unstable().offline && !db_path.exists() { + bail!( + "can't checkout from '{}': you are in the offline mode (-Z offline)", + self.remote.url() + ); + } + + // Resolve our reference to an actual revision, and check if the + // database already has that revision. If it does, we just load a + // database pinned at that revision, and if we don't we issue an update + // to try to find the revision. + let actual_rev = self.remote.rev_for(&db_path, &self.reference); + let should_update = actual_rev.is_err() || self.source_id.precise().is_none(); + + let (db, actual_rev) = if should_update && !self.config.cli_unstable().offline { + self.config.shell().status( + "Updating", + format!("git repository `{}`", self.remote.url()), + )?; + + trace!("updating git source `{:?}`", self.remote); + + self.remote + .checkout(&db_path, &self.reference, self.config)? + } else { + (self.remote.db_at(&db_path)?, actual_rev.unwrap()) + }; + + // Don’t use the full hash, + // to contribute less to reaching the path length limit on Windows: + // https://github.com/servo/servo/pull/14397 + let short_id = db.to_short_id(actual_rev.clone()).unwrap(); + + let checkout_path = lock.parent() + .join("checkouts") + .join(&self.ident) + .join(short_id.as_str()); + + // Copy the database to the checkout location. After this we could drop + // the lock on the database as we no longer needed it, but we leave it + // in scope so the destructors here won't tamper with too much. + // Checkout is immutable, so we don't need to protect it with a lock once + // it is created. 
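+        // [Editor's note: an illustrative sketch of the resulting on-disk
+        // layout, assuming a default CARGO_HOME; the hash suffixes are
+        // hypothetical:
+        //
+        //     ~/.cargo/git/db/cargo-0a1b2c3d4e5f6a7b           <- bare GitDatabase
+        //     ~/.cargo/git/checkouts/cargo-0a1b2c3d4e5f6a7b/
+        //         8c67178/                                     <- one checkout per
+        //                                                         short revision id
+        // ]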
+ db.copy_to(actual_rev.clone(), &checkout_path, self.config)?; + + let source_id = self.source_id.with_precise(Some(actual_rev.to_string())); + let path_source = PathSource::new_recursive(&checkout_path, &source_id, self.config); + + self.path_source = Some(path_source); + self.rev = Some(actual_rev); + self.path_source.as_mut().unwrap().update() + } + + fn download(&mut self, id: &PackageId) -> CargoResult { + trace!( + "getting packages for package id `{}` from `{:?}`", + id, + self.remote + ); + self.path_source + .as_mut() + .expect("BUG: update() must be called before get()") + .download(id) + } + + fn fingerprint(&self, _pkg: &Package) -> CargoResult { + Ok(self.rev.as_ref().unwrap().to_string()) + } +} + +#[cfg(test)] +mod test { + use url::Url; + use super::ident; + use util::ToUrl; + + #[test] + pub fn test_url_to_path_ident_with_path() { + let ident = ident(&url("https://github.com/carlhuda/cargo")).unwrap(); + assert!(ident.starts_with("cargo-")); + } + + #[test] + pub fn test_url_to_path_ident_without_path() { + let ident = ident(&url("https://github.com")).unwrap(); + assert!(ident.starts_with("_empty-")); + } + + #[test] + fn test_canonicalize_idents_by_stripping_trailing_url_slash() { + let ident1 = ident(&url("https://github.com/PistonDevelopers/piston/")).unwrap(); + let ident2 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap(); + assert_eq!(ident1, ident2); + } + + #[test] + fn test_canonicalize_idents_by_lowercasing_github_urls() { + let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap(); + let ident2 = ident(&url("https://github.com/pistondevelopers/piston")).unwrap(); + assert_eq!(ident1, ident2); + } + + #[test] + fn test_canonicalize_idents_by_stripping_dot_git() { + let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap(); + let ident2 = ident(&url("https://github.com/PistonDevelopers/piston.git")).unwrap(); + assert_eq!(ident1, ident2); + } + + #[test] + fn test_canonicalize_idents_different_protocols() { + let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap(); + let ident2 = ident(&url("git://github.com/PistonDevelopers/piston")).unwrap(); + assert_eq!(ident1, ident2); + } + + #[test] + fn test_canonicalize_cannot_be_a_base_urls() { + assert!(ident(&url("github.com:PistonDevelopers/piston")).is_err()); + assert!(ident(&url("google.com:PistonDevelopers/piston")).is_err()); + } + + fn url(s: &str) -> Url { + s.to_url().unwrap() + } +} diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs new file mode 100644 index 000000000..2314db18c --- /dev/null +++ b/src/cargo/sources/git/utils.rs @@ -0,0 +1,869 @@ +use std::env; +use std::fmt; +use std::fs::{self, File}; +use std::mem; +use std::path::{Path, PathBuf}; +use std::process::Command; + +use curl::easy::{Easy, List}; +use git2::{self, ObjectType}; +use serde::ser::{self, Serialize}; +use url::Url; + +use core::GitReference; +use util::{internal, network, Config, Progress, ToUrl}; +use util::paths; +use util::errors::{CargoError, CargoResult, CargoResultExt}; + +#[derive(PartialEq, Clone, Debug)] +pub struct GitRevision(git2::Oid); + +impl ser::Serialize for GitRevision { + fn serialize(&self, s: S) -> Result { + serialize_str(self, s) + } +} + +fn serialize_str(t: &T, s: S) -> Result +where + T: fmt::Display, + S: ser::Serializer, +{ + t.to_string().serialize(s) +} + +impl fmt::Display for GitRevision { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.0, f) + } +} + 
+pub struct GitShortID(git2::Buf); + +impl GitShortID { + pub fn as_str(&self) -> &str { + self.0.as_str().unwrap() + } +} + +/// `GitRemote` represents a remote repository. It gets cloned into a local +/// `GitDatabase`. +#[derive(PartialEq, Clone, Debug, Serialize)] +pub struct GitRemote { + #[serde(serialize_with = "serialize_str")] url: Url, +} + +/// `GitDatabase` is a local clone of a remote repository's database. Multiple +/// `GitCheckouts` can be cloned from this `GitDatabase`. +#[derive(Serialize)] +pub struct GitDatabase { + remote: GitRemote, + path: PathBuf, + #[serde(skip_serializing)] repo: git2::Repository, +} + +/// `GitCheckout` is a local checkout of a particular revision. Calling +/// `clone_into` with a reference will resolve the reference into a revision, +/// and return a `CargoError` if no revision for that reference was found. +#[derive(Serialize)] +pub struct GitCheckout<'a> { + database: &'a GitDatabase, + location: PathBuf, + revision: GitRevision, + #[serde(skip_serializing)] repo: git2::Repository, +} + +// Implementations + +impl GitRemote { + pub fn new(url: &Url) -> GitRemote { + GitRemote { url: url.clone() } + } + + pub fn url(&self) -> &Url { + &self.url + } + + pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult { + reference.resolve(&self.db_at(path)?.repo) + } + + pub fn checkout( + &self, + into: &Path, + reference: &GitReference, + cargo_config: &Config, + ) -> CargoResult<(GitDatabase, GitRevision)> { + let mut repo_and_rev = None; + if let Ok(mut repo) = git2::Repository::open(into) { + self.fetch_into(&mut repo, cargo_config) + .chain_err(|| format!("failed to fetch into {}", into.display()))?; + if let Ok(rev) = reference.resolve(&repo) { + repo_and_rev = Some((repo, rev)); + } + } + let (repo, rev) = match repo_and_rev { + Some(pair) => pair, + None => { + let repo = self.clone_into(into, cargo_config) + .chain_err(|| format!("failed to clone into: {}", into.display()))?; + let rev = reference.resolve(&repo)?; + (repo, rev) + } + }; + + Ok(( + GitDatabase { + remote: self.clone(), + path: into.to_path_buf(), + repo, + }, + rev, + )) + } + + pub fn db_at(&self, db_path: &Path) -> CargoResult { + let repo = git2::Repository::open(db_path)?; + Ok(GitDatabase { + remote: self.clone(), + path: db_path.to_path_buf(), + repo, + }) + } + + fn fetch_into(&self, dst: &mut git2::Repository, cargo_config: &Config) -> CargoResult<()> { + // Create a local anonymous remote in the repository to fetch the url + let refspec = "refs/heads/*:refs/heads/*"; + fetch(dst, &self.url, refspec, cargo_config) + } + + fn clone_into(&self, dst: &Path, cargo_config: &Config) -> CargoResult { + if fs::metadata(&dst).is_ok() { + paths::remove_dir_all(dst)?; + } + fs::create_dir_all(dst)?; + let mut repo = git2::Repository::init_bare(dst)?; + fetch( + &mut repo, + &self.url, + "refs/heads/*:refs/heads/*", + cargo_config, + )?; + Ok(repo) + } +} + +impl GitDatabase { + pub fn copy_to( + &self, + rev: GitRevision, + dest: &Path, + cargo_config: &Config, + ) -> CargoResult { + let mut checkout = None; + if let Ok(repo) = git2::Repository::open(dest) { + let mut co = GitCheckout::new(dest, self, rev.clone(), repo); + if !co.is_fresh() { + // After a successful fetch operation do a sanity check to + // ensure we've got the object in our database to reset to. This + // can fail sometimes for corrupt repositories where the fetch + // operation succeeds but the object isn't actually there. 
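+                // [Editor's note, summarizing the three paths through
+                // `copy_to`: a fresh checkout at `dest` is reused as-is; a
+                // stale checkout is fetched, checked with `has_object`, and
+                // reset; failing both, `clone_into` below starts over from
+                // the bare database.]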
+ co.fetch(cargo_config)?; + if co.has_object() { + co.reset(cargo_config)?; + assert!(co.is_fresh()); + checkout = Some(co); + } + } else { + checkout = Some(co); + } + }; + let checkout = match checkout { + Some(c) => c, + None => GitCheckout::clone_into(dest, self, rev, cargo_config)?, + }; + checkout.update_submodules(cargo_config)?; + Ok(checkout) + } + + pub fn to_short_id(&self, revision: GitRevision) -> CargoResult { + let obj = self.repo.find_object(revision.0, None)?; + Ok(GitShortID(obj.short_id()?)) + } + + pub fn has_ref(&self, reference: &str) -> CargoResult<()> { + self.repo.revparse_single(reference)?; + Ok(()) + } +} + +impl GitReference { + fn resolve(&self, repo: &git2::Repository) -> CargoResult { + let id = match *self { + GitReference::Tag(ref s) => (|| -> CargoResult { + let refname = format!("refs/tags/{}", s); + let id = repo.refname_to_id(&refname)?; + let obj = repo.find_object(id, None)?; + let obj = obj.peel(ObjectType::Commit)?; + Ok(obj.id()) + })() + .chain_err(|| format!("failed to find tag `{}`", s))?, + GitReference::Branch(ref s) => { + (|| { + let b = repo.find_branch(s, git2::BranchType::Local)?; + b.get() + .target() + .ok_or_else(|| format_err!("branch `{}` did not have a target", s)) + })() + .chain_err(|| format!("failed to find branch `{}`", s))? + } + GitReference::Rev(ref s) => { + let obj = repo.revparse_single(s)?; + match obj.as_tag() { + Some(tag) => tag.target_id(), + None => obj.id(), + } + } + }; + Ok(GitRevision(id)) + } +} + +impl<'a> GitCheckout<'a> { + fn new( + path: &Path, + database: &'a GitDatabase, + revision: GitRevision, + repo: git2::Repository, + ) -> GitCheckout<'a> { + GitCheckout { + location: path.to_path_buf(), + database, + revision, + repo, + } + } + + fn clone_into( + into: &Path, + database: &'a GitDatabase, + revision: GitRevision, + config: &Config, + ) -> CargoResult> { + let dirname = into.parent().unwrap(); + fs::create_dir_all(&dirname).chain_err(|| format!("Couldn't mkdir {}", dirname.display()))?; + if into.exists() { + paths::remove_dir_all(into)?; + } + + // we're doing a local filesystem-to-filesystem clone so there should + // be no need to respect global configuration options, so pass in + // an empty instance of `git2::Config` below. + let git_config = git2::Config::new()?; + + // Clone the repository, but make sure we use the "local" option in + // libgit2 which will attempt to use hardlinks to set up the database. + // This should speed up the clone operation quite a bit if it works. + // + // Note that we still use the same fetch options because while we don't + // need authentication information we may want progress bars and such. + let url = database.path.to_url()?; + let mut repo = None; + with_fetch_options(&git_config, &url, config, &mut |fopts| { + let mut checkout = git2::build::CheckoutBuilder::new(); + checkout.dry_run(); // we'll do this below during a `reset` + + let r = git2::build::RepoBuilder::new() + // use hard links and/or copy the database, we're doing a + // filesystem clone so this'll speed things up quite a bit. 
+ .clone_local(git2::build::CloneLocal::Local) + .with_checkout(checkout) + .fetch_options(fopts) + // .remote_create(|repo, _name, url| repo.remote_anonymous(url)) + .clone(url.as_str(), into)?; + repo = Some(r); + Ok(()) + })?; + let repo = repo.unwrap(); + + let checkout = GitCheckout::new(into, database, revision, repo); + checkout.reset(config)?; + Ok(checkout) + } + + fn is_fresh(&self) -> bool { + match self.repo.revparse_single("HEAD") { + Ok(ref head) if head.id() == self.revision.0 => { + // See comments in reset() for why we check this + self.location.join(".cargo-ok").exists() + } + _ => false, + } + } + + fn fetch(&mut self, cargo_config: &Config) -> CargoResult<()> { + info!("fetch {}", self.repo.path().display()); + let url = self.database.path.to_url()?; + let refspec = "refs/heads/*:refs/heads/*"; + fetch(&mut self.repo, &url, refspec, cargo_config)?; + Ok(()) + } + + fn has_object(&self) -> bool { + self.repo.find_object(self.revision.0, None).is_ok() + } + + fn reset(&self, config: &Config) -> CargoResult<()> { + // If we're interrupted while performing this reset (e.g. we die because + // of a signal) Cargo needs to be sure to try to check out this repo + // again on the next go-round. + // + // To enable this we have a dummy file in our checkout, .cargo-ok, which + // if present means that the repo has been successfully reset and is + // ready to go. Hence if we start to do a reset, we make sure this file + // *doesn't* exist, and then once we're done we create the file. + let ok_file = self.location.join(".cargo-ok"); + let _ = paths::remove_file(&ok_file); + info!("reset {} to {}", self.repo.path().display(), self.revision); + let object = self.repo.find_object(self.revision.0, None)?; + reset(&self.repo, &object, config)?; + File::create(ok_file)?; + Ok(()) + } + + fn update_submodules(&self, cargo_config: &Config) -> CargoResult<()> { + return update_submodules(&self.repo, cargo_config); + + fn update_submodules(repo: &git2::Repository, cargo_config: &Config) -> CargoResult<()> { + info!("update submodules for: {:?}", repo.workdir().unwrap()); + + for mut child in repo.submodules()? { + update_submodule(repo, &mut child, cargo_config).chain_err(|| { + format!( + "failed to update submodule `{}`", + child.name().unwrap_or("") + ) + })?; + } + Ok(()) + } + + fn update_submodule( + parent: &git2::Repository, + child: &mut git2::Submodule, + cargo_config: &Config, + ) -> CargoResult<()> { + child.init(false)?; + let url = child + .url() + .ok_or_else(|| internal("non-utf8 url for submodule"))?; + + // A submodule which is listed in .gitmodules but not actually + // checked out will not have a head id, so we should ignore it. + let head = match child.head_id() { + Some(head) => head, + None => return Ok(()), + }; + + // If the submodule hasn't been checked out yet, we need to + // clone it. If it has been checked out and the head is the same + // as the submodule's head, then we can skip an update and keep + // recursing. + let head_and_repo = child.open().and_then(|repo| { + let target = repo.head()?.target(); + Ok((target, repo)) + }); + let mut repo = match head_and_repo { + Ok((head, repo)) => { + if child.head_id() == head { + return update_submodules(&repo, cargo_config); + } + repo + } + Err(..) => { + let path = parent.workdir().unwrap().join(child.path()); + let _ = paths::remove_dir_all(&path); + git2::Repository::init(&path)? 
+ } + }; + + // Fetch data from origin and reset to the head commit + let refspec = "refs/heads/*:refs/heads/*"; + let url = url.to_url()?; + fetch(&mut repo, &url, refspec, cargo_config).chain_err(|| { + internal(format!( + "failed to fetch submodule `{}` from {}", + child.name().unwrap_or(""), + url + )) + })?; + + let obj = repo.find_object(head, None)?; + reset(&repo, &obj, cargo_config)?; + update_submodules(&repo, cargo_config) + } + } +} + +/// Prepare the authentication callbacks for cloning a git repository. +/// +/// The main purpose of this function is to construct the "authentication +/// callback" which is used to clone a repository. This callback will attempt to +/// find the right authentication on the system (without user input) and will +/// guide libgit2 in doing so. +/// +/// The callback is provided `allowed` types of credentials, and we try to do as +/// much as possible based on that: +/// +/// * Prioritize SSH keys from the local ssh agent as they're likely the most +/// reliable. The username here is prioritized from the credential +/// callback, then from whatever is configured in git itself, and finally +/// we fall back to the generic user of `git`. +/// +/// * If a username/password is allowed, then we fallback to git2-rs's +/// implementation of the credential helper. This is what is configured +/// with `credential.helper` in git, and is the interface for the OSX +/// keychain, for example. +/// +/// * After the above two have failed, we just kinda grapple attempting to +/// return *something*. +/// +/// If any form of authentication fails, libgit2 will repeatedly ask us for +/// credentials until we give it a reason to not do so. To ensure we don't +/// just sit here looping forever we keep track of authentications we've +/// attempted and we don't try the same ones again. +fn with_authentication(url: &str, cfg: &git2::Config, mut f: F) -> CargoResult +where + F: FnMut(&mut git2::Credentials) -> CargoResult, +{ + let mut cred_helper = git2::CredentialHelper::new(url); + cred_helper.config(cfg); + + let mut ssh_username_requested = false; + let mut cred_helper_bad = None; + let mut ssh_agent_attempts = Vec::new(); + let mut any_attempts = false; + let mut tried_sshkey = false; + + let mut res = f(&mut |url, username, allowed| { + any_attempts = true; + // libgit2's "USERNAME" authentication actually means that it's just + // asking us for a username to keep going. This is currently only really + // used for SSH authentication and isn't really an authentication type. + // The logic currently looks like: + // + // let user = ...; + // if (user.is_null()) + // user = callback(USERNAME, null, ...); + // + // callback(SSH_KEY, user, ...) + // + // So if we're being called here then we know that (a) we're using ssh + // authentication and (b) no username was specified in the URL that + // we're trying to clone. We need to guess an appropriate username here, + // but that may involve a few attempts. Unfortunately we can't switch + // usernames during one authentication session with libgit2, so to + // handle this we bail out of this authentication session after setting + // the flag `ssh_username_requested`, and then we handle this below. + if allowed.contains(git2::CredentialType::USERNAME) { + debug_assert!(username.is_none()); + ssh_username_requested = true; + return Err(git2::Error::from_str("gonna try usernames later")); + } + + // An "SSH_KEY" authentication indicates that we need some sort of SSH + // authentication. 
This can currently either come from the ssh-agent + // process or from a raw in-memory SSH key. Cargo only supports using + // ssh-agent currently. + // + // If we get called with this then the only way that should be possible + // is if a username is specified in the URL itself (e.g. `username` is + // Some), hence the unwrap() here. We try custom usernames down below. + if allowed.contains(git2::CredentialType::SSH_KEY) && !tried_sshkey { + // If ssh-agent authentication fails, libgit2 will keep + // calling this callback asking for other authentication + // methods to try. Make sure we only try ssh-agent once, + // to avoid looping forever. + tried_sshkey = true; + let username = username.unwrap(); + debug_assert!(!ssh_username_requested); + ssh_agent_attempts.push(username.to_string()); + return git2::Cred::ssh_key_from_agent(username); + } + + // Sometimes libgit2 will ask for a username/password in plaintext. This + // is where Cargo would have an interactive prompt if we supported it, + // but we currently don't! Right now the only way we support fetching a + // plaintext password is through the `credential.helper` support, so + // fetch that here. + if allowed.contains(git2::CredentialType::USER_PASS_PLAINTEXT) { + let r = git2::Cred::credential_helper(cfg, url, username); + cred_helper_bad = Some(r.is_err()); + return r; + } + + // I'm... not sure what the DEFAULT kind of authentication is, but seems + // easy to support? + if allowed.contains(git2::CredentialType::DEFAULT) { + return git2::Cred::default(); + } + + // Whelp, we tried our best + Err(git2::Error::from_str("no authentication available")) + }); + + // Ok, so if it looks like we're going to be doing ssh authentication, we + // want to try a few different usernames as one wasn't specified in the URL + // for us to use. In order, we'll try: + // + // * A credential helper's username for this URL, if available. + // * This account's username. + // * "git" + // + // We have to restart the authentication session each time (due to + // constraints in libssh2 I guess? maybe this is inherent to ssh?), so we + // call our callback, `f`, in a loop here. + if ssh_username_requested { + debug_assert!(res.is_err()); + let mut attempts = Vec::new(); + attempts.push("git".to_string()); + if let Ok(s) = env::var("USER").or_else(|_| env::var("USERNAME")) { + attempts.push(s); + } + if let Some(ref s) = cred_helper.username { + attempts.push(s.clone()); + } + + while let Some(s) = attempts.pop() { + // We should get `USERNAME` first, where we just return our attempt, + // and then after that we should get `SSH_KEY`. If the first attempt + // fails we'll get called again, but we don't have another option so + // we bail out. + let mut attempts = 0; + res = f(&mut |_url, username, allowed| { + if allowed.contains(git2::CredentialType::USERNAME) { + return git2::Cred::username(&s); + } + if allowed.contains(git2::CredentialType::SSH_KEY) { + debug_assert_eq!(Some(&s[..]), username); + attempts += 1; + if attempts == 1 { + ssh_agent_attempts.push(s.to_string()); + return git2::Cred::ssh_key_from_agent(&s); + } + } + Err(git2::Error::from_str("no authentication available")) + }); + + // If we made two attempts then that means: + // + // 1. A username was requested, we returned `s`. + // 2. An ssh key was requested, we returned to look up `s` in the + // ssh agent. + // 3. For whatever reason that lookup failed, so we were asked again + // for another mode of authentication. 
+ // + // Essentially, if `attempts == 2` then in theory the only error was + // that this username failed to authenticate (e.g. no other network + // errors happened). Otherwise something else is funny so we bail + // out. + if attempts != 2 { + break; + } + } + } + + if res.is_ok() || !any_attempts { + return res.map_err(From::from); + } + + // In the case of an authentication failure (where we tried something) then + // we try to give a more helpful error message about precisely what we + // tried. + let res = res.map_err(CargoError::from).chain_err(|| { + let mut msg = "failed to authenticate when downloading \ + repository" + .to_string(); + if !ssh_agent_attempts.is_empty() { + let names = ssh_agent_attempts + .iter() + .map(|s| format!("`{}`", s)) + .collect::>() + .join(", "); + msg.push_str(&format!( + "\nattempted ssh-agent authentication, but \ + none of the usernames {} succeeded", + names + )); + } + if let Some(failed_cred_helper) = cred_helper_bad { + if failed_cred_helper { + msg.push_str( + "\nattempted to find username/password via \ + git's `credential.helper` support, but failed", + ); + } else { + msg.push_str( + "\nattempted to find username/password via \ + `credential.helper`, but maybe the found \ + credentials were incorrect", + ); + } + } + msg + })?; + Ok(res) +} + +fn reset(repo: &git2::Repository, obj: &git2::Object, config: &Config) -> CargoResult<()> { + let mut pb = Progress::new("Checkout", config); + let mut opts = git2::build::CheckoutBuilder::new(); + opts.progress(|_, cur, max| { + drop(pb.tick(cur, max)); + }); + repo.reset(obj, git2::ResetType::Hard, Some(&mut opts))?; + Ok(()) +} + +pub fn with_fetch_options( + git_config: &git2::Config, + url: &Url, + config: &Config, + cb: &mut FnMut(git2::FetchOptions) -> CargoResult<()>, +) -> CargoResult<()> { + let mut progress = Progress::new("Fetch", config); + network::with_retry(config, || { + with_authentication(url.as_str(), git_config, |f| { + let mut rcb = git2::RemoteCallbacks::new(); + rcb.credentials(f); + + rcb.transfer_progress(|stats| { + progress + .tick(stats.indexed_objects(), stats.total_objects()) + .is_ok() + }); + + // Create a local anonymous remote in the repository to fetch the + // url + let mut opts = git2::FetchOptions::new(); + opts.remote_callbacks(rcb) + .download_tags(git2::AutotagOption::All); + cb(opts) + })?; + Ok(()) + }) +} + +pub fn fetch( + repo: &mut git2::Repository, + url: &Url, + refspec: &str, + config: &Config, +) -> CargoResult<()> { + if config.frozen() { + bail!( + "attempting to update a git repository, but --frozen \ + was specified" + ) + } + if !config.network_allowed() { + bail!("can't update a git repository in the offline mode") + } + + // If we're fetching from github, attempt github's special fast path for + // testing if we've already got an up-to-date copy of the repository + if url.host_str() == Some("github.com") { + if let Ok(oid) = repo.refname_to_id("refs/remotes/origin/master") { + let mut handle = config.http()?.borrow_mut(); + debug!("attempting github fast path for {}", url); + if github_up_to_date(&mut handle, url, &oid) { + return Ok(()); + } else { + debug!("fast path failed, falling back to a git fetch"); + } + } + } + + // We reuse repositories quite a lot, so before we go through and update the + // repo check to see if it's a little too old and could benefit from a gc. + // In theory this shouldn't be too too expensive compared to the network + // request we're about to issue. 
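+    // [Editor's note: the gc threshold in `maybe_gc_repo` below is read from
+    // `__CARGO_PACKFILE_LIMIT` (default 100), so something like the following
+    // would force more aggressive gc runs; illustrative only:
+    //
+    //     __CARGO_PACKFILE_LIMIT=10 cargo update
+    // ]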
+    maybe_gc_repo(repo)?;
+
+    debug!("doing a fetch for {}", url);
+    let git_config = git2::Config::open_default()?;
+    with_fetch_options(&git_config, url, config, &mut |mut opts| {
+        // The `fetch` operation here may fail spuriously due to a corrupt
+        // repository. It could also fail, however, for a whole slew of other
+        // reasons (aka network related reasons). We want Cargo to automatically
+        // recover from corrupt repositories, but we don't want Cargo to stomp
+        // over other legitimate errors.
+        //
+        // Consequently we save off the error of the `fetch` operation and if it
+        // looks like a "corrupt repo" error then we blow away the repo and try
+        // again. If it looks like any other kind of error, or if we've already
+        // blown away the repository, then we want to return the error as-is.
+        let mut repo_reinitialized = false;
+        loop {
+            debug!("initiating fetch of {} from {}", refspec, url);
+            let res = repo.remote_anonymous(url.as_str())?
+                .fetch(&[refspec], Some(&mut opts), None);
+            let err = match res {
+                Ok(()) => break,
+                Err(e) => e,
+            };
+            debug!("fetch failed: {}", err);
+
+            if !repo_reinitialized && err.class() == git2::ErrorClass::Reference {
+                repo_reinitialized = true;
+                debug!(
+                    "looks like this is a corrupt repository, reinitializing \
+                     and trying again"
+                );
+                if reinitialize(repo).is_ok() {
+                    continue;
+                }
+            }
+
+            return Err(err.into());
+        }
+        Ok(())
+    })
+}
+
+/// Cargo has a bunch of long-lived git repositories in its global cache and
+/// some, like the index, are updated very frequently. Right now each update
+/// creates a new "pack file" inside the git database, and over time this can
+/// cause bad performance and bad current behavior in libgit2.
+///
+/// One pathological use case today is where libgit2 opens hundreds of file
+/// descriptors, getting us dangerously close to blowing out the OS limits of
+/// how many fds we can have open. This is detailed in #4403.
+///
+/// To try to combat this problem we attempt a `git gc` here. Note, though, that
+/// we may not even have `git` installed on the system! As a result we
+/// opportunistically try a `git gc` when the pack directory looks too big, and
+/// failing that we just blow away the repository and start over.
+fn maybe_gc_repo(repo: &mut git2::Repository) -> CargoResult<()> {
+    // Here we arbitrarily declare that if you have more than 100 files in your
+    // `pack` folder that we need to do a gc.
+    let entries = match repo.path().join("objects/pack").read_dir() {
+        Ok(e) => e.count(),
+        Err(_) => {
+            debug!("skipping gc as pack dir appears gone");
+            return Ok(());
+        }
+    };
+    let max = env::var("__CARGO_PACKFILE_LIMIT")
+        .ok()
+        .and_then(|s| s.parse::<usize>().ok())
+        .unwrap_or(100);
+    if entries < max {
+        debug!("skipping gc as there's only {} pack files", entries);
+        return Ok(());
+    }
+
+    // First up, try a literal `git gc` by shelling out to git. This is pretty
+    // likely to fail though as we may not have `git` installed. Note that
+    // libgit2 doesn't currently implement the gc operation, so there's no
+    // equivalent there.
+ match Command::new("git") + .arg("gc") + .current_dir(repo.path()) + .output() + { + Ok(out) => { + debug!( + "git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}", + out.status, + String::from_utf8_lossy(&out.stdout), + String::from_utf8_lossy(&out.stderr) + ); + if out.status.success() { + let new = git2::Repository::open(repo.path())?; + mem::replace(repo, new); + return Ok(()); + } + } + Err(e) => debug!("git-gc failed to spawn: {}", e), + } + + // Alright all else failed, let's start over. + reinitialize(repo) +} + +fn reinitialize(repo: &mut git2::Repository) -> CargoResult<()> { + // Here we want to drop the current repository object pointed to by `repo`, + // so we initialize temporary repository in a sub-folder, blow away the + // existing git folder, and then recreate the git repo. Finally we blow away + // the `tmp` folder we allocated. + let path = repo.path().to_path_buf(); + debug!("reinitializing git repo at {:?}", path); + let tmp = path.join("tmp"); + let bare = !repo.path().ends_with(".git"); + *repo = git2::Repository::init(&tmp)?; + for entry in path.read_dir()? { + let entry = entry?; + if entry.file_name().to_str() == Some("tmp") { + continue; + } + let path = entry.path(); + drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path))); + } + if bare { + *repo = git2::Repository::init_bare(path)?; + } else { + *repo = git2::Repository::init(path)?; + } + paths::remove_dir_all(&tmp)?; + Ok(()) +} + +/// Updating the index is done pretty regularly so we want it to be as fast as +/// possible. For registries hosted on github (like the crates.io index) there's +/// a fast path available to use [1] to tell us that there's no updates to be +/// made. +/// +/// This function will attempt to hit that fast path and verify that the `oid` +/// is actually the current `master` branch of the repository. If `true` is +/// returned then no update needs to be performed, but if `false` is returned +/// then the standard update logic still needs to happen. +/// +/// [1]: https://developer.github.com/v3/repos/commits/#get-the-sha-1-of-a-commit-reference +/// +/// Note that this function should never cause an actual failure because it's +/// just a fast path. As a result all errors are ignored in this function and we +/// just return a `bool`. Any real errors will be reported through the normal +/// update path above. +fn github_up_to_date(handle: &mut Easy, url: &Url, oid: &git2::Oid) -> bool { + macro_rules! 
try { + ($e:expr) => (match $e { + Some(e) => e, + None => return false, + }) + } + + // This expects github urls in the form `github.com/user/repo` and nothing + // else + let mut pieces = try!(url.path_segments()); + let username = try!(pieces.next()); + let repo = try!(pieces.next()); + if pieces.next().is_some() { + return false; + } + + let url = format!( + "https://api.github.com/repos/{}/{}/commits/master", + username, repo + ); + try!(handle.get(true).ok()); + try!(handle.url(&url).ok()); + try!(handle.useragent("cargo").ok()); + let mut headers = List::new(); + try!(headers.append("Accept: application/vnd.github.3.sha").ok()); + try!(headers.append(&format!("If-None-Match: \"{}\"", oid)).ok()); + try!(handle.http_headers(headers).ok()); + try!(handle.perform().ok()); + + try!(handle.response_code().ok()) == 304 +} diff --git a/src/cargo/sources/mod.rs b/src/cargo/sources/mod.rs new file mode 100644 index 000000000..ed784e95a --- /dev/null +++ b/src/cargo/sources/mod.rs @@ -0,0 +1,13 @@ +pub use self::config::SourceConfigMap; +pub use self::directory::DirectorySource; +pub use self::git::GitSource; +pub use self::path::PathSource; +pub use self::registry::{RegistrySource, CRATES_IO}; +pub use self::replaced::ReplacedSource; + +pub mod config; +pub mod directory; +pub mod git; +pub mod path; +pub mod registry; +pub mod replaced; diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs new file mode 100644 index 000000000..433c97d9f --- /dev/null +++ b/src/cargo/sources/path.rs @@ -0,0 +1,544 @@ +use std::fmt::{self, Debug, Formatter}; +use std::fs; +use std::path::{Path, PathBuf}; + +use filetime::FileTime; +use git2; +use glob::Pattern; +use ignore::Match; +use ignore::gitignore::GitignoreBuilder; + +use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary}; +use ops; +use util::{self, internal, CargoResult}; +use util::Config; + +pub struct PathSource<'cfg> { + source_id: SourceId, + path: PathBuf, + updated: bool, + packages: Vec, + config: &'cfg Config, + recursive: bool, +} + +impl<'cfg> PathSource<'cfg> { + /// Invoked with an absolute path to a directory that contains a Cargo.toml. + /// + /// This source will only return the package at precisely the `path` + /// specified, and it will be an error if there's not a package at `path`. + pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> { + PathSource { + source_id: id.clone(), + path: path.to_path_buf(), + updated: false, + packages: Vec::new(), + config, + recursive: false, + } + } + + /// Creates a new source which is walked recursively to discover packages. + /// + /// This is similar to the `new` method except that instead of requiring a + /// valid package to be present at `root` the folder is walked entirely to + /// crawl for packages. + /// + /// Note that this should be used with care and likely shouldn't be chosen + /// by default! 
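+    // [Editor's note: a minimal usage sketch contrasting the two constructors,
+    // assuming a `root: &Path`, `id: &SourceId`, and `config: &Config`;
+    // illustrative only:
+    //
+    //     let mut one = PathSource::new(root, id, config);   // exactly one package at `root`
+    //     let mut many = PathSource::new_recursive(root, id, config);
+    //     many.update()?;
+    //     let pkgs = many.read_packages()?;   // every package found under `root`
+    // ]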
+ pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> { + PathSource { + recursive: true, + ..PathSource::new(root, id, config) + } + } + + pub fn preload_with(&mut self, pkg: Package) { + assert!(!self.updated); + assert!(!self.recursive); + assert!(self.packages.is_empty()); + self.updated = true; + self.packages.push(pkg); + } + + pub fn root_package(&mut self) -> CargoResult { + trace!("root_package; source={:?}", self); + + self.update()?; + + match self.packages.iter().find(|p| p.root() == &*self.path) { + Some(pkg) => Ok(pkg.clone()), + None => Err(internal("no package found in source")), + } + } + + pub fn read_packages(&self) -> CargoResult> { + if self.updated { + Ok(self.packages.clone()) + } else if self.recursive { + ops::read_packages(&self.path, &self.source_id, self.config) + } else { + let path = self.path.join("Cargo.toml"); + let (pkg, _) = ops::read_package(&path, &self.source_id, self.config)?; + Ok(vec![pkg]) + } + } + + /// List all files relevant to building this package inside this source. + /// + /// This function will use the appropriate methods to determine the + /// set of files underneath this source's directory which are relevant for + /// building `pkg`. + /// + /// The basic assumption of this method is that all files in the directory + /// are relevant for building this package, but it also contains logic to + /// use other methods like .gitignore to filter the list of files. + /// + /// ## Pattern matching strategy + /// + /// Migrating from a glob-like pattern matching (using `glob` crate) to a + /// gitignore-like pattern matching (using `ignore` crate). The migration + /// stages are: + /// + /// 1) Only warn users about the future change iff their matching rules are + /// affected. (CURRENT STAGE) + /// + /// 2) Switch to the new strategy and update documents. Still keep warning + /// affected users. + /// + /// 3) Drop the old strategy and no more warnings. + /// + /// See for more info. + pub fn list_files(&self, pkg: &Package) -> CargoResult> { + let root = pkg.root(); + let no_include_option = pkg.manifest().include().is_empty(); + + // glob-like matching rules + + let glob_parse = |p: &String| { + let pattern: &str = if p.starts_with('/') { + &p[1..p.len()] + } else { + p + }; + Pattern::new(pattern) + .map_err(|e| format_err!("could not parse glob pattern `{}`: {}", p, e)) + }; + + let glob_exclude = pkg.manifest() + .exclude() + .iter() + .map(|p| glob_parse(p)) + .collect::, _>>()?; + + let glob_include = pkg.manifest() + .include() + .iter() + .map(|p| glob_parse(p)) + .collect::, _>>()?; + + let glob_should_package = |relative_path: &Path| -> bool { + fn glob_match(patterns: &Vec, relative_path: &Path) -> bool { + patterns + .iter() + .any(|pattern| pattern.matches_path(relative_path)) + } + + // include and exclude options are mutually exclusive. 
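+            // [Editor's note: for example, with `exclude = ["*.md"]` and no
+            // `include`, `README.md` is filtered out while `src/lib.rs` is
+            // kept; with `include = ["src/**"]`, only paths under `src/` are
+            // kept and `exclude` is ignored. Illustrative only.]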
+ if no_include_option { + !glob_match(&glob_exclude, relative_path) + } else { + glob_match(&glob_include, relative_path) + } + }; + + // ignore-like matching rules + + let mut exclude_builder = GitignoreBuilder::new(root); + for rule in pkg.manifest().exclude() { + exclude_builder.add_line(None, rule)?; + } + let ignore_exclude = exclude_builder.build()?; + + let mut include_builder = GitignoreBuilder::new(root); + for rule in pkg.manifest().include() { + include_builder.add_line(None, rule)?; + } + let ignore_include = include_builder.build()?; + + let ignore_should_package = |relative_path: &Path| -> CargoResult { + // include and exclude options are mutually exclusive. + if no_include_option { + match ignore_exclude + .matched_path_or_any_parents(relative_path, /* is_dir */ false) + { + Match::None => Ok(true), + Match::Ignore(_) => Ok(false), + Match::Whitelist(pattern) => Err(format_err!( + "exclude rules cannot start with `!`: {}", + pattern.original() + )), + } + } else { + match ignore_include + .matched_path_or_any_parents(relative_path, /* is_dir */ false) + { + Match::None => Ok(false), + Match::Ignore(_) => Ok(true), + Match::Whitelist(pattern) => Err(format_err!( + "include rules cannot start with `!`: {}", + pattern.original() + )), + } + } + }; + + // matching to paths + + let mut filter = |path: &Path| -> CargoResult { + let relative_path = util::without_prefix(path, root).unwrap(); + let glob_should_package = glob_should_package(relative_path); + let ignore_should_package = ignore_should_package(relative_path)?; + + if glob_should_package != ignore_should_package { + if glob_should_package { + if no_include_option { + self.config.shell().warn(format!( + "Pattern matching for Cargo's include/exclude fields is changing and \ + file `{}` WILL be excluded in a future Cargo version.\n\ + See https://github.com/rust-lang/cargo/issues/4268 for more info", + relative_path.display() + ))?; + } else { + self.config.shell().warn(format!( + "Pattern matching for Cargo's include/exclude fields is changing and \ + file `{}` WILL NOT be included in a future Cargo version.\n\ + See https://github.com/rust-lang/cargo/issues/4268 for more info", + relative_path.display() + ))?; + } + } else if no_include_option { + self.config.shell().warn(format!( + "Pattern matching for Cargo's include/exclude fields is changing and \ + file `{}` WILL NOT be excluded in a future Cargo version.\n\ + See https://github.com/rust-lang/cargo/issues/4268 for more info", + relative_path.display() + ))?; + } else { + self.config.shell().warn(format!( + "Pattern matching for Cargo's include/exclude fields is changing and \ + file `{}` WILL be included in a future Cargo version.\n\ + See https://github.com/rust-lang/cargo/issues/4268 for more info", + relative_path.display() + ))?; + } + } + + // Update to ignore_should_package for Stage 2 + Ok(glob_should_package) + }; + + // attempt git-prepopulate only if no `include` (rust-lang/cargo#4135) + if no_include_option { + if let Some(result) = self.discover_git_and_list_files(pkg, root, &mut filter) { + return result; + } + } + self.list_files_walk(pkg, &mut filter) + } + + // Returns Some(_) if found sibling Cargo.toml and .git folder; + // otherwise caller should fall back on full file list. 
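+    // [Editor's note: an illustrative layout for the upward probe performed
+    // below; the paths are hypothetical:
+    //
+    //     repo/.git/
+    //     repo/Cargo.toml            <- sibling manifest next to the repository
+    //     repo/crates/foo/Cargo.toml <- `root`, where the probe starts
+    //
+    // If `crates/foo/Cargo.toml` is tracked in repo's index, git is used to
+    // list files; otherwise the caller falls back to a plain directory walk.]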
+    fn discover_git_and_list_files(
+        &self,
+        pkg: &Package,
+        root: &Path,
+        filter: &mut FnMut(&Path) -> CargoResult<bool>,
+    ) -> Option<CargoResult<Vec<PathBuf>>> {
+        // If this package is in a git repository, then we really do want to
+        // query the git repository as it takes into account items such as
+        // .gitignore. We're not quite sure where the git repository is,
+        // however, so we do a bit of a probe.
+        //
+        // We walk this package's path upwards and look for a sibling
+        // Cargo.toml and .git folder. If we find one then we assume that we're
+        // part of that repository.
+        let mut cur = root;
+        loop {
+            if cur.join("Cargo.toml").is_file() {
+                // If we find a git repository next to this Cargo.toml, we still
+                // check to see if we are indeed part of the index. If not, then
+                // this is likely an unrelated git repo, so keep going.
+                if let Ok(repo) = git2::Repository::open(cur) {
+                    let index = match repo.index() {
+                        Ok(index) => index,
+                        Err(err) => return Some(Err(err.into())),
+                    };
+                    let path = util::without_prefix(root, cur).unwrap().join("Cargo.toml");
+                    if index.get_path(&path, 0).is_some() {
+                        return Some(self.list_files_git(pkg, repo, filter));
+                    }
+                }
+            }
+            // don't cross submodule boundaries
+            if cur.join(".git").is_dir() {
+                break;
+            }
+            match cur.parent() {
+                Some(parent) => cur = parent,
+                None => break,
+            }
+        }
+        None
+    }
+
+    fn list_files_git(
+        &self,
+        pkg: &Package,
+        repo: git2::Repository,
+        filter: &mut FnMut(&Path) -> CargoResult<bool>,
+    ) -> CargoResult<Vec<PathBuf>> {
+        warn!("list_files_git {}", pkg.package_id());
+        let index = repo.index()?;
+        let root = repo.workdir()
+            .ok_or_else(|| internal("Can't list files on a bare repository."))?;
+        let pkg_path = pkg.root();
+
+        let mut ret = Vec::<PathBuf>::new();
+
+        // We use information from the git repository to guide us in traversing
+        // its tree. The primary purpose of this is to take advantage of the
+        // .gitignore and auto-ignore files that don't matter.
+        //
+        // Here we're also careful to look at both tracked and untracked files as
+        // the untracked files are often part of a build and may become relevant
+        // as part of a future commit.
+        let index_files = index.iter().map(|entry| {
+            use libgit2_sys::GIT_FILEMODE_COMMIT;
+            let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32;
+            (join(root, &entry.path), Some(is_dir))
+        });
+        let mut opts = git2::StatusOptions::new();
+        opts.include_untracked(true);
+        if let Some(suffix) = util::without_prefix(pkg_path, root) {
+            opts.pathspec(suffix);
+        }
+        let statuses = repo.statuses(Some(&mut opts))?;
+        let untracked = statuses.iter().filter_map(|entry| match entry.status() {
+            git2::Status::WT_NEW => Some((join(root, entry.path_bytes()), None)),
+            _ => None,
+        });
+
+        let mut subpackages_found = Vec::new();
+
+        for (file_path, is_dir) in index_files.chain(untracked) {
+            let file_path = file_path?;
+
+            // Filter out files blatantly outside this package. This is helped a
+            // bit above via the `pathspec` function call, but we need to filter
+            // the entries in the index as well.
+            if !file_path.starts_with(pkg_path) {
+                continue;
+            }
+
+            match file_path.file_name().and_then(|s| s.to_str()) {
+                // Filter out Cargo.lock and target always, we don't want to
+                // package a lock file no one will ever read and we also avoid
+                // build artifacts
+                Some("Cargo.lock") | Some("target") => continue,
+
+                // Keep track of all sub-packages found and also strip out all
+                // matches we've found so far. Note, though, that if we find
+                // our own `Cargo.toml` we keep going.
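+                // [Editor's note: e.g. if the index lists both `pkg/Cargo.toml`
+                // (our own) and `pkg/nested/Cargo.toml`, the nested manifest
+                // marks `pkg/nested/` as a sub-package: files already collected
+                // under it are dropped from `ret` and later entries beneath it
+                // are skipped. The paths are hypothetical.]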
+ Some("Cargo.toml") => { + let path = file_path.parent().unwrap(); + if path != pkg_path { + warn!("subpackage found: {}", path.display()); + ret.retain(|p| !p.starts_with(path)); + subpackages_found.push(path.to_path_buf()); + continue; + } + } + + _ => {} + } + + // If this file is part of any other sub-package we've found so far, + // skip it. + if subpackages_found.iter().any(|p| file_path.starts_with(p)) { + continue; + } + + if is_dir.unwrap_or_else(|| file_path.is_dir()) { + warn!(" found submodule {}", file_path.display()); + let rel = util::without_prefix(&file_path, root).unwrap(); + let rel = rel.to_str() + .ok_or_else(|| format_err!("invalid utf-8 filename: {}", rel.display()))?; + // Git submodules are currently only named through `/` path + // separators, explicitly not `\` which windows uses. Who knew? + let rel = rel.replace(r"\", "/"); + match repo.find_submodule(&rel).and_then(|s| s.open()) { + Ok(repo) => { + let files = self.list_files_git(pkg, repo, filter)?; + ret.extend(files.into_iter()); + } + Err(..) => { + PathSource::walk(&file_path, &mut ret, false, filter)?; + } + } + } else if (*filter)(&file_path)? { + // We found a file! + warn!(" found {}", file_path.display()); + ret.push(file_path); + } + } + return Ok(ret); + + #[cfg(unix)] + fn join(path: &Path, data: &[u8]) -> CargoResult { + use std::os::unix::prelude::*; + use std::ffi::OsStr; + Ok(path.join(::from_bytes(data))) + } + #[cfg(windows)] + fn join(path: &Path, data: &[u8]) -> CargoResult { + use std::str; + match str::from_utf8(data) { + Ok(s) => Ok(path.join(s)), + Err(..) => Err(internal( + "cannot process path in git with a non \ + unicode filename", + )), + } + } + } + + fn list_files_walk( + &self, + pkg: &Package, + filter: &mut FnMut(&Path) -> CargoResult, + ) -> CargoResult> { + let mut ret = Vec::new(); + PathSource::walk(pkg.root(), &mut ret, true, filter)?; + Ok(ret) + } + + fn walk( + path: &Path, + ret: &mut Vec, + is_root: bool, + filter: &mut FnMut(&Path) -> CargoResult, + ) -> CargoResult<()> { + if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) { + if (*filter)(path)? { + ret.push(path.to_path_buf()); + } + return Ok(()); + } + // Don't recurse into any sub-packages that we have + if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() { + return Ok(()); + } + + // For package integration tests, we need to sort the paths in a deterministic order to + // be able to match stdout warnings in the same order. + // + // TODO: Drop collect and sort after transition period and dropping wraning tests. 
+        // See https://github.com/rust-lang/cargo/issues/4268 for details.
+        let mut entries: Vec<_> = fs::read_dir(path)?.map(|e| e.unwrap()).collect();
+        entries.sort_by(|a, b| a.path().as_os_str().cmp(b.path().as_os_str()));
+        for entry in entries {
+            let path = entry.path();
+            let name = path.file_name().and_then(|s| s.to_str());
+            // Skip dotfile directories
+            if name.map(|s| s.starts_with('.')) == Some(true) {
+                continue;
+            }
+            if is_root {
+                // Skip cargo artifacts
+                match name {
+                    Some("target") | Some("Cargo.lock") => continue,
+                    _ => {}
+                }
+            }
+            PathSource::walk(&path, ret, false, filter)?;
+        }
+        Ok(())
+    }
+}
+
+impl<'cfg> Debug for PathSource<'cfg> {
+    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        write!(f, "the paths source")
+    }
+}
+
+impl<'cfg> Registry for PathSource<'cfg> {
+    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        for s in self.packages.iter().map(|p| p.summary()) {
+            if dep.matches(s) {
+                f(s.clone())
+            }
+        }
+        Ok(())
+    }
+
+    fn supports_checksums(&self) -> bool {
+        false
+    }
+
+    fn requires_precise(&self) -> bool {
+        false
+    }
+}
+
+impl<'cfg> Source for PathSource<'cfg> {
+    fn source_id(&self) -> &SourceId {
+        &self.source_id
+    }
+
+    fn update(&mut self) -> CargoResult<()> {
+        if !self.updated {
+            let packages = self.read_packages()?;
+            self.packages.extend(packages.into_iter());
+            self.updated = true;
+        }
+
+        Ok(())
+    }
+
+    fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
+        trace!("getting packages; id={}", id);
+
+        let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
+        pkg.cloned()
+            .ok_or_else(|| internal(format!("failed to find {} in path source", id)))
+    }
+
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+        if !self.updated {
+            return Err(internal("BUG: source was not updated"));
+        }
+
+        let mut max = FileTime::zero();
+        let mut max_path = PathBuf::from("");
+        for file in self.list_files(pkg)? {
+            // An fs::stat error here is either because path is a
+            // broken symlink, a permissions error, or a race
+            // condition where this path was rm'ed - either way,
+            // we can ignore the error and treat the path's mtime
+            // as 0.
+            let mtime = fs::metadata(&file)
+                .map(|meta| FileTime::from_last_modification_time(&meta))
+                .unwrap_or(FileTime::zero());
+            warn!("{} {}", mtime, file.display());
+            if mtime > max {
+                max = mtime;
+                max_path = file;
+            }
+        }
+        trace!("fingerprint {}: {}", self.path.display(), max);
+        Ok(format!("{} ({})", max, max_path.display()))
+    }
+}
diff --git a/src/cargo/sources/registry/index.rs b/src/cargo/sources/registry/index.rs
new file mode 100644
index 000000000..55da37612
--- /dev/null
+++ b/src/cargo/sources/registry/index.rs
@@ -0,0 +1,210 @@
+use std::collections::HashMap;
+use std::path::Path;
+use std::str;
+
+use serde_json;
+use semver::Version;
+
+use core::dependency::Dependency;
+use core::{PackageId, SourceId, Summary};
+use sources::registry::{RegistryPackage, INDEX_LOCK};
+use sources::registry::RegistryData;
+use util::{internal, CargoResult, Config, Filesystem};
+
+pub struct RegistryIndex<'cfg> {
+    source_id: SourceId,
+    path: Filesystem,
+    cache: HashMap<String, Vec<(Summary, bool)>>,
+    hashes: HashMap<String, HashMap<Version, String>>, // (name, vers) => cksum
+    config: &'cfg Config,
+    locked: bool,
+}
+
+impl<'cfg> RegistryIndex<'cfg> {
+    pub fn new(
+        id: &SourceId,
+        path: &Filesystem,
+        config: &'cfg Config,
+        locked: bool,
+    ) -> RegistryIndex<'cfg> {
+        RegistryIndex {
+            source_id: id.clone(),
+            path: path.clone(),
+            cache: HashMap::new(),
+            hashes: HashMap::new(),
+            config,
+            locked,
+        }
+    }
+
+    /// Return the hash listed for a specified PackageId.
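+    ///
+    /// (The string returned is the `cksum` recorded in the index entry for
+    /// that version; per the checksum verification in this module's download
+    /// paths it is the hex-encoded sha256 of the `.crate` tarball.)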
+    pub fn hash(&mut self, pkg: &PackageId, load: &mut RegistryData) -> CargoResult<String> {
+        let name = &*pkg.name();
+        let version = pkg.version();
+        if let Some(s) = self.hashes.get(name).and_then(|v| v.get(version)) {
+            return Ok(s.clone());
+        }
+        // Ok, we're missing the key, so parse the index file to load it.
+        self.summaries(name, load)?;
+        self.hashes
+            .get(name)
+            .and_then(|v| v.get(version))
+            .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))
+            .map(|s| s.clone())
+    }
+
+    /// Parse the on-disk metadata for the package provided.
+    ///
+    /// Returns a list of pairs of (summary, yanked) for the package name
+    /// specified.
+    pub fn summaries(
+        &mut self,
+        name: &str,
+        load: &mut RegistryData,
+    ) -> CargoResult<&Vec<(Summary, bool)>> {
+        if self.cache.contains_key(name) {
+            return Ok(&self.cache[name]);
+        }
+        let summaries = self.load_summaries(name, load)?;
+        self.cache.insert(name.to_string(), summaries);
+        Ok(&self.cache[name])
+    }
+
+    fn load_summaries(
+        &mut self,
+        name: &str,
+        load: &mut RegistryData,
+    ) -> CargoResult<Vec<(Summary, bool)>> {
+        let (root, _lock) = if self.locked {
+            let lock = self.path
+                .open_ro(Path::new(INDEX_LOCK), self.config, "the registry index");
+            match lock {
+                Ok(lock) => (lock.path().parent().unwrap().to_path_buf(), Some(lock)),
+                Err(_) => return Ok(Vec::new()),
+            }
+        } else {
+            (self.path.clone().into_path_unlocked(), None)
+        };
+
+        let fs_name = name.chars()
+            .flat_map(|c| c.to_lowercase())
+            .collect::<String>();
+
+        // see module comment for why this is structured the way it is
+        let path = match fs_name.len() {
+            1 => format!("1/{}", fs_name),
+            2 => format!("2/{}", fs_name),
+            3 => format!("3/{}/{}", &fs_name[..1], fs_name),
+            _ => format!("{}/{}/{}", &fs_name[0..2], &fs_name[2..4], fs_name),
+        };
+        let mut ret = Vec::new();
+        let mut hit_closure = false;
+        let err = load.load(&root, Path::new(&path), &mut |contents| {
+            hit_closure = true;
+            let contents = str::from_utf8(contents)
+                .map_err(|_| format_err!("registry index file was not valid utf-8"))?;
+            ret.reserve(contents.lines().count());
+            let lines = contents.lines().map(|s| s.trim()).filter(|l| !l.is_empty());
+
+            let online = !self.config.cli_unstable().offline;
+            // Attempt forwards-compatibility on the index by ignoring
+            // everything that we ourselves don't understand; that should
+            // allow future cargo implementations to break the
+            // interpretation of each line here, and older cargo will simply
+            // ignore the new lines.
+            ret.extend(lines.filter_map(|line| {
+                self.parse_registry_package(line).ok().and_then(|v| {
+                    if online || load.is_crate_downloaded(v.0.package_id()) {
+                        Some(v)
+                    } else {
+                        None
+                    }
+                })
+            }));
+
+            Ok(())
+        });
+
+        // We ignore lookup failures as those are just crates which don't exist
+        // or we haven't updated the registry yet. If we actually ran the
+        // closure though then we care about those errors.
+        if hit_closure {
+            err?;
+        }
+
+        Ok(ret)
+    }
+
+    /// Parse a line from the registry's index file into a Summary for a
+    /// package.
+    ///
+    /// The returned boolean is whether or not the summary has been yanked.
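+    ///
+    /// As a rough illustration (fields follow `RegistryPackage` in the
+    /// parent module; the values here are made up), an index line looks like:
+    ///
+    /// ```json
+    /// {"name":"foo","vers":"0.1.0","deps":[],"features":{},
+    ///  "cksum":"...","yanked":false}
+    /// ```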
+    fn parse_registry_package(&mut self, line: &str) -> CargoResult<(Summary, bool)> {
+        let RegistryPackage {
+            name,
+            vers,
+            cksum,
+            deps,
+            features,
+            yanked,
+            links,
+        } = super::DEFAULT_ID.with(|slot| {
+            *slot.borrow_mut() = Some(self.source_id.clone());
+            let res = serde_json::from_str::<RegistryPackage>(line);
+            drop(slot.borrow_mut().take());
+            res
+        })?;
+        let pkgid = PackageId::new(&name, &vers, &self.source_id)?;
+        let summary = Summary::new(pkgid, deps.inner, features, links)?;
+        let summary = summary.set_checksum(cksum.clone());
+        if self.hashes.contains_key(&name[..]) {
+            self.hashes.get_mut(&name[..]).unwrap().insert(vers, cksum);
+        } else {
+            self.hashes
+                .entry(name.into_owned())
+                .or_insert_with(HashMap::new)
+                .insert(vers, cksum);
+        }
+        Ok((summary, yanked.unwrap_or(false)))
+    }
+
+    pub fn query(
+        &mut self,
+        dep: &Dependency,
+        load: &mut RegistryData,
+        f: &mut FnMut(Summary),
+    ) -> CargoResult<()> {
+        let source_id = self.source_id.clone();
+        let summaries = self.summaries(&*dep.name(), load)?;
+        let summaries = summaries
+            .iter()
+            .filter(|&&(_, yanked)| dep.source_id().precise().is_some() || !yanked)
+            .map(|s| s.0.clone());
+
+        // Handle `cargo update --precise` here. If specified, our own source
+        // will have a precise version listed of the form
+        // `<name>=<installed>-><requested>` where `<name>` is the name of a
+        // crate on this source, `<installed>` is the version currently
+        // installed and `<requested>` is the version requested (the argument
+        // to `--precise`).
+        let summaries = summaries.filter(|s| match source_id.precise() {
+            Some(p) if p.starts_with(&*dep.name()) && p[dep.name().len()..].starts_with('=') => {
+                let mut vers = p[dep.name().len() + 1..].splitn(2, "->");
+                if dep.version_req()
+                    .matches(&Version::parse(vers.next().unwrap()).unwrap())
+                {
+                    vers.next().unwrap() == s.version().to_string()
+                } else {
+                    true
+                }
+            }
+            _ => true,
+        });
+
+        for summary in summaries {
+            if dep.matches(&summary) {
+                f(summary);
+            }
+        }
+        Ok(())
+    }
+}
diff --git a/src/cargo/sources/registry/local.rs b/src/cargo/sources/registry/local.rs
new file mode 100644
index 000000000..fa97c42a2
--- /dev/null
+++ b/src/cargo/sources/registry/local.rs
@@ -0,0 +1,103 @@
+use std::io::SeekFrom;
+use std::io::prelude::*;
+use std::path::Path;
+
+use core::PackageId;
+use hex;
+use sources::registry::{RegistryConfig, RegistryData};
+use util::FileLock;
+use util::paths;
+use util::{Config, Filesystem, Sha256};
+use util::errors::{CargoResult, CargoResultExt};
+
+pub struct LocalRegistry<'cfg> {
+    index_path: Filesystem,
+    root: Filesystem,
+    src_path: Filesystem,
+    config: &'cfg Config,
+}
+
+impl<'cfg> LocalRegistry<'cfg> {
+    pub fn new(root: &Path, config: &'cfg Config, name: &str) -> LocalRegistry<'cfg> {
+        LocalRegistry {
+            src_path: config.registry_source_path().join(name),
+            index_path: Filesystem::new(root.join("index")),
+            root: Filesystem::new(root.to_path_buf()),
+            config,
+        }
+    }
+}
+
+impl<'cfg> RegistryData for LocalRegistry<'cfg> {
+    fn index_path(&self) -> &Filesystem {
+        &self.index_path
+    }
+
+    fn load(
+        &self,
+        root: &Path,
+        path: &Path,
+        data: &mut FnMut(&[u8]) -> CargoResult<()>,
+    ) -> CargoResult<()> {
+        data(&paths::read_bytes(&root.join(path))?)
+    }
+
+    fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
+        // Local registries don't have configuration for remote APIs or anything
+        // like that
+        Ok(None)
+    }
+
+    fn update_index(&mut self) -> CargoResult<()> {
+        // Nothing to update, we just use what's on disk. Verify it actually
+        // exists though. We don't use any locks as we're just checking whether
+        // these directories exist.
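+        //
+        // As a rough sketch (names illustrative, inferred from `new` above
+        // and `download` below), the layout being verified is:
+        //
+        //     <local registry root>/
+        //         index/              # registry index files
+        //         foo-0.1.0.crate     # `{name}-{version}.crate` tarballs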
+ let root = self.root.clone().into_path_unlocked(); + if !root.is_dir() { + bail!("local registry path is not a directory: {}", root.display()) + } + let index_path = self.index_path.clone().into_path_unlocked(); + if !index_path.is_dir() { + bail!( + "local registry index path is not a directory: {}", + index_path.display() + ) + } + Ok(()) + } + + fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult { + let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version()); + let mut crate_file = self.root.open_ro(&crate_file, self.config, "crate file")?; + + // If we've already got an unpacked version of this crate, then skip the + // checksum below as it is in theory already verified. + let dst = format!("{}-{}", pkg.name(), pkg.version()); + if self.src_path.join(dst).into_path_unlocked().exists() { + return Ok(crate_file); + } + + self.config.shell().status("Unpacking", pkg)?; + + // We don't actually need to download anything per-se, we just need to + // verify the checksum matches the .crate file itself. + let mut state = Sha256::new(); + let mut buf = [0; 64 * 1024]; + loop { + let n = crate_file + .read(&mut buf) + .chain_err(|| format!("failed to read `{}`", crate_file.path().display()))?; + if n == 0 { + break; + } + state.update(&buf[..n]); + } + if hex::encode(state.finish()) != checksum { + bail!("failed to verify the checksum of `{}`", pkg) + } + + crate_file.seek(SeekFrom::Start(0))?; + + Ok(crate_file) + } +} diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs new file mode 100644 index 000000000..3a8183b28 --- /dev/null +++ b/src/cargo/sources/registry/mod.rs @@ -0,0 +1,539 @@ +//! A `Source` for registry-based packages. +//! +//! # What's a Registry? +//! +//! Registries are central locations where packages can be uploaded to, +//! discovered, and searched for. The purpose of a registry is to have a +//! location that serves as permanent storage for versions of a crate over time. +//! +//! Compared to git sources, a registry provides many packages as well as many +//! versions simultaneously. Git sources can also have commits deleted through +//! rebasings where registries cannot have their versions deleted. +//! +//! # The Index of a Registry +//! +//! One of the major difficulties with a registry is that hosting so many +//! packages may quickly run into performance problems when dealing with +//! dependency graphs. It's infeasible for cargo to download the entire contents +//! of the registry just to resolve one package's dependencies, for example. As +//! a result, cargo needs some efficient method of querying what packages are +//! available on a registry, what versions are available, and what the +//! dependencies for each version is. +//! +//! One method of doing so would be having the registry expose an HTTP endpoint +//! which can be queried with a list of packages and a response of their +//! dependencies and versions is returned. This is somewhat inefficient however +//! as we may have to hit the endpoint many times and we may have already +//! queried for much of the data locally already (for other packages, for +//! example). This also involves inventing a transport format between the +//! registry and Cargo itself, so this route was not taken. +//! +//! Instead, Cargo communicates with registries through a git repository +//! referred to as the Index. The Index of a registry is essentially an easily +//! query-able version of the registry's database for a list of versions of a +//! 
package as well as a list of dependencies for each version. +//! +//! Using git to host this index provides a number of benefits: +//! +//! * The entire index can be stored efficiently locally on disk. This means +//! that all queries of a registry can happen locally and don't need to touch +//! the network. +//! +//! * Updates of the index are quite efficient. Using git buys incremental +//! updates, compressed transmission, etc for free. The index must be updated +//! each time we need fresh information from a registry, but this is one +//! update of a git repository that probably hasn't changed a whole lot so +//! it shouldn't be too expensive. +//! +//! Additionally, each modification to the index is just appending a line at +//! the end of a file (the exact format is described later). This means that +//! the commits for an index are quite small and easily applied/compressable. +//! +//! ## The format of the Index +//! +//! The index is a store for the list of versions for all packages known, so its +//! format on disk is optimized slightly to ensure that `ls registry` doesn't +//! produce a list of all packages ever known. The index also wants to ensure +//! that there's not a million files which may actually end up hitting +//! filesystem limits at some point. To this end, a few decisions were made +//! about the format of the registry: +//! +//! 1. Each crate will have one file corresponding to it. Each version for a +//! crate will just be a line in this file. +//! 2. There will be two tiers of directories for crate names, under which +//! crates corresponding to those tiers will be located. +//! +//! As an example, this is an example hierarchy of an index: +//! +//! ```notrust +//! . +//! ├── 3 +//! │   └── u +//! │   └── url +//! ├── bz +//! │   └── ip +//! │   └── bzip2 +//! ├── config.json +//! ├── en +//! │   └── co +//! │   └── encoding +//! └── li +//!    ├── bg +//!    │   └── libgit2 +//!    └── nk +//!    └── link-config +//! ``` +//! +//! The root of the index contains a `config.json` file with a few entries +//! corresponding to the registry (see `RegistryConfig` below). +//! +//! Otherwise, there are three numbered directories (1, 2, 3) for crates with +//! names 1, 2, and 3 characters in length. The 1/2 directories simply have the +//! crate files underneath them, while the 3 directory is sharded by the first +//! letter of the crate name. +//! +//! Otherwise the top-level directory contains many two-letter directory names, +//! each of which has many sub-folders with two letters. At the end of all these +//! are the actual crate files themselves. +//! +//! The purpose of this layout is to hopefully cut down on `ls` sizes as well as +//! efficient lookup based on the crate name itself. +//! +//! ## Crate files +//! +//! Each file in the index is the history of one crate over time. Each line in +//! the file corresponds to one version of a crate, stored in JSON format (see +//! the `RegistryPackage` structure below). +//! +//! As new versions are published, new lines are appended to this file. The only +//! modifications to this file that should happen over time are yanks of a +//! particular version. +//! +//! # Downloading Packages +//! +//! The purpose of the Index was to provide an efficient method to resolve the +//! dependency graph for a package. So far we only required one network +//! interaction to update the registry's repository (yay!). After resolution has +//! been performed, however we need to download the contents of packages so we +//! 
can read the full manifest and build the source code.
+//!
+//! To accomplish this, this source's `download` method will make an HTTP
+//! request per package requested to download tarballs into a local cache. These
+//! tarballs will then be unpacked into a destination folder.
+//!
+//! Note that because versions uploaded to the registry are frozen forever,
+//! the HTTP download and unpacking can all be skipped if the version has
+//! already been downloaded and unpacked. This caching allows us to only
+//! download a package when absolutely necessary.
+//!
+//! # Filesystem Hierarchy
+//!
+//! Overall, the `$HOME/.cargo` directory looks like this when talking about
+//! the registry:
+//!
+//! ```notrust
+//! # A folder under which all registry metadata is hosted (similar to
+//! # $HOME/.cargo/git)
+//! $HOME/.cargo/registry/
+//!
+//!     # For each registry that cargo knows about (keyed by hostname + hash)
+//!     # there is a folder which is the checked out version of the index for
+//!     # the registry in this location. Note that this is done so cargo can
+//!     # support multiple registries simultaneously
+//!     index/
+//!         registry1-<hash>/
+//!         registry2-<hash>/
+//!         ...
+//!
+//!     # This folder is a cache for all downloaded tarballs from a registry.
+//!     # Once downloaded and verified, a tarball never changes.
+//!     cache/
+//!         registry1-<hash>/<pkg>-<version>.crate
+//!         ...
+//!
+//!     # Location in which all tarballs are unpacked. Each tarball is known to
+//!     # be frozen after downloading, so transitively this folder is also
+//!     # frozen once it's unpacked (it's never unpacked again)
+//!     src/
+//!         registry1-<hash>/<pkg>-<version>/...
+//!         ...
+//! ```

+use std::borrow::Cow;
+use std::cell::RefCell;
+use std::collections::BTreeMap;
+use std::fmt;
+use std::fs::File;
+use std::path::{Path, PathBuf};
+
+use flate2::read::GzDecoder;
+use semver::Version;
+use serde::de;
+use tar::Archive;
+
+use core::{Package, PackageId, Registry, Source, SourceId, Summary};
+use core::dependency::{Dependency, Kind};
+use sources::PathSource;
+use util::{internal, CargoResult, Config, FileLock, Filesystem};
+use util::errors::CargoResultExt;
+use util::hex;
+use util::to_url::ToUrl;
+
+const INDEX_LOCK: &str = ".cargo-index-lock";
+pub const CRATES_IO: &str = "https://github.com/rust-lang/crates.io-index";
+const CRATE_TEMPLATE: &str = "{crate}";
+const VERSION_TEMPLATE: &str = "{version}";
+
+pub struct RegistrySource<'cfg> {
+    source_id: SourceId,
+    src_path: Filesystem,
+    config: &'cfg Config,
+    updated: bool,
+    ops: Box<RegistryData>,
+    index: index::RegistryIndex<'cfg>,
+    index_locked: bool,
+}
+
+#[derive(Deserialize)]
+pub struct RegistryConfig {
+    /// Download endpoint for all crates.
+    ///
+    /// The string is a template which will generate the download URL for the
+    /// tarball of a specific version of a crate. The substrings `{crate}` and
+    /// `{version}` will be replaced with the crate's name and version
+    /// respectively.
+    ///
+    /// For backwards compatibility, if the string does not contain `{crate}` or
+    /// `{version}`, it will be extended with `/{crate}/{version}/download` to
+    /// support registries like crates.io which were created before the
+    /// templating setup was created.
+    pub dl: String,
+
+    /// API endpoint for the registry. This is what's actually hit to perform
+    /// operations like yanks, owner modifications, publish new crates, etc.
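+    ///
+    /// As an illustration, a registry's `config.json` providing both
+    /// endpoints might look like (crates.io-style values, not normative):
+    ///
+    /// ```json
+    /// {"dl": "https://crates.io/api/v1/crates", "api": "https://crates.io"}
+    /// ```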
+ pub api: Option, +} + +#[derive(Deserialize)] +struct RegistryPackage<'a> { + name: Cow<'a, str>, + vers: Version, + deps: DependencyList, + features: BTreeMap>, + cksum: String, + yanked: Option, + #[serde(default)] links: Option, +} + +struct DependencyList { + inner: Vec, +} + +#[derive(Deserialize)] +struct RegistryDependency<'a> { + name: Cow<'a, str>, + req: Cow<'a, str>, + features: Vec, + optional: bool, + default_features: bool, + target: Option>, + kind: Option>, + registry: Option, +} + +pub trait RegistryData { + fn index_path(&self) -> &Filesystem; + fn load( + &self, + _root: &Path, + path: &Path, + data: &mut FnMut(&[u8]) -> CargoResult<()>, + ) -> CargoResult<()>; + fn config(&mut self) -> CargoResult>; + fn update_index(&mut self) -> CargoResult<()>; + fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult; + + fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool { + true + } +} + +mod index; +mod remote; +mod local; + +fn short_name(id: &SourceId) -> String { + let hash = hex::short_hash(id); + let ident = id.url().host_str().unwrap_or("").to_string(); + format!("{}-{}", ident, hash) +} + +impl<'cfg> RegistrySource<'cfg> { + pub fn remote(source_id: &SourceId, config: &'cfg Config) -> RegistrySource<'cfg> { + let name = short_name(source_id); + let ops = remote::RemoteRegistry::new(source_id, config, &name); + RegistrySource::new(source_id, config, &name, Box::new(ops), true) + } + + pub fn local(source_id: &SourceId, path: &Path, config: &'cfg Config) -> RegistrySource<'cfg> { + let name = short_name(source_id); + let ops = local::LocalRegistry::new(path, config, &name); + RegistrySource::new(source_id, config, &name, Box::new(ops), false) + } + + fn new( + source_id: &SourceId, + config: &'cfg Config, + name: &str, + ops: Box, + index_locked: bool, + ) -> RegistrySource<'cfg> { + RegistrySource { + src_path: config.registry_source_path().join(name), + config, + source_id: source_id.clone(), + updated: false, + index: index::RegistryIndex::new(source_id, ops.index_path(), config, index_locked), + index_locked, + ops, + } + } + + /// Decode the configuration stored within the registry. + /// + /// This requires that the index has been at least checked out. + pub fn config(&mut self) -> CargoResult> { + self.ops.config() + } + + /// Unpacks a downloaded package into a location where it's ready to be + /// compiled. + /// + /// No action is taken if the source looks like it's already unpacked. + fn unpack_package(&self, pkg: &PackageId, tarball: &FileLock) -> CargoResult { + let dst = self.src_path + .join(&format!("{}-{}", pkg.name(), pkg.version())); + dst.create_dir()?; + // Note that we've already got the `tarball` locked above, and that + // implies a lock on the unpacked destination as well, so this access + // via `into_path_unlocked` should be ok. + let dst = dst.into_path_unlocked(); + let ok = dst.join(".cargo-ok"); + if ok.exists() { + return Ok(dst); + } + + let gz = GzDecoder::new(tarball.file()); + let mut tar = Archive::new(gz); + let prefix = dst.file_name().unwrap(); + let parent = dst.parent().unwrap(); + for entry in tar.entries()? { + let mut entry = entry.chain_err(|| "failed to iterate over archive")?; + let entry_path = entry + .path() + .chain_err(|| "failed to read entry path")? + .into_owned(); + + // We're going to unpack this tarball into the global source + // directory, but we want to make sure that it doesn't accidentally + // (or maliciously) overwrite source code from other crates. 
Cargo + // itself should never generate a tarball that hits this error, and + // crates.io should also block uploads with these sorts of tarballs, + // but be extra sure by adding a check here as well. + if !entry_path.starts_with(prefix) { + bail!( + "invalid tarball downloaded, contains \ + a file at {:?} which isn't under {:?}", + entry_path, + prefix + ) + } + + // Once that's verified, unpack the entry as usual. + entry + .unpack_in(parent) + .chain_err(|| format!("failed to unpack entry at `{}`", entry_path.display()))?; + } + File::create(&ok)?; + Ok(dst.clone()) + } + + fn do_update(&mut self) -> CargoResult<()> { + self.ops.update_index()?; + let path = self.ops.index_path(); + self.index = + index::RegistryIndex::new(&self.source_id, path, self.config, self.index_locked); + Ok(()) + } +} + +impl<'cfg> Registry for RegistrySource<'cfg> { + fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { + // If this is a precise dependency, then it came from a lockfile and in + // theory the registry is known to contain this version. If, however, we + // come back with no summaries, then our registry may need to be + // updated, so we fall back to performing a lazy update. + if dep.source_id().precise().is_some() && !self.updated { + let mut called = false; + self.index.query(dep, &mut *self.ops, &mut |s| { + called = true; + f(s); + })?; + if called { + return Ok(()); + } else { + self.do_update()?; + } + } + + self.index.query(dep, &mut *self.ops, f) + } + + fn supports_checksums(&self) -> bool { + true + } + + fn requires_precise(&self) -> bool { + false + } +} + +impl<'cfg> Source for RegistrySource<'cfg> { + fn source_id(&self) -> &SourceId { + &self.source_id + } + + fn update(&mut self) -> CargoResult<()> { + // If we have an imprecise version then we don't know what we're going + // to look for, so we always attempt to perform an update here. + // + // If we have a precise version, then we'll update lazily during the + // querying phase. Note that precise in this case is only + // `Some("locked")` as other `Some` values indicate a `cargo update + // --precise` request + if self.source_id.precise() != Some("locked") { + self.do_update()?; + } + Ok(()) + } + + fn download(&mut self, package: &PackageId) -> CargoResult { + let hash = self.index.hash(package, &mut *self.ops)?; + let path = self.ops.download(package, &hash)?; + let path = self.unpack_package(package, &path) + .chain_err(|| internal(format!("failed to unpack package `{}`", package)))?; + let mut src = PathSource::new(&path, &self.source_id, self.config); + src.update()?; + let pkg = src.download(package)?; + + // Unfortunately the index and the actual Cargo.toml in the index can + // differ due to historical Cargo bugs. To paper over these we trash the + // *summary* loaded from the Cargo.toml we just downloaded with the one + // we loaded from the index. 
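+        //
+        // (A hypothetical illustration: if the Cargo that published a crate
+        // serialized a dependency into the index slightly differently than
+        // we would parse it from the tarball's Cargo.toml today, resolution
+        // already trusted the index entry, so that entry must win here too.)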
+ let summaries = self.index.summaries(&*package.name(), &mut *self.ops)?; + let summary = summaries + .iter() + .map(|s| &s.0) + .find(|s| s.package_id() == package) + .expect("summary not found"); + let mut manifest = pkg.manifest().clone(); + manifest.set_summary(summary.clone()); + Ok(Package::new(manifest, pkg.manifest_path())) + } + + fn fingerprint(&self, pkg: &Package) -> CargoResult { + Ok(pkg.package_id().version().to_string()) + } +} + +// TODO: this is pretty unfortunate, ideally we'd use `DeserializeSeed` which +// is intended for "deserializing with context" but that means we couldn't +// use `#[derive(Deserialize)]` on `RegistryPackage` unfortunately. +// +// I'm told, however, that https://github.com/serde-rs/serde/pull/909 will solve +// all our problems here. Until that lands this thread local is just a +// workaround in the meantime. +// +// If you're reading this and find this thread local funny, check to see if that +// PR is merged. If it is then let's ditch this thread local! +thread_local!(static DEFAULT_ID: RefCell> = Default::default()); + +impl<'de> de::Deserialize<'de> for DependencyList { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + return deserializer.deserialize_seq(Visitor); + + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = DependencyList; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "a list of dependencies") + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: de::SeqAccess<'de>, + { + let mut ret = Vec::new(); + if let Some(size) = seq.size_hint() { + ret.reserve(size); + } + while let Some(element) = seq.next_element::()? { + ret.push(parse_registry_dependency(element).map_err(|e| de::Error::custom(e))?); + } + + Ok(DependencyList { inner: ret }) + } + } + } +} + +/// Converts an encoded dependency in the registry to a cargo dependency +fn parse_registry_dependency(dep: RegistryDependency) -> CargoResult { + let RegistryDependency { + name, + req, + mut features, + optional, + default_features, + target, + kind, + registry, + } = dep; + + let id = if let Some(registry) = registry { + SourceId::for_registry(®istry.to_url()?)? + } else { + DEFAULT_ID.with(|id| id.borrow().as_ref().unwrap().clone()) + }; + + let mut dep = Dependency::parse_no_deprecated(&name, Some(&req), &id)?; + let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") { + "dev" => Kind::Development, + "build" => Kind::Build, + _ => Kind::Normal, + }; + + let platform = match target { + Some(target) => Some(target.parse()?), + None => None, + }; + + // Unfortunately older versions of cargo and/or the registry ended up + // publishing lots of entries where the features array contained the + // empty feature, "", inside. This confuses the resolution process much + // later on and these features aren't actually valid, so filter them all + // out here. 
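+    //
+    // For example, a hypothetical index entry such as
+    //
+    //     {"name": "libc", "req": "^0.2", "features": [""], ...}
+    //
+    // would otherwise request the nonexistent feature `""` from `libc`.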
+ features.retain(|s| !s.is_empty()); + + dep.set_optional(optional) + .set_default_features(default_features) + .set_features(features) + .set_platform(platform) + .set_kind(kind); + + Ok(dep) +} diff --git a/src/cargo/sources/registry/remote.rs b/src/cargo/sources/registry/remote.rs new file mode 100644 index 000000000..3d50eda9c --- /dev/null +++ b/src/cargo/sources/registry/remote.rs @@ -0,0 +1,289 @@ +use std::cell::{Cell, Ref, RefCell}; +use std::fmt::Write as FmtWrite; +use std::io::SeekFrom; +use std::io::prelude::*; +use std::mem; +use std::path::Path; +use std::str; + +use git2; +use hex; +use serde_json; +use lazycell::LazyCell; + +use core::{PackageId, SourceId}; +use sources::git; +use sources::registry::{RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE}; +use util::network; +use util::{FileLock, Filesystem}; +use util::{Config, Progress, Sha256, ToUrl}; +use util::errors::{CargoResult, CargoResultExt, HttpNot200}; + +pub struct RemoteRegistry<'cfg> { + index_path: Filesystem, + cache_path: Filesystem, + source_id: SourceId, + config: &'cfg Config, + tree: RefCell>>, + repo: LazyCell, + head: Cell>, +} + +impl<'cfg> RemoteRegistry<'cfg> { + pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> { + RemoteRegistry { + index_path: config.registry_index_path().join(name), + cache_path: config.registry_cache_path().join(name), + source_id: source_id.clone(), + config, + tree: RefCell::new(None), + repo: LazyCell::new(), + head: Cell::new(None), + } + } + + fn repo(&self) -> CargoResult<&git2::Repository> { + self.repo.try_borrow_with(|| { + let path = self.index_path.clone().into_path_unlocked(); + + // Fast path without a lock + if let Ok(repo) = git2::Repository::open(&path) { + return Ok(repo); + } + + // Ok, now we need to lock and try the whole thing over again. + let lock = + self.index_path + .open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?; + match git2::Repository::open(&path) { + Ok(repo) => Ok(repo), + Err(_) => { + let _ = lock.remove_siblings(); + + // Note that we'd actually prefer to use a bare repository + // here as we're not actually going to check anything out. + // All versions of Cargo, though, share the same CARGO_HOME, + // so for compatibility with older Cargo which *does* do + // checkouts we make sure to initialize a new full + // repository (not a bare one). + // + // We should change this to `init_bare` whenever we feel + // like enough time has passed or if we change the directory + // that the folder is located in, such as by changing the + // hash at the end of the directory. + Ok(git2::Repository::init(&path)?) + } + } + }) + } + + fn head(&self) -> CargoResult { + if self.head.get().is_none() { + let oid = self.repo()?.refname_to_id("refs/remotes/origin/master")?; + self.head.set(Some(oid)); + } + Ok(self.head.get().unwrap()) + } + + fn tree(&self) -> CargoResult> { + { + let tree = self.tree.borrow(); + if tree.is_some() { + return Ok(Ref::map(tree, |s| s.as_ref().unwrap())); + } + } + let repo = self.repo()?; + let commit = repo.find_commit(self.head()?)?; + let tree = commit.tree()?; + + // Unfortunately in libgit2 the tree objects look like they've got a + // reference to the repository object which means that a tree cannot + // outlive the repository that it came from. Here we want to cache this + // tree, though, so to accomplish this we transmute it to a static + // lifetime. 
+        //
+        // Note that we don't actually hand out the static lifetime, instead we
+        // only return a scoped one from this function. Additionally, the repo
+        // we loaded from (above) lives as long as this object
+        // (`RemoteRegistry`) so we then just need to ensure that the tree is
+        // destroyed first in the destructor, hence the destructor on
+        // `RemoteRegistry` below.
+        let tree = unsafe { mem::transmute::<git2::Tree, git2::Tree<'static>>(tree) };
+        *self.tree.borrow_mut() = Some(tree);
+        Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap()))
+    }
+}
+
+impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
+    fn index_path(&self) -> &Filesystem {
+        &self.index_path
+    }
+
+    fn load(
+        &self,
+        _root: &Path,
+        path: &Path,
+        data: &mut FnMut(&[u8]) -> CargoResult<()>,
+    ) -> CargoResult<()> {
+        // Note that the index calls this method and the filesystem is locked
+        // in the index, so we don't need to worry about an `update_index`
+        // happening in a different process.
+        let repo = self.repo()?;
+        let tree = self.tree()?;
+        let entry = tree.get_path(path)?;
+        let object = entry.to_object(repo)?;
+        let blob = match object.as_blob() {
+            Some(blob) => blob,
+            None => bail!("path `{}` is not a blob in the git repo", path.display()),
+        };
+        data(blob.content())
+    }
+
+    fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
+        self.repo()?; // create intermediate dirs and initialize the repo
+        let _lock =
+            self.index_path
+                .open_ro(Path::new(INDEX_LOCK), self.config, "the registry index")?;
+        let mut config = None;
+        self.load(Path::new(""), Path::new("config.json"), &mut |json| {
+            config = Some(serde_json::from_slice(json)?);
+            Ok(())
+        })?;
+        Ok(config)
+    }
+
+    fn update_index(&mut self) -> CargoResult<()> {
+        if self.config.cli_unstable().offline {
+            return Ok(());
+        }
+        if self.config.cli_unstable().no_index_update {
+            return Ok(());
+        }
+
+        // Ensure that we'll actually be able to acquire an HTTP handle later on
+        // once we start trying to download crates. This will weed out any
+        // problems with `.cargo/config` configuration related to HTTP.
+        //
+        // This way if there's a problem the error gets printed before we even
+        // hit the index, which may not actually read this configuration.
+        self.config.http()?;
+
+        self.repo()?;
+        self.head.set(None);
+        *self.tree.borrow_mut() = None;
+        let _lock =
+            self.index_path
+                .open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?;
+        self.config
+            .shell()
+            .status("Updating", self.source_id.display_registry())?;
+
+        // git fetch origin master
+        let url = self.source_id.url();
+        let refspec = "refs/heads/master:refs/remotes/origin/master";
+        let repo = self.repo.borrow_mut().unwrap();
+        git::fetch(repo, url, refspec, self.config)
+            .chain_err(|| format!("failed to fetch `{}`", url))?;
+        Ok(())
+    }
+
+    fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock> {
+        let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
+        let path = Path::new(&filename);
+
+        // Attempt to open a read-only copy first to avoid an exclusive write
+        // lock and also work with read-only filesystems. Note that we check
+        // the length of the file below to handle interrupted downloads.
+        //
+        // If this fails then we fall through to the exclusive path where we may
+        // have to redownload the file.
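+        //
+        // (In effect a double-checked probe: the read-only open keeps the
+        // common already-downloaded case free of the write lock, while the
+        // repeated length check after `open_rw` below handles a concurrent
+        // Cargo that finished the download first.)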
+ if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) { + let meta = dst.file().metadata()?; + if meta.len() > 0 { + return Ok(dst); + } + } + let mut dst = self.cache_path.open_rw(path, self.config, &filename)?; + let meta = dst.file().metadata()?; + if meta.len() > 0 { + return Ok(dst); + } + self.config.shell().status("Downloading", pkg)?; + + let config = self.config()?.unwrap(); + let mut url = config.dl.clone(); + if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) { + write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap(); + } + let url = url.replace(CRATE_TEMPLATE, &*pkg.name()) + .replace(VERSION_TEMPLATE, &pkg.version().to_string()) + .to_url()?; + + // TODO: don't download into memory, but ensure that if we ctrl-c a + // download we should resume either from the start or the middle + // on the next time + let url = url.to_string(); + let mut handle = self.config.http()?.borrow_mut(); + handle.get(true)?; + handle.url(&url)?; + handle.follow_location(true)?; + let mut state = Sha256::new(); + let mut body = Vec::new(); + network::with_retry(self.config, || { + state = Sha256::new(); + body = Vec::new(); + let mut pb = Progress::new("Fetch", self.config); + { + handle.progress(true)?; + let mut handle = handle.transfer(); + handle.progress_function(|dl_total, dl_cur, _, _| { + pb.tick(dl_cur as usize, dl_total as usize).is_ok() + })?; + handle.write_function(|buf| { + state.update(buf); + body.extend_from_slice(buf); + Ok(buf.len()) + })?; + handle.perform()?; + } + let code = handle.response_code()?; + if code != 200 && code != 0 { + let url = handle.effective_url()?.unwrap_or(&url); + Err(HttpNot200 { + code, + url: url.to_string(), + }.into()) + } else { + Ok(()) + } + })?; + + // Verify what we just downloaded + if hex::encode(state.finish()) != checksum { + bail!("failed to verify the checksum of `{}`", pkg) + } + + dst.write_all(&body)?; + dst.seek(SeekFrom::Start(0))?; + Ok(dst) + } + + fn is_crate_downloaded(&self, pkg: &PackageId) -> bool { + let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); + let path = Path::new(&filename); + + if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) { + if let Ok(meta) = dst.file().metadata() { + return meta.len() > 0; + } + } + false + } +} + +impl<'cfg> Drop for RemoteRegistry<'cfg> { + fn drop(&mut self) { + // Just be sure to drop this before our other fields + self.tree.borrow_mut().take(); + } +} diff --git a/src/cargo/sources/replaced.rs b/src/cargo/sources/replaced.rs new file mode 100644 index 000000000..647df4c23 --- /dev/null +++ b/src/cargo/sources/replaced.rs @@ -0,0 +1,74 @@ +use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary}; +use util::errors::{CargoResult, CargoResultExt}; + +pub struct ReplacedSource<'cfg> { + to_replace: SourceId, + replace_with: SourceId, + inner: Box, +} + +impl<'cfg> ReplacedSource<'cfg> { + pub fn new( + to_replace: &SourceId, + replace_with: &SourceId, + src: Box, + ) -> ReplacedSource<'cfg> { + ReplacedSource { + to_replace: to_replace.clone(), + replace_with: replace_with.clone(), + inner: src, + } + } +} + +impl<'cfg> Registry for ReplacedSource<'cfg> { + fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { + let (replace_with, to_replace) = (&self.replace_with, &self.to_replace); + let dep = dep.clone().map_source(to_replace, replace_with); + + self.inner + .query(&dep, &mut |summary| { + f(summary.map_source(replace_with, to_replace)) + }) + 
.chain_err(|| format!("failed to query replaced source {}", self.to_replace))?; + Ok(()) + } + + fn supports_checksums(&self) -> bool { + self.inner.supports_checksums() + } + + fn requires_precise(&self) -> bool { + self.inner.requires_precise() + } +} + +impl<'cfg> Source for ReplacedSource<'cfg> { + fn source_id(&self) -> &SourceId { + &self.to_replace + } + + fn update(&mut self) -> CargoResult<()> { + self.inner + .update() + .chain_err(|| format!("failed to update replaced source {}", self.to_replace))?; + Ok(()) + } + + fn download(&mut self, id: &PackageId) -> CargoResult { + let id = id.with_source_id(&self.replace_with); + let pkg = self.inner + .download(&id) + .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?; + Ok(pkg.map_source(&self.replace_with, &self.to_replace)) + } + + fn fingerprint(&self, id: &Package) -> CargoResult { + self.inner.fingerprint(id) + } + + fn verify(&self, id: &PackageId) -> CargoResult<()> { + let id = id.with_source_id(&self.replace_with); + self.inner.verify(&id) + } +} diff --git a/src/cargo/util/cfg.rs b/src/cargo/util/cfg.rs new file mode 100644 index 000000000..03de8444f --- /dev/null +++ b/src/cargo/util/cfg.rs @@ -0,0 +1,263 @@ +use std::str::{self, FromStr}; +use std::iter; +use std::fmt; + +use util::{CargoError, CargoResult}; + +#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] +pub enum Cfg { + Name(String), + KeyPair(String, String), +} + +#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] +pub enum CfgExpr { + Not(Box), + All(Vec), + Any(Vec), + Value(Cfg), +} + +#[derive(PartialEq)] +enum Token<'a> { + LeftParen, + RightParen, + Ident(&'a str), + Comma, + Equals, + String(&'a str), +} + +struct Tokenizer<'a> { + s: iter::Peekable>, + orig: &'a str, +} + +struct Parser<'a> { + t: iter::Peekable>, +} + +impl FromStr for Cfg { + type Err = CargoError; + + fn from_str(s: &str) -> CargoResult { + let mut p = Parser::new(s); + let e = p.cfg()?; + if p.t.next().is_some() { + bail!("malformed cfg value or key/value pair: `{}`", s) + } + Ok(e) + } +} + +impl fmt::Display for Cfg { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Cfg::Name(ref s) => s.fmt(f), + Cfg::KeyPair(ref k, ref v) => write!(f, "{} = \"{}\"", k, v), + } + } +} + +impl CfgExpr { + pub fn matches(&self, cfg: &[Cfg]) -> bool { + match *self { + CfgExpr::Not(ref e) => !e.matches(cfg), + CfgExpr::All(ref e) => e.iter().all(|e| e.matches(cfg)), + CfgExpr::Any(ref e) => e.iter().any(|e| e.matches(cfg)), + CfgExpr::Value(ref e) => cfg.contains(e), + } + } +} + +impl FromStr for CfgExpr { + type Err = CargoError; + + fn from_str(s: &str) -> CargoResult { + let mut p = Parser::new(s); + let e = p.expr()?; + if p.t.next().is_some() { + bail!( + "can only have one cfg-expression, consider using all() or \ + any() explicitly" + ) + } + Ok(e) + } +} + +impl fmt::Display for CfgExpr { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + CfgExpr::Not(ref e) => write!(f, "not({})", e), + CfgExpr::All(ref e) => write!(f, "all({})", CommaSep(e)), + CfgExpr::Any(ref e) => write!(f, "any({})", CommaSep(e)), + CfgExpr::Value(ref e) => write!(f, "{}", e), + } + } +} + +struct CommaSep<'a, T: 'a>(&'a [T]); + +impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + for (i, v) in self.0.iter().enumerate() { + if i > 0 { + write!(f, ", ")?; + } + write!(f, "{}", v)?; + } + Ok(()) + } +} + +impl<'a> Parser<'a> { + fn new(s: &'a str) -> 
Parser<'a> { + Parser { + t: Tokenizer { + s: s.char_indices().peekable(), + orig: s, + }.peekable(), + } + } + + fn expr(&mut self) -> CargoResult { + match self.t.peek() { + Some(&Ok(Token::Ident(op @ "all"))) | Some(&Ok(Token::Ident(op @ "any"))) => { + self.t.next(); + let mut e = Vec::new(); + self.eat(Token::LeftParen)?; + while !self.try(Token::RightParen) { + e.push(self.expr()?); + if !self.try(Token::Comma) { + self.eat(Token::RightParen)?; + break; + } + } + if op == "all" { + Ok(CfgExpr::All(e)) + } else { + Ok(CfgExpr::Any(e)) + } + } + Some(&Ok(Token::Ident("not"))) => { + self.t.next(); + self.eat(Token::LeftParen)?; + let e = self.expr()?; + self.eat(Token::RightParen)?; + Ok(CfgExpr::Not(Box::new(e))) + } + Some(&Ok(..)) => self.cfg().map(CfgExpr::Value), + Some(&Err(..)) => Err(self.t.next().unwrap().err().unwrap()), + None => bail!( + "expected start of a cfg expression, \ + found nothing" + ), + } + } + + fn cfg(&mut self) -> CargoResult { + match self.t.next() { + Some(Ok(Token::Ident(name))) => { + let e = if self.try(Token::Equals) { + let val = match self.t.next() { + Some(Ok(Token::String(s))) => s, + Some(Ok(t)) => bail!("expected a string, found {}", t.classify()), + Some(Err(e)) => return Err(e), + None => bail!("expected a string, found nothing"), + }; + Cfg::KeyPair(name.to_string(), val.to_string()) + } else { + Cfg::Name(name.to_string()) + }; + Ok(e) + } + Some(Ok(t)) => bail!("expected identifier, found {}", t.classify()), + Some(Err(e)) => Err(e), + None => bail!("expected identifier, found nothing"), + } + } + + fn try(&mut self, token: Token<'a>) -> bool { + match self.t.peek() { + Some(&Ok(ref t)) if token == *t => {} + _ => return false, + } + self.t.next(); + true + } + + fn eat(&mut self, token: Token<'a>) -> CargoResult<()> { + match self.t.next() { + Some(Ok(ref t)) if token == *t => Ok(()), + Some(Ok(t)) => bail!("expected {}, found {}", token.classify(), t.classify()), + Some(Err(e)) => Err(e), + None => bail!("expected {}, but cfg expr ended", token.classify()), + } + } +} + +impl<'a> Iterator for Tokenizer<'a> { + type Item = CargoResult>; + + fn next(&mut self) -> Option>> { + loop { + match self.s.next() { + Some((_, ' ')) => {} + Some((_, '(')) => return Some(Ok(Token::LeftParen)), + Some((_, ')')) => return Some(Ok(Token::RightParen)), + Some((_, ',')) => return Some(Ok(Token::Comma)), + Some((_, '=')) => return Some(Ok(Token::Equals)), + Some((start, '"')) => { + while let Some((end, ch)) = self.s.next() { + if ch == '"' { + return Some(Ok(Token::String(&self.orig[start + 1..end]))); + } + } + return Some(Err(format_err!("unterminated string in cfg"))); + } + Some((start, ch)) if is_ident_start(ch) => { + while let Some(&(end, ch)) = self.s.peek() { + if !is_ident_rest(ch) { + return Some(Ok(Token::Ident(&self.orig[start..end]))); + } else { + self.s.next(); + } + } + return Some(Ok(Token::Ident(&self.orig[start..]))); + } + Some((_, ch)) => { + return Some(Err(format_err!( + "unexpected character in \ + cfg `{}`, expected parens, \ + a comma, an identifier, or \ + a string", + ch + ))) + } + None => return None, + } + } + } +} + +fn is_ident_start(ch: char) -> bool { + ch == '_' || ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') +} + +fn is_ident_rest(ch: char) -> bool { + is_ident_start(ch) || ('0' <= ch && ch <= '9') +} + +impl<'a> Token<'a> { + fn classify(&self) -> &str { + match *self { + Token::LeftParen => "`(`", + Token::RightParen => "`)`", + Token::Ident(..) 
=> "an identifier", + Token::Comma => "`,`", + Token::Equals => "`=`", + Token::String(..) => "a string", + } + } +} diff --git a/src/cargo/util/config.rs b/src/cargo/util/config.rs new file mode 100644 index 000000000..19b4312bc --- /dev/null +++ b/src/cargo/util/config.rs @@ -0,0 +1,1025 @@ +use std::cell::{RefCell, RefMut}; +use std::collections::HashSet; +use std::collections::hash_map::Entry::{Occupied, Vacant}; +use std::collections::hash_map::HashMap; +use std::env; +use std::fmt; +use std::fs::{self, File}; +use std::io::SeekFrom; +use std::io::prelude::*; +use std::mem; +use std::path::{Path, PathBuf}; +use std::str::FromStr; +use std::sync::{Once, ONCE_INIT}; + +use curl::easy::Easy; +use jobserver; +use serde::{Serialize, Serializer}; +use toml; +use lazycell::LazyCell; + +use core::shell::Verbosity; +use core::{CliUnstable, Shell, SourceId}; +use ops; +use url::Url; +use util::ToUrl; +use util::Rustc; +use util::errors::{internal, CargoError, CargoResult, CargoResultExt}; +use util::paths; +use util::toml as cargo_toml; +use util::Filesystem; + +use self::ConfigValue as CV; + +/// Configuration information for cargo. This is not specific to a build, it is information +/// relating to cargo itself. +/// +/// This struct implements `Default`: all fields can be inferred. +#[derive(Debug)] +pub struct Config { + /// The location of the users's 'home' directory. OS-dependent. + home_path: Filesystem, + /// Information about how to write messages to the shell + shell: RefCell, + /// Information on how to invoke the compiler (rustc) + rustc: LazyCell, + /// A collection of configuration options + values: LazyCell>, + /// The current working directory of cargo + cwd: PathBuf, + /// The location of the cargo executable (path to current process) + cargo_exe: LazyCell, + /// The location of the rustdoc executable + rustdoc: LazyCell, + /// Whether we are printing extra verbose messages + extra_verbose: bool, + /// `frozen` is set if we shouldn't access the network + frozen: bool, + /// `locked` is set if we should not update lock files + locked: bool, + /// A global static IPC control mechanism (used for managing parallel builds) + jobserver: Option, + /// Cli flags of the form "-Z something" + cli_flags: CliUnstable, + /// A handle on curl easy mode for http calls + easy: LazyCell>, + /// Cache of the `SourceId` for crates.io + crates_io_source_id: LazyCell, +} + +impl Config { + pub fn new(shell: Shell, cwd: PathBuf, homedir: PathBuf) -> Config { + static mut GLOBAL_JOBSERVER: *mut jobserver::Client = 0 as *mut _; + static INIT: Once = ONCE_INIT; + + // This should be called early on in the process, so in theory the + // unsafety is ok here. 
(taken ownership of random fds) + INIT.call_once(|| unsafe { + if let Some(client) = jobserver::Client::from_env() { + GLOBAL_JOBSERVER = Box::into_raw(Box::new(client)); + } + }); + + Config { + home_path: Filesystem::new(homedir), + shell: RefCell::new(shell), + rustc: LazyCell::new(), + cwd, + values: LazyCell::new(), + cargo_exe: LazyCell::new(), + rustdoc: LazyCell::new(), + extra_verbose: false, + frozen: false, + locked: false, + jobserver: unsafe { + if GLOBAL_JOBSERVER.is_null() { + None + } else { + Some((*GLOBAL_JOBSERVER).clone()) + } + }, + cli_flags: CliUnstable::default(), + easy: LazyCell::new(), + crates_io_source_id: LazyCell::new(), + } + } + + pub fn default() -> CargoResult { + let shell = Shell::new(); + let cwd = + env::current_dir().chain_err(|| "couldn't get the current directory of the process")?; + let homedir = homedir(&cwd).ok_or_else(|| { + format_err!( + "Cargo couldn't find your home directory. \ + This probably means that $HOME was not set." + ) + })?; + Ok(Config::new(shell, cwd, homedir)) + } + + /// The user's cargo home directory (OS-dependent) + pub fn home(&self) -> &Filesystem { + &self.home_path + } + + /// The cargo git directory (`/git`) + pub fn git_path(&self) -> Filesystem { + self.home_path.join("git") + } + + /// The cargo registry index directory (`/registry/index`) + pub fn registry_index_path(&self) -> Filesystem { + self.home_path.join("registry").join("index") + } + + /// The cargo registry cache directory (`/registry/path`) + pub fn registry_cache_path(&self) -> Filesystem { + self.home_path.join("registry").join("cache") + } + + /// The cargo registry source directory (`/registry/src`) + pub fn registry_source_path(&self) -> Filesystem { + self.home_path.join("registry").join("src") + } + + /// Get a reference to the shell, for e.g. writing error messages + pub fn shell(&self) -> RefMut { + self.shell.borrow_mut() + } + + /// Get the path to the `rustdoc` executable + pub fn rustdoc(&self) -> CargoResult<&Path> { + self.rustdoc + .try_borrow_with(|| self.get_tool("rustdoc")) + .map(AsRef::as_ref) + } + + /// Get the path to the `rustc` executable + pub fn rustc(&self) -> CargoResult<&Rustc> { + self.rustc.try_borrow_with(|| { + Rustc::new( + self.get_tool("rustc")?, + self.maybe_get_tool("rustc_wrapper")?, + ) + }) + } + + /// Get the path to the `cargo` executable + pub fn cargo_exe(&self) -> CargoResult<&Path> { + self.cargo_exe + .try_borrow_with(|| { + fn from_current_exe() -> CargoResult { + // Try fetching the path to `cargo` using env::current_exe(). + // The method varies per operating system and might fail; in particular, + // it depends on /proc being mounted on Linux, and some environments + // (like containers or chroots) may not have that available. + let exe = env::current_exe()?.canonicalize()?; + Ok(exe) + } + + fn from_argv() -> CargoResult { + // Grab argv[0] and attempt to resolve it to an absolute path. + // If argv[0] has one component, it must have come from a PATH lookup, + // so probe PATH in that case. + // Otherwise, it has multiple components and is either: + // - a relative path (e.g. `./cargo`, `target/debug/cargo`), or + // - an absolute path (e.g. `/usr/local/bin/cargo`). + // In either case, Path::canonicalize will return the full absolute path + // to the target if it exists + let argv0 = env::args_os() + .map(PathBuf::from) + .next() + .ok_or(format_err!("no argv[0]"))?; + if argv0.components().count() == 1 { + probe_path(argv0) + } else { + Ok(argv0.canonicalize()?) 
+ } + } + + fn probe_path(argv0: PathBuf) -> CargoResult { + let paths = env::var_os("PATH").ok_or(format_err!("no PATH"))?; + for path in env::split_paths(&paths) { + let candidate = PathBuf::from(path).join(&argv0); + if candidate.is_file() { + // PATH may have a component like "." in it, so we still need to + // canonicalize. + return Ok(candidate.canonicalize()?); + } + } + + bail!("no cargo executable candidate found in PATH") + } + + let exe = from_current_exe() + .or_else(|_| from_argv()) + .chain_err(|| "couldn't get the path to cargo executable")?; + Ok(exe) + }) + .map(AsRef::as_ref) + } + + pub fn values(&self) -> CargoResult<&HashMap> { + self.values.try_borrow_with(|| self.load_values()) + } + + pub fn set_values(&self, values: HashMap) -> CargoResult<()> { + if self.values.borrow().is_some() { + bail!("config values already found") + } + match self.values.fill(values) { + Ok(()) => Ok(()), + Err(_) => bail!("could not fill values"), + } + } + + pub fn cwd(&self) -> &Path { + &self.cwd + } + + pub fn target_dir(&self) -> CargoResult> { + if let Some(dir) = env::var_os("CARGO_TARGET_DIR") { + Ok(Some(Filesystem::new(self.cwd.join(dir)))) + } else if let Some(val) = self.get_path("build.target-dir")? { + let val = self.cwd.join(val.val); + Ok(Some(Filesystem::new(val))) + } else { + Ok(None) + } + } + + fn get(&self, key: &str) -> CargoResult> { + let vals = self.values()?; + let mut parts = key.split('.').enumerate(); + let mut val = match vals.get(parts.next().unwrap().1) { + Some(val) => val, + None => return Ok(None), + }; + for (i, part) in parts { + match *val { + CV::Table(ref map, _) => { + val = match map.get(part) { + Some(val) => val, + None => return Ok(None), + } + } + CV::Integer(_, ref path) + | CV::String(_, ref path) + | CV::List(_, ref path) + | CV::Boolean(_, ref path) => { + let idx = key.split('.').take(i).fold(0, |n, s| n + s.len()) + i - 1; + let key_so_far = &key[..idx]; + bail!( + "expected table for configuration key `{}`, \ + but found {} in {}", + key_so_far, + val.desc(), + path.display() + ) + } + } + } + Ok(Some(val.clone())) + } + + fn get_env(&self, key: &str) -> CargoResult>> + where + CargoError: From, + { + let key = key.replace(".", "_") + .replace("-", "_") + .chars() + .flat_map(|c| c.to_uppercase()) + .collect::(); + match env::var(&format!("CARGO_{}", key)) { + Ok(value) => Ok(Some(Value { + val: value.parse()?, + definition: Definition::Environment, + })), + Err(..) => Ok(None), + } + } + + pub fn get_string(&self, key: &str) -> CargoResult>> { + if let Some(v) = self.get_env(key)? { + return Ok(Some(v)); + } + match self.get(key)? { + Some(CV::String(i, path)) => Ok(Some(Value { + val: i, + definition: Definition::Path(path), + })), + Some(val) => self.expected("string", key, val), + None => Ok(None), + } + } + + pub fn get_bool(&self, key: &str) -> CargoResult>> { + if let Some(v) = self.get_env(key)? { + return Ok(Some(v)); + } + match self.get(key)? { + Some(CV::Boolean(b, path)) => Ok(Some(Value { + val: b, + definition: Definition::Path(path), + })), + Some(val) => self.expected("bool", key, val), + None => Ok(None), + } + } + + fn string_to_path(&self, value: String, definition: &Definition) -> PathBuf { + let is_path = value.contains('/') || (cfg!(windows) && value.contains('\\')); + if is_path { + definition.root(self).join(value) + } else { + // A pathless name + PathBuf::from(value) + } + } + + pub fn get_path(&self, key: &str) -> CargoResult>> { + if let Some(val) = self.get_string(key)? 
{ + Ok(Some(Value { + val: self.string_to_path(val.val, &val.definition), + definition: val.definition, + })) + } else { + Ok(None) + } + } + + pub fn get_path_and_args( + &self, + key: &str, + ) -> CargoResult)>>> { + if let Some(mut val) = self.get_list_or_split_string(key)? { + if !val.val.is_empty() { + return Ok(Some(Value { + val: ( + self.string_to_path(val.val.remove(0), &val.definition), + val.val, + ), + definition: val.definition, + })); + } + } + Ok(None) + } + + pub fn get_list(&self, key: &str) -> CargoResult>>> { + match self.get(key)? { + Some(CV::List(i, path)) => Ok(Some(Value { + val: i, + definition: Definition::Path(path), + })), + Some(val) => self.expected("list", key, val), + None => Ok(None), + } + } + + pub fn get_list_or_split_string(&self, key: &str) -> CargoResult>>> { + match self.get_env::(key) { + Ok(Some(value)) => { + return Ok(Some(Value { + val: value.val.split(' ').map(str::to_string).collect(), + definition: value.definition, + })) + } + Err(err) => return Err(err), + Ok(None) => (), + } + + match self.get(key)? { + Some(CV::List(i, path)) => Ok(Some(Value { + val: i.into_iter().map(|(s, _)| s).collect(), + definition: Definition::Path(path), + })), + Some(CV::String(i, path)) => Ok(Some(Value { + val: i.split(' ').map(str::to_string).collect(), + definition: Definition::Path(path), + })), + Some(val) => self.expected("list or string", key, val), + None => Ok(None), + } + } + + pub fn get_table(&self, key: &str) -> CargoResult>>> { + match self.get(key)? { + Some(CV::Table(i, path)) => Ok(Some(Value { + val: i, + definition: Definition::Path(path), + })), + Some(val) => self.expected("table", key, val), + None => Ok(None), + } + } + + pub fn get_i64(&self, key: &str) -> CargoResult>> { + if let Some(v) = self.get_env(key)? { + return Ok(Some(v)); + } + match self.get(key)? { + Some(CV::Integer(i, path)) => Ok(Some(Value { + val: i, + definition: Definition::Path(path), + })), + Some(val) => self.expected("integer", key, val), + None => Ok(None), + } + } + + pub fn net_retry(&self) -> CargoResult { + match self.get_i64("net.retry")? { + Some(v) => { + let value = v.val; + if value < 0 { + bail!( + "net.retry must be positive, but found {} in {}", + v.val, + v.definition + ) + } else { + Ok(value) + } + } + None => Ok(2), + } + } + + pub fn expected(&self, ty: &str, key: &str, val: CV) -> CargoResult { + val.expected(ty, key) + .map_err(|e| format_err!("invalid configuration for key `{}`\n{}", key, e)) + } + + pub fn configure( + &mut self, + verbose: u32, + quiet: Option, + color: &Option, + frozen: bool, + locked: bool, + unstable_flags: &[String], + ) -> CargoResult<()> { + let extra_verbose = verbose >= 2; + let verbose = if verbose == 0 { None } else { Some(true) }; + + // Ignore errors in the configuration files. + let cfg_verbose = self.get_bool("term.verbose").unwrap_or(None).map(|v| v.val); + let cfg_color = self.get_string("term.color").unwrap_or(None).map(|v| v.val); + + let color = color.as_ref().or_else(|| cfg_color.as_ref()); + + let verbosity = match (verbose, cfg_verbose, quiet) { + (Some(true), _, None) | (None, Some(true), None) => Verbosity::Verbose, + + // command line takes precedence over configuration, so ignore the + // configuration. + (None, _, Some(true)) => Verbosity::Quiet, + + // Can't pass both at the same time on the command line regardless + // of configuration. 
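+            // Illustrative summary of the arms below (not in the original
+            // source): `-v` on the command line beats `term.verbose = false`
+            // in the config; `term.verbose = true` only applies when neither
+            // `-v` nor `-q` is given; and passing both flags is rejected.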
+ (Some(true), _, Some(true)) => { + bail!("cannot set both --verbose and --quiet"); + } + + // Can't actually get `Some(false)` as a value from the command + // line, so just ignore them here to appease exhaustiveness checking + // in match statements. + (Some(false), _, _) + | (_, _, Some(false)) + | (None, Some(false), None) + | (None, None, None) => Verbosity::Normal, + }; + + self.shell().set_verbosity(verbosity); + self.shell().set_color_choice(color.map(|s| &s[..]))?; + self.extra_verbose = extra_verbose; + self.frozen = frozen; + self.locked = locked; + self.cli_flags.parse(unstable_flags)?; + + Ok(()) + } + + pub fn cli_unstable(&self) -> &CliUnstable { + &self.cli_flags + } + + pub fn extra_verbose(&self) -> bool { + self.extra_verbose + } + + pub fn network_allowed(&self) -> bool { + !self.frozen() && !self.cli_unstable().offline + } + + pub fn frozen(&self) -> bool { + self.frozen + } + + pub fn lock_update_allowed(&self) -> bool { + !self.frozen && !self.locked + } + + /// Loads configuration from the filesystem + pub fn load_values(&self) -> CargoResult> { + let mut cfg = CV::Table(HashMap::new(), PathBuf::from(".")); + + walk_tree(&self.cwd, |path| { + let mut contents = String::new(); + let mut file = File::open(&path)?; + file.read_to_string(&mut contents) + .chain_err(|| format!("failed to read configuration file `{}`", path.display()))?; + let toml = cargo_toml::parse(&contents, path, self).chain_err(|| { + format!("could not parse TOML configuration in `{}`", path.display()) + })?; + let value = CV::from_toml(path, toml).chain_err(|| { + format!( + "failed to load TOML configuration from `{}`", + path.display() + ) + })?; + cfg.merge(value) + .chain_err(|| format!("failed to merge configuration at `{}`", path.display()))?; + Ok(()) + }).chain_err(|| "Couldn't load Cargo configuration")?; + + self.load_credentials(&mut cfg)?; + match cfg { + CV::Table(map, _) => Ok(map), + _ => unreachable!(), + } + } + + /// Gets the index for a registry. + pub fn get_registry_index(&self, registry: &str) -> CargoResult { + Ok( + match self.get_string(&format!("registries.{}.index", registry))? { + Some(index) => { + let url = index.val.to_url()?; + if url.username() != "" || url.password().is_some() { + bail!("Registry URLs may not contain credentials"); + } + url + } + None => bail!("No index found for registry: `{}`", registry), + }, + ) + } + + /// Loads credentials config from the credentials file into the ConfigValue object, if present. 
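+    ///
+    /// For illustration, both credentials layouts are accepted (assumed file
+    /// contents): the old flat form
+    ///
+    /// ```toml
+    /// token = "..."
+    /// ```
+    ///
+    /// is rewritten in-memory into the current form
+    ///
+    /// ```toml
+    /// [registry]
+    /// token = "..."
+    /// ```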
+ fn load_credentials(&self, cfg: &mut ConfigValue) -> CargoResult<()> { + let home_path = self.home_path.clone().into_path_unlocked(); + let credentials = home_path.join("credentials"); + if !fs::metadata(&credentials).is_ok() { + return Ok(()); + } + + let mut contents = String::new(); + let mut file = File::open(&credentials)?; + file.read_to_string(&mut contents).chain_err(|| { + format!( + "failed to read configuration file `{}`", + credentials.display() + ) + })?; + + let toml = cargo_toml::parse(&contents, &credentials, self).chain_err(|| { + format!( + "could not parse TOML configuration in `{}`", + credentials.display() + ) + })?; + + let mut value = CV::from_toml(&credentials, toml).chain_err(|| { + format!( + "failed to load TOML configuration from `{}`", + credentials.display() + ) + })?; + + // backwards compatibility for old .cargo/credentials layout + { + let value = match value { + CV::Table(ref mut value, _) => value, + _ => unreachable!(), + }; + + if let Some(token) = value.remove("token") { + if let Vacant(entry) = value.entry("registry".into()) { + let mut map = HashMap::new(); + map.insert("token".into(), token); + let table = CV::Table(map, PathBuf::from(".")); + entry.insert(table); + } + } + } + + // we want value to override cfg, so swap these + mem::swap(cfg, &mut value); + cfg.merge(value)?; + + Ok(()) + } + + /// Look for a path for `tool` in an environment variable or config path, but return `None` + /// if it's not present. + fn maybe_get_tool(&self, tool: &str) -> CargoResult> { + let var = tool.chars() + .flat_map(|c| c.to_uppercase()) + .collect::(); + if let Some(tool_path) = env::var_os(&var) { + let maybe_relative = match tool_path.to_str() { + Some(s) => s.contains("/") || s.contains("\\"), + None => false, + }; + let path = if maybe_relative { + self.cwd.join(tool_path) + } else { + PathBuf::from(tool_path) + }; + return Ok(Some(path)); + } + + let var = format!("build.{}", tool); + if let Some(tool_path) = self.get_path(&var)? { + return Ok(Some(tool_path.val)); + } + + Ok(None) + } + + /// Look for a path for `tool` in an environment variable or config path, defaulting to `tool` + /// as a path. 
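+    ///
+    /// For example, `get_tool("rustdoc")` consults the `RUSTDOC` environment
+    /// variable first, then the `build.rustdoc` config key, and finally falls
+    /// back to the plain name `rustdoc`, leaving resolution to `PATH` at
+    /// spawn time.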
+ fn get_tool(&self, tool: &str) -> CargoResult { + self.maybe_get_tool(tool) + .map(|t| t.unwrap_or_else(|| PathBuf::from(tool))) + } + + pub fn jobserver_from_env(&self) -> Option<&jobserver::Client> { + self.jobserver.as_ref() + } + + pub fn http(&self) -> CargoResult<&RefCell> { + let http = self.easy + .try_borrow_with(|| ops::http_handle(self).map(RefCell::new))?; + { + let mut http = http.borrow_mut(); + http.reset(); + ops::configure_http_handle(self, &mut http)?; + } + Ok(http) + } + + pub fn crates_io_source_id(&self, f: F) -> CargoResult + where + F: FnMut() -> CargoResult, + { + Ok(self.crates_io_source_id.try_borrow_with(f)?.clone()) + } +} + +#[derive(Eq, PartialEq, Clone, Copy)] +pub enum Location { + Project, + Global, +} + +#[derive(Eq, PartialEq, Clone, Deserialize)] +pub enum ConfigValue { + Integer(i64, PathBuf), + String(String, PathBuf), + List(Vec<(String, PathBuf)>, PathBuf), + Table(HashMap, PathBuf), + Boolean(bool, PathBuf), +} + +pub struct Value { + pub val: T, + pub definition: Definition, +} + +pub enum Definition { + Path(PathBuf), + Environment, +} + +impl fmt::Debug for ConfigValue { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + CV::Integer(i, ref path) => write!(f, "{} (from {})", i, path.display()), + CV::Boolean(b, ref path) => write!(f, "{} (from {})", b, path.display()), + CV::String(ref s, ref path) => write!(f, "{} (from {})", s, path.display()), + CV::List(ref list, ref path) => { + write!(f, "[")?; + for (i, &(ref s, ref path)) in list.iter().enumerate() { + if i > 0 { + write!(f, ", ")?; + } + write!(f, "{} (from {})", s, path.display())?; + } + write!(f, "] (from {})", path.display()) + } + CV::Table(ref table, _) => write!(f, "{:?}", table), + } + } +} + +impl Serialize for ConfigValue { + fn serialize(&self, s: S) -> Result { + match *self { + CV::String(ref string, _) => string.serialize(s), + CV::List(ref list, _) => { + let list: Vec<&String> = list.iter().map(|s| &s.0).collect(); + list.serialize(s) + } + CV::Table(ref table, _) => table.serialize(s), + CV::Boolean(b, _) => b.serialize(s), + CV::Integer(i, _) => i.serialize(s), + } + } +} + +impl ConfigValue { + fn from_toml(path: &Path, toml: toml::Value) -> CargoResult { + match toml { + toml::Value::String(val) => Ok(CV::String(val, path.to_path_buf())), + toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())), + toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())), + toml::Value::Array(val) => Ok(CV::List( + val.into_iter() + .map(|toml| match toml { + toml::Value::String(val) => Ok((val, path.to_path_buf())), + v => bail!("expected string but found {} in list", v.type_str()), + }) + .collect::>()?, + path.to_path_buf(), + )), + toml::Value::Table(val) => Ok(CV::Table( + val.into_iter() + .map(|(key, value)| { + let value = CV::from_toml(path, value) + .chain_err(|| format!("failed to parse key `{}`", key))?; + Ok((key, value)) + }) + .collect::>()?, + path.to_path_buf(), + )), + v => bail!( + "found TOML configuration value of unknown type `{}`", + v.type_str() + ), + } + } + + fn into_toml(self) -> toml::Value { + match self { + CV::Boolean(s, _) => toml::Value::Boolean(s), + CV::String(s, _) => toml::Value::String(s), + CV::Integer(i, _) => toml::Value::Integer(i), + CV::List(l, _) => { + toml::Value::Array(l.into_iter().map(|(s, _)| toml::Value::String(s)).collect()) + } + CV::Table(l, _) => { + toml::Value::Table(l.into_iter().map(|(k, v)| (k, v.into_toml())).collect()) + } + } + } + + fn merge(&mut self, from: ConfigValue) -> 
CargoResult<()> { + match (self, from) { + (&mut CV::String(..), CV::String(..)) + | (&mut CV::Integer(..), CV::Integer(..)) + | (&mut CV::Boolean(..), CV::Boolean(..)) => {} + (&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => { + let new = mem::replace(new, Vec::new()); + old.extend(new.into_iter()); + } + (&mut CV::Table(ref mut old, _), CV::Table(ref mut new, _)) => { + let new = mem::replace(new, HashMap::new()); + for (key, value) in new { + match old.entry(key.clone()) { + Occupied(mut entry) => { + let path = value.definition_path().to_path_buf(); + let entry = entry.get_mut(); + entry.merge(value).chain_err(|| { + format!( + "failed to merge key `{}` between \ + files:\n \ + file 1: {}\n \ + file 2: {}", + key, + entry.definition_path().display(), + path.display() + ) + })?; + } + Vacant(entry) => { + entry.insert(value); + } + }; + } + } + (expected, found) => { + return Err(internal(format!( + "expected {}, but found {}", + expected.desc(), + found.desc() + ))) + } + } + + Ok(()) + } + + pub fn i64(&self, key: &str) -> CargoResult<(i64, &Path)> { + match *self { + CV::Integer(i, ref p) => Ok((i, p)), + _ => self.expected("integer", key), + } + } + + pub fn string(&self, key: &str) -> CargoResult<(&str, &Path)> { + match *self { + CV::String(ref s, ref p) => Ok((s, p)), + _ => self.expected("string", key), + } + } + + pub fn table(&self, key: &str) -> CargoResult<(&HashMap, &Path)> { + match *self { + CV::Table(ref table, ref p) => Ok((table, p)), + _ => self.expected("table", key), + } + } + + pub fn list(&self, key: &str) -> CargoResult<&[(String, PathBuf)]> { + match *self { + CV::List(ref list, _) => Ok(list), + _ => self.expected("list", key), + } + } + + pub fn boolean(&self, key: &str) -> CargoResult<(bool, &Path)> { + match *self { + CV::Boolean(b, ref p) => Ok((b, p)), + _ => self.expected("bool", key), + } + } + + pub fn desc(&self) -> &'static str { + match *self { + CV::Table(..) => "table", + CV::List(..) => "array", + CV::String(..) => "string", + CV::Boolean(..) => "boolean", + CV::Integer(..) => "integer", + } + } + + pub fn definition_path(&self) -> &Path { + match *self { + CV::Boolean(_, ref p) + | CV::Integer(_, ref p) + | CV::String(_, ref p) + | CV::List(_, ref p) + | CV::Table(_, ref p) => p, + } + } + + pub fn expected(&self, wanted: &str, key: &str) -> CargoResult { + bail!( + "expected a {}, but found a {} for `{}` in {}", + wanted, + self.desc(), + key, + self.definition_path().display() + ) + } +} + +impl Definition { + pub fn root<'a>(&'a self, config: &'a Config) -> &'a Path { + match *self { + Definition::Path(ref p) => p.parent().unwrap().parent().unwrap(), + Definition::Environment => config.cwd(), + } + } +} + +impl fmt::Display for Definition { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Definition::Path(ref p) => p.display().fmt(f), + Definition::Environment => "the environment".fmt(f), + } + } +} + +pub fn homedir(cwd: &Path) -> Option { + ::home::cargo_home_with_cwd(cwd).ok() +} + +fn walk_tree(pwd: &Path, mut walk: F) -> CargoResult<()> +where + F: FnMut(&Path) -> CargoResult<()>, +{ + let mut stash: HashSet = HashSet::new(); + + for current in paths::ancestors(pwd) { + let possible = current.join(".cargo").join("config"); + if fs::metadata(&possible).is_ok() { + walk(&possible)?; + stash.insert(possible); + } + } + + // Once we're done, also be sure to walk the home directory even if it's not + // in our history to be sure we pick up that standard location for + // information. 
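+    // As an illustrative sketch: for a cwd of `/a/b/c`, the files consulted
+    // (when they exist) are `/a/b/c/.cargo/config`, `/a/b/.cargo/config`,
+    // `/a/.cargo/config`, `/.cargo/config`, and finally `$CARGO_HOME/config`,
+    // with the more local files winning when values are merged.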
+ let home = homedir(pwd).ok_or_else(|| { + format_err!( + "Cargo couldn't find your home directory. \ + This probably means that $HOME was not set." + ) + })?; + let config = home.join("config"); + if !stash.contains(&config) && fs::metadata(&config).is_ok() { + walk(&config)?; + } + + Ok(()) +} + +pub fn save_credentials(cfg: &Config, token: String, registry: Option) -> CargoResult<()> { + let mut file = { + cfg.home_path.create_dir()?; + cfg.home_path + .open_rw(Path::new("credentials"), cfg, "credentials' config file")? + }; + + let (key, value) = { + let key = "token".to_string(); + let value = ConfigValue::String(token, file.path().to_path_buf()); + let mut map = HashMap::new(); + map.insert(key, value); + let table = CV::Table(map, file.path().to_path_buf()); + + if let Some(registry) = registry { + let mut map = HashMap::new(); + map.insert(registry, table); + ( + "registries".into(), + CV::Table(map, file.path().to_path_buf()), + ) + } else { + ("registry".into(), table) + } + }; + + let mut contents = String::new(); + file.read_to_string(&mut contents).chain_err(|| { + format!( + "failed to read configuration file `{}`", + file.path().display() + ) + })?; + + let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?; + + // move the old token location to the new one + if let Some(token) = toml.as_table_mut().unwrap().remove("token") { + let mut map = HashMap::new(); + map.insert("token".to_string(), token); + toml.as_table_mut() + .unwrap() + .insert("registry".into(), map.into()); + } + + toml.as_table_mut().unwrap().insert(key, value.into_toml()); + + let contents = toml.to_string(); + file.seek(SeekFrom::Start(0))?; + file.write_all(contents.as_bytes())?; + file.file().set_len(contents.len() as u64)?; + set_permissions(file.file(), 0o600)?; + + return Ok(()); + + #[cfg(unix)] + fn set_permissions(file: &File, mode: u32) -> CargoResult<()> { + use std::os::unix::fs::PermissionsExt; + + let mut perms = file.metadata()?.permissions(); + perms.set_mode(mode); + file.set_permissions(perms)?; + Ok(()) + } + + #[cfg(not(unix))] + #[allow(unused)] + fn set_permissions(file: &File, mode: u32) -> CargoResult<()> { + Ok(()) + } +} diff --git a/src/cargo/util/dependency_queue.rs b/src/cargo/util/dependency_queue.rs new file mode 100644 index 000000000..a52c410e8 --- /dev/null +++ b/src/cargo/util/dependency_queue.rs @@ -0,0 +1,231 @@ +//! A graph-like structure used to represent a set of dependencies and in what +//! order they should be built. +//! +//! This structure is used to store the dependency graph and dynamically update +//! it to figure out when a dependency should be built. + +use std::collections::hash_map::Entry::{Occupied, Vacant}; +use std::collections::{HashMap, HashSet}; +use std::hash::Hash; + +pub use self::Freshness::{Dirty, Fresh}; + +#[derive(Debug)] +pub struct DependencyQueue { + /// A list of all known keys to build. + /// + /// The value of the hash map is list of dependencies which still need to be + /// built before the package can be built. Note that the set is dynamically + /// updated as more dependencies are built. + dep_map: HashMap, V)>, + + /// A reverse mapping of a package to all packages that depend on that + /// package. + /// + /// This map is statically known and does not get updated throughout the + /// lifecycle of the DependencyQueue. + reverse_dep_map: HashMap>, + + /// A set of dirty packages. + /// + /// Packages may become dirty over time if their dependencies are rebuilt. 
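+    /// (`finish` with a `Dirty` result marks every package that depends on
+    /// the finished one as dirty, which is how dirtiness propagates here.)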
+ dirty: HashSet, + + /// The packages which are currently being built, waiting for a call to + /// `finish`. + pending: HashSet, + + /// Topological depth of each key + depth: HashMap, +} + +/// Indication of the freshness of a package. +/// +/// A fresh package does not necessarily need to be rebuilt (unless a dependency +/// was also rebuilt), and a dirty package must always be rebuilt. +#[derive(PartialEq, Eq, Debug, Clone, Copy)] +pub enum Freshness { + Fresh, + Dirty, +} + +impl Freshness { + pub fn combine(&self, other: Freshness) -> Freshness { + match *self { + Fresh => other, + Dirty => Dirty, + } + } +} + +impl Default for DependencyQueue { + fn default() -> DependencyQueue { + DependencyQueue::new() + } +} + +impl DependencyQueue { + /// Creates a new dependency queue with 0 packages. + pub fn new() -> DependencyQueue { + DependencyQueue { + dep_map: HashMap::new(), + reverse_dep_map: HashMap::new(), + dirty: HashSet::new(), + pending: HashSet::new(), + depth: HashMap::new(), + } + } + + /// Adds a new package to this dependency queue. + /// + /// It is assumed that any dependencies of this package will eventually also + /// be added to the dependency queue. + pub fn queue(&mut self, fresh: Freshness, key: K, value: V, dependencies: &[K]) -> &mut V { + let slot = match self.dep_map.entry(key.clone()) { + Occupied(v) => return &mut v.into_mut().1, + Vacant(v) => v, + }; + + if fresh == Dirty { + self.dirty.insert(key.clone()); + } + + let mut my_dependencies = HashSet::new(); + for dep in dependencies { + my_dependencies.insert(dep.clone()); + let rev = self.reverse_dep_map + .entry(dep.clone()) + .or_insert_with(HashSet::new); + rev.insert(key.clone()); + } + &mut slot.insert((my_dependencies, value)).1 + } + + /// All nodes have been added, calculate some internal metadata and prepare + /// for `dequeue`. + pub fn queue_finished(&mut self) { + for key in self.dep_map.keys() { + depth(key, &self.reverse_dep_map, &mut self.depth); + } + + fn depth( + key: &K, + map: &HashMap>, + results: &mut HashMap, + ) -> usize { + const IN_PROGRESS: usize = !0; + + if let Some(&depth) = results.get(key) { + assert_ne!(depth, IN_PROGRESS, "cycle in DependencyQueue"); + return depth; + } + + results.insert(key.clone(), IN_PROGRESS); + + let depth = 1 + + map.get(&key) + .into_iter() + .flat_map(|it| it) + .map(|dep| depth(dep, map, results)) + .max() + .unwrap_or(0); + + *results.get_mut(key).unwrap() = depth; + + depth + } + } + + /// Dequeues a package that is ready to be built. + /// + /// A package is ready to be built when it has 0 un-built dependencies. If + /// `None` is returned then no packages are ready to be built. + pub fn dequeue(&mut self) -> Option<(Freshness, K, V)> { + // Look at all our crates and find everything that's ready to build (no + // deps). After we've got that candidate set select the one which has + // the maximum depth in the dependency graph. This way we should + // hopefully keep CPUs hottest the longest by ensuring that long + // dependency chains are scheduled early on in the build process and the + // leafs higher in the tree can fill in the cracks later. + // + // TODO: it'd be best here to throw in a heuristic of crate size as + // well. For example how long did this crate historically take to + // compile? How large is its source code? etc. 
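+        // Worked example (mirroring the `deep_first` test below): with 2
+        // depending on 1, 4 on {2, 3}, and 5 on {4, 3}, the depths come out
+        // as 1:4, 2:3, 3:3, 4:2, 5:1. Initially both 1 and 3 have no pending
+        // deps, and 1 is dequeued first because its depth is larger.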
+ let next = self.dep_map + .iter() + .filter(|&(_, &(ref deps, _))| deps.is_empty()) + .map(|(key, _)| key.clone()) + .max_by_key(|k| self.depth[k]); + let key = match next { + Some(key) => key, + None => return None, + }; + let (_, data) = self.dep_map.remove(&key).unwrap(); + let fresh = if self.dirty.contains(&key) { + Dirty + } else { + Fresh + }; + self.pending.insert(key.clone()); + Some((fresh, key, data)) + } + + /// Returns whether there are remaining packages to be built. + pub fn is_empty(&self) -> bool { + self.dep_map.is_empty() && self.pending.is_empty() + } + + /// Returns the number of remaining packages to be built. + pub fn len(&self) -> usize { + self.dep_map.len() + self.pending.len() + } + + /// Indicate that a package has been built. + /// + /// This function will update the dependency queue with this information, + /// possibly allowing the next invocation of `dequeue` to return a package. + pub fn finish(&mut self, key: &K, fresh: Freshness) { + assert!(self.pending.remove(key)); + let reverse_deps = match self.reverse_dep_map.get(key) { + Some(deps) => deps, + None => return, + }; + for dep in reverse_deps.iter() { + if fresh == Dirty { + self.dirty.insert(dep.clone()); + } + assert!(self.dep_map.get_mut(dep).unwrap().0.remove(key)); + } + } +} + +#[cfg(test)] +mod test { + use super::{DependencyQueue, Freshness}; + + #[test] + fn deep_first() { + let mut q = DependencyQueue::new(); + + q.queue(Freshness::Fresh, 1, (), &[]); + q.queue(Freshness::Fresh, 2, (), &[1]); + q.queue(Freshness::Fresh, 3, (), &[]); + q.queue(Freshness::Fresh, 4, (), &[2, 3]); + q.queue(Freshness::Fresh, 5, (), &[4, 3]); + q.queue_finished(); + + assert_eq!(q.dequeue(), Some((Freshness::Fresh, 1, ()))); + assert_eq!(q.dequeue(), Some((Freshness::Fresh, 3, ()))); + assert_eq!(q.dequeue(), None); + q.finish(&3, Freshness::Fresh); + assert_eq!(q.dequeue(), None); + q.finish(&1, Freshness::Fresh); + assert_eq!(q.dequeue(), Some((Freshness::Fresh, 2, ()))); + assert_eq!(q.dequeue(), None); + q.finish(&2, Freshness::Fresh); + assert_eq!(q.dequeue(), Some((Freshness::Fresh, 4, ()))); + assert_eq!(q.dequeue(), None); + q.finish(&4, Freshness::Fresh); + assert_eq!(q.dequeue(), Some((Freshness::Fresh, 5, ()))); + } +} diff --git a/src/cargo/util/errors.rs b/src/cargo/util/errors.rs new file mode 100644 index 000000000..d3b1d43d0 --- /dev/null +++ b/src/cargo/util/errors.rs @@ -0,0 +1,283 @@ +#![allow(unknown_lints)] + +use std::fmt; +use std::process::{ExitStatus, Output}; +use std::str; + +use core::{TargetKind, Workspace}; +use failure::{Context, Error, Fail}; +use clap; + +pub use failure::Error as CargoError; +pub type CargoResult = Result; + +pub trait CargoResultExt { + fn chain_err(self, f: F) -> Result> + where + F: FnOnce() -> D, + D: fmt::Display + Send + Sync + 'static; +} + +impl CargoResultExt for Result +where + E: Into, +{ + fn chain_err(self, f: F) -> Result> + where + F: FnOnce() -> D, + D: fmt::Display + Send + Sync + 'static, + { + self.map_err(|failure| { + let context = f(); + failure.into().context(context) + }) + } +} + +#[derive(Debug, Fail)] +#[fail(display = "failed to get 200 response from `{}`, got {}", url, code)] +pub struct HttpNot200 { + pub code: u32, + pub url: String, +} + +pub struct Internal { + inner: Error, +} + +impl Internal { + pub fn new(inner: Error) -> Internal { + Internal { inner } + } +} + +impl Fail for Internal { + fn cause(&self) -> Option<&Fail> { + self.inner.cause().cause() + } +} + +impl fmt::Debug for Internal { + fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) + } +} + +impl fmt::Display for Internal { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) + } +} + +// ============================================================================= +// Process errors +#[derive(Debug, Fail)] +#[fail(display = "{}", desc)] +pub struct ProcessError { + pub desc: String, + pub exit: Option, + pub output: Option, +} + +// ============================================================================= +// Cargo test errors. + +/// Error when testcases fail +#[derive(Debug, Fail)] +#[fail(display = "{}", desc)] +pub struct CargoTestError { + pub test: Test, + pub desc: String, + pub exit: Option, + pub causes: Vec, +} + +#[derive(Debug)] +pub enum Test { + Multiple, + Doc, + UnitTest { + kind: TargetKind, + name: String, + pkg_name: String, + }, +} + +impl CargoTestError { + pub fn new(test: Test, errors: Vec) -> Self { + if errors.is_empty() { + panic!("Cannot create CargoTestError from empty Vec") + } + let desc = errors + .iter() + .map(|error| error.desc.clone()) + .collect::>() + .join("\n"); + CargoTestError { + test, + desc, + exit: errors[0].exit, + causes: errors, + } + } + + pub fn hint(&self, ws: &Workspace) -> String { + match self.test { + Test::UnitTest { + ref kind, + ref name, + ref pkg_name, + } => { + let pkg_info = if ws.members().count() > 1 && ws.is_virtual() { + format!("-p {} ", pkg_name) + } else { + String::new() + }; + + match *kind { + TargetKind::Bench => { + format!("test failed, to rerun pass '{}--bench {}'", pkg_info, name) + } + TargetKind::Bin => { + format!("test failed, to rerun pass '{}--bin {}'", pkg_info, name) + } + TargetKind::Lib(_) => format!("test failed, to rerun pass '{}--lib'", pkg_info), + TargetKind::Test => { + format!("test failed, to rerun pass '{}--test {}'", pkg_info, name) + } + TargetKind::ExampleBin | TargetKind::ExampleLib(_) => { + format!("test failed, to rerun pass '{}--example {}", pkg_info, name) + } + _ => "test failed.".into(), + } + } + Test::Doc => "test failed, to rerun pass '--doc'".into(), + _ => "test failed.".into(), + } + } +} + +// ============================================================================= +// CLI errors + +pub type CliResult = Result<(), CliError>; + +#[derive(Debug)] +pub struct CliError { + pub error: Option, + pub unknown: bool, + pub exit_code: i32, +} + +impl CliError { + pub fn new(error: CargoError, code: i32) -> CliError { + let unknown = error.downcast_ref::().is_some(); + CliError { + error: Some(error), + exit_code: code, + unknown, + } + } + + pub fn code(code: i32) -> CliError { + CliError { + error: None, + exit_code: code, + unknown: false, + } + } +} + +impl From for CliError { + fn from(err: CargoError) -> CliError { + CliError::new(err, 101) + } +} + +impl From for CliError { + fn from(err: clap::Error) -> CliError { + let code = if err.use_stderr() { 1 } else { 0 }; + CliError::new(err.into(), code) + } +} + +// ============================================================================= +// Construction helpers + +pub fn process_error( + msg: &str, + status: Option<&ExitStatus>, + output: Option<&Output>, +) -> ProcessError { + let exit = match status { + Some(s) => status_to_string(s), + None => "never executed".to_string(), + }; + let mut desc = format!("{} ({})", &msg, exit); + + if let Some(out) = output { + match str::from_utf8(&out.stdout) { + Ok(s) if !s.trim().is_empty() => { + desc.push_str("\n--- stdout\n"); + desc.push_str(s); + } + Ok(..) | Err(..) 
=> {} + } + match str::from_utf8(&out.stderr) { + Ok(s) if !s.trim().is_empty() => { + desc.push_str("\n--- stderr\n"); + desc.push_str(s); + } + Ok(..) | Err(..) => {} + } + } + + return ProcessError { + desc, + exit: status.cloned(), + output: output.cloned(), + }; + + #[cfg(unix)] + fn status_to_string(status: &ExitStatus) -> String { + use std::os::unix::process::*; + use libc; + + if let Some(signal) = status.signal() { + let name = match signal as libc::c_int { + libc::SIGABRT => ", SIGABRT: process abort signal", + libc::SIGALRM => ", SIGALRM: alarm clock", + libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation", + libc::SIGHUP => ", SIGHUP: hangup", + libc::SIGILL => ", SIGILL: illegal instruction", + libc::SIGINT => ", SIGINT: terminal interrupt signal", + libc::SIGKILL => ", SIGKILL: kill", + libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read", + libc::SIGQUIT => ", SIGQUIT: terminal quite signal", + libc::SIGSEGV => ", SIGSEGV: invalid memory reference", + libc::SIGTERM => ", SIGTERM: termination signal", + libc::SIGBUS => ", SIGBUS: access to undefined memory", + #[cfg(not(target_os = "haiku"))] + libc::SIGSYS => ", SIGSYS: bad system call", + libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap", + _ => "", + }; + format!("signal: {}{}", signal, name) + } else { + status.to_string() + } + } + + #[cfg(windows)] + fn status_to_string(status: &ExitStatus) -> String { + status.to_string() + } +} + +pub fn internal(error: S) -> CargoError { + _internal(&error) +} + +fn _internal(error: &fmt::Display) -> CargoError { + Internal::new(format_err!("{}", error)).into() +} diff --git a/src/cargo/util/flock.rs b/src/cargo/util/flock.rs new file mode 100644 index 000000000..bba61fbf5 --- /dev/null +++ b/src/cargo/util/flock.rs @@ -0,0 +1,368 @@ +use std::fs::{self, File, OpenOptions}; +use std::io::{Read, Seek, SeekFrom, Write}; +use std::io; +use std::path::{Display, Path, PathBuf}; + +use termcolor::Color::Cyan; +use fs2::{lock_contended_error, FileExt}; +#[allow(unused_imports)] +use libc; + +use util::Config; +use util::paths; +use util::errors::{CargoError, CargoResult, CargoResultExt}; + +pub struct FileLock { + f: Option, + path: PathBuf, + state: State, +} + +#[derive(PartialEq, Debug)] +enum State { + Unlocked, + Shared, + Exclusive, +} + +impl FileLock { + /// Returns the underlying file handle of this lock. + pub fn file(&self) -> &File { + self.f.as_ref().unwrap() + } + + /// Returns the underlying path that this lock points to. + /// + /// Note that special care must be taken to ensure that the path is not + /// referenced outside the lifetime of this lock. + pub fn path(&self) -> &Path { + assert_ne!(self.state, State::Unlocked); + &self.path + } + + /// Returns the parent path containing this file + pub fn parent(&self) -> &Path { + assert_ne!(self.state, State::Unlocked); + self.path.parent().unwrap() + } + + /// Removes all sibling files to this locked file. + /// + /// This can be useful if a directory is locked with a sentinel file but it + /// needs to be cleared out as it may be corrupt. + pub fn remove_siblings(&self) -> CargoResult<()> { + let path = self.path(); + for entry in path.parent().unwrap().read_dir()? 
{ + let entry = entry?; + if Some(&entry.file_name()[..]) == path.file_name() { + continue; + } + let kind = entry.file_type()?; + if kind.is_dir() { + paths::remove_dir_all(entry.path())?; + } else { + paths::remove_file(entry.path())?; + } + } + Ok(()) + } +} + +impl Read for FileLock { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.file().read(buf) + } +} + +impl Seek for FileLock { + fn seek(&mut self, to: SeekFrom) -> io::Result { + self.file().seek(to) + } +} + +impl Write for FileLock { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.file().write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.file().flush() + } +} + +impl Drop for FileLock { + fn drop(&mut self) { + if self.state != State::Unlocked { + if let Some(f) = self.f.take() { + let _ = f.unlock(); + } + } + } +} + +/// A "filesystem" is intended to be a globally shared, hence locked, resource +/// in Cargo. +/// +/// The `Path` of a filesystem cannot be learned unless it's done in a locked +/// fashion, and otherwise functions on this structure are prepared to handle +/// concurrent invocations across multiple instances of Cargo. +#[derive(Clone, Debug)] +pub struct Filesystem { + root: PathBuf, +} + +impl Filesystem { + /// Creates a new filesystem to be rooted at the given path. + pub fn new(path: PathBuf) -> Filesystem { + Filesystem { root: path } + } + + /// Like `Path::join`, creates a new filesystem rooted at this filesystem + /// joined with the given path. + pub fn join>(&self, other: T) -> Filesystem { + Filesystem::new(self.root.join(other)) + } + + /// Like `Path::push`, pushes a new path component onto this filesystem. + pub fn push>(&mut self, other: T) { + self.root.push(other); + } + + /// Consumes this filesystem and returns the underlying `PathBuf`. + /// + /// Note that this is a relatively dangerous operation and should be used + /// with great caution!. + pub fn into_path_unlocked(self) -> PathBuf { + self.root + } + + /// Creates the directory pointed to by this filesystem. + /// + /// Handles errors where other Cargo processes are also attempting to + /// concurrently create this directory. + pub fn create_dir(&self) -> io::Result<()> { + create_dir_all(&self.root) + } + + /// Returns an adaptor that can be used to print the path of this + /// filesystem. + pub fn display(&self) -> Display { + self.root.display() + } + + /// Opens exclusive access to a file, returning the locked version of a + /// file. + /// + /// This function will create a file at `path` if it doesn't already exist + /// (including intermediate directories), and then it will acquire an + /// exclusive lock on `path`. If the process must block waiting for the + /// lock, the `msg` is printed to `config`. + /// + /// The returned file can be accessed to look at the path and also has + /// read/write access to the underlying file. + pub fn open_rw

<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
+    where
+        P: AsRef<Path>,
+    {
+        self.open(
+            path.as_ref(),
+            OpenOptions::new().read(true).write(true).create(true),
+            State::Exclusive,
+            config,
+            msg,
+        )
+    }
+
+    /// Opens shared access to a file, returning the locked version of a file.
+    ///
+    /// This function will fail if `path` doesn't already exist, but if it does
+    /// then it will acquire a shared lock on `path`. If the process must block
+    /// waiting for the lock, the `msg` is printed to `config`.
+    ///
+    /// The returned file can be accessed to look at the path and also has read
+    /// access to the underlying file. Any writes to the file will return an
+    /// error.
+    pub fn open_ro<P>

(&self, path: P, config: &Config, msg: &str) -> CargoResult + where + P: AsRef, + { + self.open( + path.as_ref(), + OpenOptions::new().read(true), + State::Shared, + config, + msg, + ) + } + + fn open( + &self, + path: &Path, + opts: &OpenOptions, + state: State, + config: &Config, + msg: &str, + ) -> CargoResult { + let path = self.root.join(path); + + // If we want an exclusive lock then if we fail because of NotFound it's + // likely because an intermediate directory didn't exist, so try to + // create the directory and then continue. + let f = opts.open(&path) + .or_else(|e| { + if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive { + create_dir_all(path.parent().unwrap())?; + opts.open(&path) + } else { + Err(e) + } + }) + .chain_err(|| format!("failed to open: {}", path.display()))?; + match state { + State::Exclusive => { + acquire(config, msg, &path, &|| f.try_lock_exclusive(), &|| { + f.lock_exclusive() + })?; + } + State::Shared => { + acquire(config, msg, &path, &|| f.try_lock_shared(), &|| { + f.lock_shared() + })?; + } + State::Unlocked => {} + } + Ok(FileLock { + f: Some(f), + path, + state, + }) + } +} + +impl PartialEq for Filesystem { + fn eq(&self, other: &Path) -> bool { + self.root == other + } +} + +impl PartialEq for Path { + fn eq(&self, other: &Filesystem) -> bool { + self == other.root + } +} + +/// Acquires a lock on a file in a "nice" manner. +/// +/// Almost all long-running blocking actions in Cargo have a status message +/// associated with them as we're not sure how long they'll take. Whenever a +/// conflicted file lock happens, this is the case (we're not sure when the lock +/// will be released). +/// +/// This function will acquire the lock on a `path`, printing out a nice message +/// to the console if we have to wait for it. It will first attempt to use `try` +/// to acquire a lock on the crate, and in the case of contention it will emit a +/// status message based on `msg` to `config`'s shell, and then use `block` to +/// block waiting to acquire a lock. +/// +/// Returns an error if the lock could not be acquired or if any error other +/// than a contention error happens. +fn acquire( + config: &Config, + msg: &str, + path: &Path, + try: &Fn() -> io::Result<()>, + block: &Fn() -> io::Result<()>, +) -> CargoResult<()> { + // File locking on Unix is currently implemented via `flock`, which is known + // to be broken on NFS. We could in theory just ignore errors that happen on + // NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking + // forever**, even if the nonblocking flag is passed! + // + // As a result, we just skip all file locks entirely on NFS mounts. That + // should avoid calling any `flock` functions at all, and it wouldn't work + // there anyway. + // + // [1]: https://github.com/rust-lang/cargo/issues/2615 + if is_on_nfs_mount(path) { + return Ok(()); + } + + match try() { + Ok(()) => return Ok(()), + + // In addition to ignoring NFS which is commonly not working we also + // just ignore locking on filesystems that look like they don't + // implement file locking. We detect that here via the return value of + // locking (e.g. inspecting errno). 
+ #[cfg(unix)] + Err(ref e) if e.raw_os_error() == Some(libc::ENOTSUP) => + { + return Ok(()) + } + + #[cfg(target_os = "linux")] + Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) => + { + return Ok(()) + } + + Err(e) => { + if e.raw_os_error() != lock_contended_error().raw_os_error() { + let e = CargoError::from(e); + let cx = format!("failed to lock file: {}", path.display()); + return Err(e.context(cx).into()); + } + } + } + let msg = format!("waiting for file lock on {}", msg); + config.shell().status_with_color("Blocking", &msg, Cyan)?; + + block().chain_err(|| format!("failed to lock file: {}", path.display()))?; + return Ok(()); + + #[cfg(all(target_os = "linux", not(target_env = "musl")))] + fn is_on_nfs_mount(path: &Path) -> bool { + use std::ffi::CString; + use std::mem; + use std::os::unix::prelude::*; + + let path = match CString::new(path.as_os_str().as_bytes()) { + Ok(path) => path, + Err(_) => return false, + }; + + unsafe { + let mut buf: libc::statfs = mem::zeroed(); + let r = libc::statfs(path.as_ptr(), &mut buf); + + r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32 + } + } + + #[cfg(any(not(target_os = "linux"), target_env = "musl"))] + fn is_on_nfs_mount(_path: &Path) -> bool { + false + } +} + +fn create_dir_all(path: &Path) -> io::Result<()> { + match create_dir(path) { + Ok(()) => Ok(()), + Err(e) => { + if e.kind() == io::ErrorKind::NotFound { + if let Some(p) = path.parent() { + return create_dir_all(p).and_then(|()| create_dir(path)); + } + } + Err(e) + } + } +} + +fn create_dir(path: &Path) -> io::Result<()> { + match fs::create_dir(path) { + Ok(()) => Ok(()), + Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => Ok(()), + Err(e) => Err(e), + } +} diff --git a/src/cargo/util/graph.rs b/src/cargo/util/graph.rs new file mode 100644 index 000000000..b5899e9e1 --- /dev/null +++ b/src/cargo/util/graph.rs @@ -0,0 +1,139 @@ +use std::fmt; +use std::hash::Hash; +use std::collections::hash_set::{HashSet, Iter}; +use std::collections::hash_map::{HashMap, Keys}; + +pub struct Graph { + nodes: HashMap>, +} + +enum Mark { + InProgress, + Done, +} + +pub type Nodes<'a, N> = Keys<'a, N, HashSet>; +pub type Edges<'a, N> = Iter<'a, N>; + +impl Graph { + pub fn new() -> Graph { + Graph { + nodes: HashMap::new(), + } + } + + pub fn add(&mut self, node: N, children: &[N]) { + self.nodes + .entry(node) + .or_insert_with(HashSet::new) + .extend(children.iter().cloned()); + } + + pub fn link(&mut self, node: N, child: N) { + self.nodes + .entry(node) + .or_insert_with(HashSet::new) + .insert(child); + } + + pub fn get_nodes(&self) -> &HashMap> { + &self.nodes + } + + pub fn edges(&self, node: &N) -> Option> { + self.nodes.get(node).map(|set| set.iter()) + } + + pub fn sort(&self) -> Option> { + let mut ret = Vec::new(); + let mut marks = HashMap::new(); + + for node in self.nodes.keys() { + self.visit(node, &mut ret, &mut marks); + } + + Some(ret) + } + + fn visit(&self, node: &N, dst: &mut Vec, marks: &mut HashMap) { + if marks.contains_key(node) { + return; + } + + marks.insert(node.clone(), Mark::InProgress); + + for child in &self.nodes[node] { + self.visit(child, dst, marks); + } + + dst.push(node.clone()); + marks.insert(node.clone(), Mark::Done); + } + + pub fn iter(&self) -> Nodes { + self.nodes.keys() + } + + /// Resolves one of the paths from the given dependent package up to + /// the root. 
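+    ///
+    /// A minimal sketch (illustrative, not part of the original source):
+    ///
+    /// ```ignore
+    /// let mut g = Graph::new();
+    /// g.add("root", &["mid"]);  // root depends on mid
+    /// g.add("mid", &["leaf"]);  // mid depends on leaf
+    /// assert_eq!(g.path_to_top(&"leaf"), vec![&"leaf", &"mid", &"root"]);
+    /// ```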
+ pub fn path_to_top<'a>(&'a self, mut pkg: &'a N) -> Vec<&'a N> { + // Note that this implementation isn't the most robust per se, we'll + // likely have to tweak this over time. For now though it works for what + // it's used for! + let mut result = vec![pkg]; + let first_pkg_depending_on = |pkg: &N, res: &[&N]| { + self.get_nodes() + .iter() + .filter(|&(_node, adjacent)| adjacent.contains(pkg)) + // Note that we can have "cycles" introduced through dev-dependency + // edges, so make sure we don't loop infinitely. + .filter(|&(_node, _)| !res.contains(&_node)) + .next() + .map(|p| p.0) + }; + while let Some(p) = first_pkg_depending_on(pkg, &result) { + result.push(p); + pkg = p; + } + result + } +} + +impl Default for Graph { + fn default() -> Graph { + Graph::new() + } +} + +impl fmt::Debug for Graph { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + writeln!(fmt, "Graph {{")?; + + for (n, e) in &self.nodes { + writeln!(fmt, " - {}", n)?; + + for n in e.iter() { + writeln!(fmt, " - {}", n)?; + } + } + + write!(fmt, "}}")?; + + Ok(()) + } +} + +impl PartialEq for Graph { + fn eq(&self, other: &Graph) -> bool { + self.nodes.eq(&other.nodes) + } +} +impl Eq for Graph {} + +impl Clone for Graph { + fn clone(&self) -> Graph { + Graph { + nodes: self.nodes.clone(), + } + } +} diff --git a/src/cargo/util/hex.rs b/src/cargo/util/hex.rs new file mode 100644 index 000000000..7e4dd00e9 --- /dev/null +++ b/src/cargo/util/hex.rs @@ -0,0 +1,27 @@ +#![allow(deprecated)] + +use hex; +use std::hash::{Hash, Hasher, SipHasher}; + +pub fn to_hex(num: u64) -> String { + hex::encode(&[ + (num >> 0) as u8, + (num >> 8) as u8, + (num >> 16) as u8, + (num >> 24) as u8, + (num >> 32) as u8, + (num >> 40) as u8, + (num >> 48) as u8, + (num >> 56) as u8, + ]) +} + +pub fn hash_u64(hashable: &H) -> u64 { + let mut hasher = SipHasher::new_with_keys(0, 0); + hashable.hash(&mut hasher); + hasher.finish() +} + +pub fn short_hash(hashable: &H) -> String { + to_hex(hash_u64(hashable)) +} diff --git a/src/cargo/util/important_paths.rs b/src/cargo/util/important_paths.rs new file mode 100644 index 000000000..2fb4dea59 --- /dev/null +++ b/src/cargo/util/important_paths.rs @@ -0,0 +1,32 @@ +use std::fs; +use std::path::{Path, PathBuf}; +use util::errors::CargoResult; +use util::paths; + +/// Find the root Cargo.toml +pub fn find_root_manifest_for_wd(cwd: &Path) -> CargoResult { + let file = "Cargo.toml"; + for current in paths::ancestors(cwd) { + let manifest = current.join(file); + if fs::metadata(&manifest).is_ok() { + return Ok(manifest); + } + } + + bail!( + "could not find `{}` in `{}` or any parent directory", + file, + cwd.display() + ) +} + +/// Return the path to the `file` in `pwd`, if it exists. +pub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult { + let manifest = pwd.join(file); + + if manifest.exists() { + Ok(manifest) + } else { + bail!("Could not find `{}` in `{}`", file, pwd.display()) + } +} diff --git a/src/cargo/util/job.rs b/src/cargo/util/job.rs new file mode 100644 index 000000000..0df14e061 --- /dev/null +++ b/src/cargo/util/job.rs @@ -0,0 +1,271 @@ +//! Job management (mostly for windows) +//! +//! Most of the time when you're running cargo you expect Ctrl-C to actually +//! terminate the entire tree of processes in play, not just the one at the top +//! (cago). This currently works "by default" on Unix platforms because Ctrl-C +//! actually sends a signal to the *process group* rather than the parent +//! process, so everything will get torn down. 
On Windows, however, this does +//! not happen and Ctrl-C just kills cargo. +//! +//! To achieve the same semantics on Windows we use Job Objects to ensure that +//! all processes die at the same time. Job objects have a mode of operation +//! where when all handles to the object are closed it causes all child +//! processes associated with the object to be terminated immediately. +//! Conveniently whenever a process in the job object spawns a new process the +//! child will be associated with the job object as well. This means if we add +//! ourselves to the job object we create then everything will get torn down! + +pub use self::imp::Setup; + +pub fn setup() -> Option { + unsafe { imp::setup() } +} + +#[cfg(unix)] +mod imp { + use std::env; + use libc; + + pub type Setup = (); + + pub unsafe fn setup() -> Option<()> { + // There's a test case for the behavior of + // when-cargo-is-killed-subprocesses-are-also-killed, but that requires + // one cargo spawned to become its own session leader, so we do that + // here. + if env::var("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE").is_ok() { + libc::setsid(); + } + Some(()) + } +} + +#[cfg(windows)] +mod imp { + extern crate winapi; + + use std::ffi::OsString; + use std::io; + use std::mem; + use std::os::windows::prelude::*; + + use self::winapi::shared::basetsd::*; + use self::winapi::shared::minwindef::*; + use self::winapi::shared::minwindef::{FALSE, TRUE}; + use self::winapi::um::handleapi::*; + use self::winapi::um::jobapi2::*; + use self::winapi::um::jobapi::*; + use self::winapi::um::processthreadsapi::*; + use self::winapi::um::psapi::*; + use self::winapi::um::synchapi::*; + use self::winapi::um::winbase::*; + use self::winapi::um::winnt::*; + use self::winapi::um::winnt::HANDLE; + + pub struct Setup { + job: Handle, + } + + pub struct Handle { + inner: HANDLE, + } + + fn last_err() -> io::Error { + io::Error::last_os_error() + } + + pub unsafe fn setup() -> Option { + // Creates a new job object for us to use and then adds ourselves to it. + // Note that all errors are basically ignored in this function, + // intentionally. Job objects are "relatively new" in Windows, + // particularly the ability to support nested job objects. Older + // Windows installs don't support this ability. We probably don't want + // to force Cargo to abort in this situation or force others to *not* + // use job objects, so we instead just ignore errors and assume that + // we're otherwise part of someone else's job object in this case. + + let job = CreateJobObjectW(0 as *mut _, 0 as *const _); + if job.is_null() { + return None; + } + let job = Handle { inner: job }; + + // Indicate that when all handles to the job object are gone that all + // process in the object should be killed. Note that this includes our + // entire process tree by default because we've added ourselves and and + // our children will reside in the job once we spawn a process. + let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION; + info = mem::zeroed(); + info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE; + let r = SetInformationJobObject( + job.inner, + JobObjectExtendedLimitInformation, + &mut info as *mut _ as LPVOID, + mem::size_of_val(&info) as DWORD, + ); + if r == 0 { + return None; + } + + // Assign our process to this job object, meaning that our children will + // now live or die based on our existence. 
+ let me = GetCurrentProcess(); + let r = AssignProcessToJobObject(job.inner, me); + if r == 0 { + return None; + } + + Some(Setup { job }) + } + + impl Drop for Setup { + fn drop(&mut self) { + // This is a litte subtle. By default if we are terminated then all + // processes in our job object are terminated as well, but we + // intentionally want to whitelist some processes to outlive our job + // object (see below). + // + // To allow for this, we manually kill processes instead of letting + // the job object kill them for us. We do this in a loop to handle + // processes spawning other processes. + // + // Finally once this is all done we know that the only remaining + // ones are ourselves and the whitelisted processes. The destructor + // here then configures our job object to *not* kill everything on + // close, then closes the job object. + unsafe { + while self.kill_remaining() { + info!("killed some, going for more"); + } + + let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION; + info = mem::zeroed(); + let r = SetInformationJobObject( + self.job.inner, + JobObjectExtendedLimitInformation, + &mut info as *mut _ as LPVOID, + mem::size_of_val(&info) as DWORD, + ); + if r == 0 { + info!("failed to configure job object to defaults: {}", last_err()); + } + } + } + } + + impl Setup { + unsafe fn kill_remaining(&mut self) -> bool { + #[repr(C)] + struct Jobs { + header: JOBOBJECT_BASIC_PROCESS_ID_LIST, + list: [ULONG_PTR; 1024], + } + + let mut jobs: Jobs = mem::zeroed(); + let r = QueryInformationJobObject( + self.job.inner, + JobObjectBasicProcessIdList, + &mut jobs as *mut _ as LPVOID, + mem::size_of_val(&jobs) as DWORD, + 0 as *mut _, + ); + if r == 0 { + info!("failed to query job object: {}", last_err()); + return false; + } + + let mut killed = false; + let list = &jobs.list[..jobs.header.NumberOfProcessIdsInList as usize]; + assert!(list.len() > 0); + info!("found {} remaining processes", list.len() - 1); + + let list = list.iter() + .filter(|&&id| { + // let's not kill ourselves + id as DWORD != GetCurrentProcessId() + }) + .filter_map(|&id| { + // Open the process with the necessary rights, and if this + // fails then we probably raced with the process exiting so we + // ignore the problem. + let flags = PROCESS_QUERY_INFORMATION | PROCESS_TERMINATE | SYNCHRONIZE; + let p = OpenProcess(flags, FALSE, id as DWORD); + if p.is_null() { + None + } else { + Some(Handle { inner: p }) + } + }) + .filter(|p| { + // Test if this process was actually in the job object or not. + // If it's not then we likely raced with something else + // recycling this PID, so we just skip this step. + let mut res = 0; + let r = IsProcessInJob(p.inner, self.job.inner, &mut res); + if r == 0 { + info!("failed to test is process in job: {}", last_err()); + return false; + } + res == TRUE + }); + + for p in list { + // Load the file which this process was spawned from. We then + // later use this for identification purposes. + let mut buf = [0; 1024]; + let r = GetProcessImageFileNameW(p.inner, buf.as_mut_ptr(), buf.len() as DWORD); + if r == 0 { + info!("failed to get image name: {}", last_err()); + continue; + } + let s = OsString::from_wide(&buf[..r as usize]); + info!("found remaining: {:?}", s); + + // And here's where we find the whole purpose for this + // function! 
Currently, our only whitelisted process is + // `mspdbsrv.exe`, and more details about that can be found + // here: + // + // https://github.com/rust-lang/rust/issues/33145 + // + // The gist of it is that all builds on one machine use the + // same `mspdbsrv.exe` instance. If we were to kill this + // instance then we could erroneously cause other builds to + // fail. + if let Some(s) = s.to_str() { + if s.contains("mspdbsrv") { + info!("\toops, this is mspdbsrv"); + continue; + } + } + + // Ok, this isn't mspdbsrv, let's kill the process. After we + // kill it we wait on it to ensure that the next time around in + // this function we're not going to see it again. + let r = TerminateProcess(p.inner, 1); + if r == 0 { + info!("\tfailed to kill subprocess: {}", last_err()); + info!("\tassuming subprocess is dead..."); + } else { + info!("\tterminated subprocess"); + } + let r = WaitForSingleObject(p.inner, INFINITE); + if r != 0 { + info!("failed to wait for process to die: {}", last_err()); + return false; + } + killed = true; + } + + killed + } + } + + impl Drop for Handle { + fn drop(&mut self) { + unsafe { + CloseHandle(self.inner); + } + } + } +} diff --git a/src/cargo/util/lev_distance.rs b/src/cargo/util/lev_distance.rs new file mode 100644 index 000000000..1d1eae05a --- /dev/null +++ b/src/cargo/util/lev_distance.rs @@ -0,0 +1,56 @@ +use std::cmp; + +pub fn lev_distance(me: &str, t: &str) -> usize { + if me.is_empty() { + return t.chars().count(); + } + if t.is_empty() { + return me.chars().count(); + } + + let mut dcol = (0..t.len() + 1).collect::>(); + let mut t_last = 0; + + for (i, sc) in me.chars().enumerate() { + let mut current = i; + dcol[0] = current + 1; + + for (j, tc) in t.chars().enumerate() { + let next = dcol[j + 1]; + + if sc == tc { + dcol[j + 1] = current; + } else { + dcol[j + 1] = cmp::min(current, next); + dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1; + } + + current = next; + t_last = j; + } + } + + dcol[t_last + 1] +} + +#[test] +fn test_lev_distance() { + use std::char::{from_u32, MAX}; + // Test bytelength agnosticity + for c in (0u32..MAX as u32) + .filter_map(|i| from_u32(i)) + .map(|i| i.to_string()) + { + assert_eq!(lev_distance(&c, &c), 0); + } + + let a = "\nMäry häd ä little lämb\n\nLittle lämb\n"; + let b = "\nMary häd ä little lämb\n\nLittle lämb\n"; + let c = "Mary häd ä little lämb\n\nLittle lämb\n"; + assert_eq!(lev_distance(a, b), 1); + assert_eq!(lev_distance(b, a), 1); + assert_eq!(lev_distance(a, c), 2); + assert_eq!(lev_distance(c, a), 2); + assert_eq!(lev_distance(b, c), 1); + assert_eq!(lev_distance(c, b), 1); +} diff --git a/src/cargo/util/machine_message.rs b/src/cargo/util/machine_message.rs new file mode 100644 index 000000000..d2225eacd --- /dev/null +++ b/src/cargo/util/machine_message.rs @@ -0,0 +1,58 @@ +use serde::ser; +use serde_json::{self, Value}; + +use core::{PackageId, Profile, Target}; + +pub trait Message: ser::Serialize { + fn reason(&self) -> &str; +} + +pub fn emit(t: &T) { + let mut json: Value = serde_json::to_value(t).unwrap(); + json["reason"] = json!(t.reason()); + println!("{}", json); +} + +#[derive(Serialize)] +pub struct FromCompiler<'a> { + pub package_id: &'a PackageId, + pub target: &'a Target, + pub message: serde_json::Value, +} + +impl<'a> Message for FromCompiler<'a> { + fn reason(&self) -> &str { + "compiler-message" + } +} + +#[derive(Serialize)] +pub struct Artifact<'a> { + pub package_id: &'a PackageId, + pub target: &'a Target, + pub profile: &'a Profile, + pub features: Vec, + pub filenames: 
Vec, + pub fresh: bool, +} + +impl<'a> Message for Artifact<'a> { + fn reason(&self) -> &str { + "compiler-artifact" + } +} + +#[derive(Serialize)] +pub struct BuildScript<'a> { + pub package_id: &'a PackageId, + pub linked_libs: &'a [String], + pub linked_paths: &'a [String], + pub cfgs: &'a [String], + pub env: &'a [(String, String)], +} + +impl<'a> Message for BuildScript<'a> { + fn reason(&self) -> &str { + "build-script-executed" + } +} diff --git a/src/cargo/util/mod.rs b/src/cargo/util/mod.rs new file mode 100644 index 000000000..2d9505d9a --- /dev/null +++ b/src/cargo/util/mod.rs @@ -0,0 +1,44 @@ +pub use self::cfg::{Cfg, CfgExpr}; +pub use self::config::{homedir, Config, ConfigValue}; +pub use self::dependency_queue::{DependencyQueue, Dirty, Fresh, Freshness}; +pub use self::errors::{CargoError, CargoResult, CargoResultExt, CliResult, Test}; +pub use self::errors::{CargoTestError, CliError, ProcessError}; +pub use self::errors::{internal, process_error}; +pub use self::flock::{FileLock, Filesystem}; +pub use self::graph::Graph; +pub use self::hex::{short_hash, to_hex, hash_u64}; +pub use self::lev_distance::lev_distance; +pub use self::paths::{dylib_path, join_paths, bytes2path, path2bytes}; +pub use self::paths::{dylib_path_envvar, normalize_path, without_prefix}; +pub use self::process_builder::{process, ProcessBuilder}; +pub use self::rustc::Rustc; +pub use self::sha256::Sha256; +pub use self::to_semver::ToSemver; +pub use self::to_url::ToUrl; +pub use self::vcs::{FossilRepo, GitRepo, HgRepo, PijulRepo}; +pub use self::read2::read2; +pub use self::progress::Progress; + +pub mod config; +pub mod errors; +pub mod graph; +pub mod hex; +pub mod important_paths; +pub mod job; +pub mod lev_distance; +pub mod machine_message; +pub mod network; +pub mod paths; +pub mod process_builder; +pub mod profile; +pub mod to_semver; +pub mod to_url; +pub mod toml; +mod cfg; +mod dependency_queue; +mod rustc; +mod sha256; +mod vcs; +mod flock; +mod read2; +mod progress; diff --git a/src/cargo/util/network.rs b/src/cargo/util/network.rs new file mode 100644 index 000000000..e789a929d --- /dev/null +++ b/src/cargo/util/network.rs @@ -0,0 +1,106 @@ +use curl; +use git2; + +use failure::Error; + +use util::Config; +use util::errors::{CargoResult, HttpNot200}; + +fn maybe_spurious(err: &Error) -> bool { + for e in err.causes() { + if let Some(git_err) = e.downcast_ref::() { + match git_err.class() { + git2::ErrorClass::Net | git2::ErrorClass::Os => return true, + _ => (), + } + } + if let Some(curl_err) = e.downcast_ref::() { + if curl_err.is_couldnt_connect() || curl_err.is_couldnt_resolve_proxy() + || curl_err.is_couldnt_resolve_host() + || curl_err.is_operation_timedout() || curl_err.is_recv_error() + { + return true; + } + } + if let Some(not_200) = e.downcast_ref::() { + if 500 <= not_200.code && not_200.code < 600 { + return true; + } + } + } + false +} + +/// Wrapper method for network call retry logic. +/// +/// Retry counts provided by Config object `net.retry`. Config shell outputs +/// a warning on per retry. +/// +/// Closure must return a `CargoResult`. 
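+/// An error counts as retryable when `maybe_spurious` above says so: git
+/// errors of class `Net` or `Os`, curl connect/resolve/timeout/recv
+/// failures, and HTTP 5xx responses.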
+///
+/// # Examples
+///
+/// ```ignore
+/// use util::network;
+/// let cargo_result = network::with_retry(&config, || something.download());
+/// ```
+pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
+where
+    F: FnMut() -> CargoResult<T>,
+{
+    let mut remaining = config.net_retry()?;
+    loop {
+        match callback() {
+            Ok(ret) => return Ok(ret),
+            Err(ref e) if maybe_spurious(e) && remaining > 0 => {
+                let msg = format!(
+                    "spurious network error ({} tries \
+                     remaining): {}",
+                    remaining, e
+                );
+                config.shell().warn(msg)?;
+                remaining -= 1;
+            }
+            // TODO: impl From so the explicit `.into()` is unnecessary here
+            Err(e) => return Err(e.into()),
+        }
+    }
+}
+
+#[test]
+fn with_retry_repeats_the_call_then_works() {
+    // Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
+    let error1 = HttpNot200 {
+        code: 501,
+        url: "Uri".to_string(),
+    }.into();
+    let error2 = HttpNot200 {
+        code: 502,
+        url: "Uri".to_string(),
+    }.into();
+    let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
+    let config = Config::default().unwrap();
+    let result = with_retry(&config, || results.pop().unwrap());
+    assert_eq!(result.unwrap(), ())
+}
+
+#[test]
+fn with_retry_finds_nested_spurious_errors() {
+    use util::CargoError;
+
+    // Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
+    // String error messages are not considered spurious
+    let error1 = CargoError::from(HttpNot200 {
+        code: 501,
+        url: "Uri".to_string(),
+    });
+    let error1 = CargoError::from(error1.context("A non-spurious wrapping err"));
+    let error2 = CargoError::from(HttpNot200 {
+        code: 502,
+        url: "Uri".to_string(),
+    });
+    let error2 = CargoError::from(error2.context("A second chained error"));
+    let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
+    let config = Config::default().unwrap();
+    let result = with_retry(&config, || results.pop().unwrap());
+    assert_eq!(result.unwrap(), ())
+}
diff --git a/src/cargo/util/paths.rs b/src/cargo/util/paths.rs
new file mode 100644
index 000000000..7cf540f8f
--- /dev/null
+++ b/src/cargo/util/paths.rs
@@ -0,0 +1,261 @@
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fs::{self, File, OpenOptions};
+use std::io;
+use std::io::prelude::*;
+use std::path::{Component, Path, PathBuf};
+
+use util::{internal, CargoResult};
+use util::errors::{CargoError, CargoResultExt, Internal};
+
+pub fn join_paths<T: AsRef<OsStr>>(paths: &[T], env: &str) -> CargoResult<OsString> {
+    let err = match env::join_paths(paths.iter()) {
+        Ok(paths) => return Ok(paths),
+        Err(e) => e,
+    };
+    let paths = paths.iter().map(Path::new).collect::<Vec<_>>();
+    let err = CargoError::from(err);
+    let explain = Internal::new(format_err!("failed to join path array: {:?}", paths));
+    let err = CargoError::from(err.context(explain));
+    let more_explain = format!(
+        "failed to join search paths together\n\
+         Does ${} have an unterminated quote character?",
+        env
+    );
+    Err(err.context(more_explain).into())
+}
+
+pub fn dylib_path_envvar() -> &'static str {
+    if cfg!(windows) {
+        "PATH"
+    } else if cfg!(target_os = "macos") {
+        "DYLD_LIBRARY_PATH"
+    } else {
+        "LD_LIBRARY_PATH"
+    }
+}
+
+pub fn dylib_path() -> Vec<PathBuf> {
+    match env::var_os(dylib_path_envvar()) {
+        Some(var) => env::split_paths(&var).collect(),
+        None => Vec::new(),
+    }
+}
+
+pub fn normalize_path(path: &Path) -> PathBuf {
+    let mut components = path.components().peekable();
+    let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
+        components.next();
+        PathBuf::from(c.as_os_str())
+    } else {
+        PathBuf::new()
+    };
+
+    for component in components {
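+        // Note: this is purely lexical normalization. `a/b/../c` becomes
+        // `a/c` without consulting the filesystem, so `..` through a
+        // symlink is not resolved the way `fs::canonicalize` would do it.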
match component { + Component::Prefix(..) => unreachable!(), + Component::RootDir => { + ret.push(component.as_os_str()); + } + Component::CurDir => {} + Component::ParentDir => { + ret.pop(); + } + Component::Normal(c) => { + ret.push(c); + } + } + } + ret +} + +pub fn without_prefix<'a>(long_path: &'a Path, prefix: &'a Path) -> Option<&'a Path> { + let mut a = long_path.components(); + let mut b = prefix.components(); + loop { + match b.next() { + Some(y) => match a.next() { + Some(x) if x == y => continue, + _ => return None, + }, + None => return Some(a.as_path()), + } + } +} + +pub fn read(path: &Path) -> CargoResult { + match String::from_utf8(read_bytes(path)?) { + Ok(s) => Ok(s), + Err(_) => bail!("path at `{}` was not valid utf-8", path.display()), + } +} + +pub fn read_bytes(path: &Path) -> CargoResult> { + let res = (|| -> CargoResult<_> { + let mut ret = Vec::new(); + let mut f = File::open(path)?; + if let Ok(m) = f.metadata() { + ret.reserve(m.len() as usize + 1); + } + f.read_to_end(&mut ret)?; + Ok(ret) + })() + .chain_err(|| format!("failed to read `{}`", path.display()))?; + Ok(res) +} + +pub fn write(path: &Path, contents: &[u8]) -> CargoResult<()> { + (|| -> CargoResult<()> { + let mut f = File::create(path)?; + f.write_all(contents)?; + Ok(()) + })() + .chain_err(|| format!("failed to write `{}`", path.display()))?; + Ok(()) +} + +pub fn append(path: &Path, contents: &[u8]) -> CargoResult<()> { + (|| -> CargoResult<()> { + let mut f = OpenOptions::new() + .write(true) + .append(true) + .create(true) + .open(path)?; + + f.write_all(contents)?; + Ok(()) + })() + .chain_err(|| internal(format!("failed to write `{}`", path.display())))?; + Ok(()) +} + +#[cfg(unix)] +pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { + use std::os::unix::prelude::*; + Ok(path.as_os_str().as_bytes()) +} +#[cfg(windows)] +pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { + match path.as_os_str().to_str() { + Some(s) => Ok(s.as_bytes()), + None => Err(format_err!("invalid non-unicode path: {}", path.display())), + } +} + +#[cfg(unix)] +pub fn bytes2path(bytes: &[u8]) -> CargoResult { + use std::os::unix::prelude::*; + use std::ffi::OsStr; + Ok(PathBuf::from(OsStr::from_bytes(bytes))) +} +#[cfg(windows)] +pub fn bytes2path(bytes: &[u8]) -> CargoResult { + use std::str; + match str::from_utf8(bytes) { + Ok(s) => Ok(PathBuf::from(s)), + Err(..) => Err(format_err!("invalid non-unicode path")), + } +} + +pub fn ancestors(path: &Path) -> PathAncestors { + PathAncestors::new(path) +} + +pub struct PathAncestors<'a> { + current: Option<&'a Path>, + stop_at: Option, +} + +impl<'a> PathAncestors<'a> { + fn new(path: &Path) -> PathAncestors { + PathAncestors { + current: Some(path), + //HACK: avoid reading `~/.cargo/config` when testing Cargo itself. 
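+            // Illustrative example (hypothetical value): with
+            // `__CARGO_TEST_ROOT=/tmp/ct`, iterating from `/tmp/ct/pkg/sub`
+            // yields `/tmp/ct/pkg/sub`, `/tmp/ct/pkg`, `/tmp/ct`, and then
+            // stops rather than walking up to `/`.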
+ stop_at: env::var("__CARGO_TEST_ROOT").ok().map(PathBuf::from), + } + } +} + +impl<'a> Iterator for PathAncestors<'a> { + type Item = &'a Path; + + fn next(&mut self) -> Option<&'a Path> { + if let Some(path) = self.current { + self.current = path.parent(); + + if let Some(ref stop_at) = self.stop_at { + if path == stop_at { + self.current = None; + } + } + + Some(path) + } else { + None + } + } +} + +pub fn remove_dir_all>(p: P) -> CargoResult<()> { + _remove_dir_all(p.as_ref()) +} + +fn _remove_dir_all(p: &Path) -> CargoResult<()> { + if p.symlink_metadata()?.file_type().is_symlink() { + return remove_file(p); + } + let entries = p.read_dir() + .chain_err(|| format!("failed to read directory `{}`", p.display()))?; + for entry in entries { + let entry = entry?; + let path = entry.path(); + if entry.file_type()?.is_dir() { + remove_dir_all(&path)?; + } else { + remove_file(&path)?; + } + } + remove_dir(&p) +} + +pub fn remove_dir>(p: P) -> CargoResult<()> { + _remove_dir(p.as_ref()) +} + +fn _remove_dir(p: &Path) -> CargoResult<()> { + fs::remove_dir(p).chain_err(|| format!("failed to remove directory `{}`", p.display()))?; + Ok(()) +} + +pub fn remove_file>(p: P) -> CargoResult<()> { + _remove_file(p.as_ref()) +} + +fn _remove_file(p: &Path) -> CargoResult<()> { + let mut err = match fs::remove_file(p) { + Ok(()) => return Ok(()), + Err(e) => e, + }; + + if err.kind() == io::ErrorKind::PermissionDenied { + if set_not_readonly(p).unwrap_or(false) { + match fs::remove_file(p) { + Ok(()) => return Ok(()), + Err(e) => err = e, + } + } + } + + Err(err).chain_err(|| format!("failed to remove file `{}`", p.display()))?; + Ok(()) +} + +fn set_not_readonly(p: &Path) -> io::Result { + let mut perms = p.metadata()?.permissions(); + if !perms.readonly() { + return Ok(false); + } + perms.set_readonly(false); + fs::set_permissions(p, perms)?; + Ok(true) +} diff --git a/src/cargo/util/process_builder.rs b/src/cargo/util/process_builder.rs new file mode 100644 index 000000000..bf70044b8 --- /dev/null +++ b/src/cargo/util/process_builder.rs @@ -0,0 +1,345 @@ +use std::collections::HashMap; +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fmt; +use std::path::Path; +use std::process::{Command, Output, Stdio}; + +use jobserver::Client; +use shell_escape::escape; + +use util::{process_error, CargoError, CargoResult, CargoResultExt, read2}; + +/// A builder object for an external process, similar to `std::process::Command`. +#[derive(Clone, Debug)] +pub struct ProcessBuilder { + /// The program to execute. + program: OsString, + /// A list of arguments to pass to the program. + args: Vec, + /// Any environment variables that should be set for the program. + env: HashMap>, + /// Which directory to run the program from. + cwd: Option, + /// The `make` jobserver. See the [jobserver crate][jobserver_docs] for + /// more information. + /// + /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ + jobserver: Option, +} + +impl fmt::Display for ProcessBuilder { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "`{}", self.program.to_string_lossy())?; + + for arg in &self.args { + write!(f, " {}", escape(arg.to_string_lossy()))?; + } + + write!(f, "`") + } +} + +impl ProcessBuilder { + /// (chainable) Set the executable for the process. + pub fn program>(&mut self, program: T) -> &mut ProcessBuilder { + self.program = program.as_ref().to_os_string(); + self + } + + /// (chainable) Add an arg to the args list. 
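+    ///
+    /// # Example
+    ///
+    /// A sketch of typical chained use (values are illustrative, not taken
+    /// from a real call site):
+    ///
+    /// ```ignore
+    /// let mut cmd = process("rustc");
+    /// cmd.arg("-vV").cwd("/tmp");
+    /// assert_eq!(cmd.get_args(), &[OsString::from("-vV")]);
+    /// ```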
+ pub fn arg>(&mut self, arg: T) -> &mut ProcessBuilder { + self.args.push(arg.as_ref().to_os_string()); + self + } + + /// (chainable) Add many args to the args list. + pub fn args>(&mut self, arguments: &[T]) -> &mut ProcessBuilder { + self.args + .extend(arguments.iter().map(|t| t.as_ref().to_os_string())); + self + } + + /// (chainable) Replace args with new args list + pub fn args_replace>(&mut self, arguments: &[T]) -> &mut ProcessBuilder { + self.args = arguments + .iter() + .map(|t| t.as_ref().to_os_string()) + .collect(); + self + } + + /// (chainable) Set the current working directory of the process + pub fn cwd>(&mut self, path: T) -> &mut ProcessBuilder { + self.cwd = Some(path.as_ref().to_os_string()); + self + } + + /// (chainable) Set an environment variable for the process. + pub fn env>(&mut self, key: &str, val: T) -> &mut ProcessBuilder { + self.env + .insert(key.to_string(), Some(val.as_ref().to_os_string())); + self + } + + /// (chainable) Unset an environment variable for the process. + pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder { + self.env.insert(key.to_string(), None); + self + } + + /// Get the executable name. + pub fn get_program(&self) -> &OsString { + &self.program + } + + /// Get the program arguments + pub fn get_args(&self) -> &[OsString] { + &self.args + } + + /// Get the current working directory for the process + pub fn get_cwd(&self) -> Option<&Path> { + self.cwd.as_ref().map(Path::new) + } + + /// Get an environment variable as the process will see it (will inherit from environment + /// unless explicitally unset). + pub fn get_env(&self, var: &str) -> Option { + self.env + .get(var) + .cloned() + .or_else(|| Some(env::var_os(var))) + .and_then(|s| s) + } + + /// Get all environment variables explicitally set or unset for the process (not inherited + /// vars). + pub fn get_envs(&self) -> &HashMap> { + &self.env + } + + /// Set the `make` jobserver. See the [jobserver crate][jobserver_docs] for + /// more information. + /// + /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ + pub fn inherit_jobserver(&mut self, jobserver: &Client) -> &mut Self { + self.jobserver = Some(jobserver.clone()); + self + } + + /// Run the process, waiting for completion, and mapping non-success exit codes to an error. + pub fn exec(&self) -> CargoResult<()> { + let mut command = self.build_command(); + let exit = command.status().chain_err(|| { + process_error( + &format!("could not execute process `{}`", self.debug_string()), + None, + None, + ) + })?; + + if exit.success() { + Ok(()) + } else { + Err(process_error( + &format!( + "process didn't exit successfully: `{}`", + self.debug_string() + ), + Some(&exit), + None, + ).into()) + } + } + + /// On unix, executes the process using the unix syscall `execvp`, which will block this + /// process, and will only return if there is an error. On windows this is a synonym for + /// `exec`. + #[cfg(unix)] + pub fn exec_replace(&self) -> CargoResult<()> { + use std::os::unix::process::CommandExt; + + let mut command = self.build_command(); + let error = command.exec(); + Err(CargoError::from(error) + .context(process_error( + &format!("could not execute process `{}`", self.debug_string()), + None, + None, + )) + .into()) + } + + /// On unix, executes the process using the unix syscall `execvp`, which will block this + /// process, and will only return if there is an error. On windows this is a synonym for + /// `exec`. 
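+    ///
+    /// # Example
+    ///
+    /// A sketch (the external `cargo-foo` subcommand is hypothetical):
+    ///
+    /// ```ignore
+    /// // On unix the current process image is replaced; on windows this
+    /// // instead runs the child to completion.
+    /// process("cargo-foo").arg("foo").exec_replace()?;
+    /// ```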
+ #[cfg(windows)] + pub fn exec_replace(&self) -> CargoResult<()> { + self.exec() + } + + /// Execute the process, returning the stdio output, or an error if non-zero exit status. + pub fn exec_with_output(&self) -> CargoResult { + let mut command = self.build_command(); + + let output = command.output().chain_err(|| { + process_error( + &format!("could not execute process `{}`", self.debug_string()), + None, + None, + ) + })?; + + if output.status.success() { + Ok(output) + } else { + Err(process_error( + &format!( + "process didn't exit successfully: `{}`", + self.debug_string() + ), + Some(&output.status), + Some(&output), + ).into()) + } + } + + /// Execute a command, passing each line of stdout and stderr to the supplied callbacks, which + /// can mutate the string data. + /// + /// If any invocations of these function return an error, it will be propagated. + /// + /// Optionally, output can be passed to errors using `print_output` + pub fn exec_with_streaming( + &self, + on_stdout_line: &mut FnMut(&str) -> CargoResult<()>, + on_stderr_line: &mut FnMut(&str) -> CargoResult<()>, + print_output: bool, + ) -> CargoResult { + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + + let mut cmd = self.build_command(); + cmd.stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .stdin(Stdio::null()); + + let mut callback_error = None; + let status = (|| { + let mut child = cmd.spawn()?; + let out = child.stdout.take().unwrap(); + let err = child.stderr.take().unwrap(); + read2(out, err, &mut |is_out, data, eof| { + let idx = if eof { + data.len() + } else { + match data.iter().rposition(|b| *b == b'\n') { + Some(i) => i + 1, + None => return, + } + }; + let data = data.drain(..idx); + let dst = if is_out { &mut stdout } else { &mut stderr }; + let start = dst.len(); + dst.extend(data); + for line in String::from_utf8_lossy(&dst[start..]).lines() { + if callback_error.is_some() { + break; + } + let callback_result = if is_out { + on_stdout_line(line) + } else { + on_stderr_line(line) + }; + if let Err(e) = callback_result { + callback_error = Some(e); + } + } + })?; + child.wait() + })() + .chain_err(|| { + process_error( + &format!("could not execute process `{}`", self.debug_string()), + None, + None, + ) + })?; + let output = Output { + stdout, + stderr, + status, + }; + + { + let to_print = if print_output { Some(&output) } else { None }; + if !output.status.success() { + return Err(process_error( + &format!( + "process didn't exit successfully: `{}`", + self.debug_string() + ), + Some(&output.status), + to_print, + ).into()); + } else if let Some(e) = callback_error { + let cx = process_error( + &format!("failed to parse process output: `{}`", self.debug_string()), + Some(&output.status), + to_print, + ); + return Err(CargoError::from(e).context(cx).into()); + } + } + + Ok(output) + } + + /// Converts ProcessBuilder into a `std::process::Command`, and handles the jobserver if + /// present. + pub fn build_command(&self) -> Command { + let mut command = Command::new(&self.program); + if let Some(cwd) = self.get_cwd() { + command.current_dir(cwd); + } + for arg in &self.args { + command.arg(arg); + } + for (k, v) in &self.env { + match *v { + Some(ref v) => { + command.env(k, v); + } + None => { + command.env_remove(k); + } + } + } + if let Some(ref c) = self.jobserver { + c.configure(&mut command); + } + command + } + + /// Get the command line for the process as a string. 
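+    ///
+    /// Unlike the `Display` impl above, the result has no surrounding
+    /// backticks and the arguments are not shell-escaped; it is only meant
+    /// for human-readable error messages. (Illustrative: the builder
+    /// `process("rustc").arg("-vV")` yields the string `rustc -vV`.)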
+ fn debug_string(&self) -> String { + let mut program = format!("{}", self.program.to_string_lossy()); + for arg in &self.args { + program.push(' '); + program.push_str(&format!("{}", arg.to_string_lossy())); + } + program + } +} + +/// A helper function to create a `ProcessBuilder`. +pub fn process>(cmd: T) -> ProcessBuilder { + ProcessBuilder { + program: cmd.as_ref().to_os_string(), + args: Vec::new(), + cwd: None, + env: HashMap::new(), + jobserver: None, + } +} diff --git a/src/cargo/util/profile.rs b/src/cargo/util/profile.rs new file mode 100644 index 000000000..7d32adc3f --- /dev/null +++ b/src/cargo/util/profile.rs @@ -0,0 +1,81 @@ +use std::env; +use std::fmt; +use std::mem; +use std::time; +use std::iter::repeat; +use std::cell::RefCell; + +thread_local!(static PROFILE_STACK: RefCell> = RefCell::new(Vec::new())); +thread_local!(static MESSAGES: RefCell> = RefCell::new(Vec::new())); + +type Message = (usize, u64, String); + +pub struct Profiler { + desc: String, +} + +fn enabled_level() -> Option { + env::var("CARGO_PROFILE").ok().and_then(|s| s.parse().ok()) +} + +pub fn start(desc: T) -> Profiler { + if enabled_level().is_none() { + return Profiler { + desc: String::new(), + }; + } + + PROFILE_STACK.with(|stack| stack.borrow_mut().push(time::Instant::now())); + + Profiler { + desc: desc.to_string(), + } +} + +impl Drop for Profiler { + fn drop(&mut self) { + let enabled = match enabled_level() { + Some(i) => i, + None => return, + }; + + let start = PROFILE_STACK.with(|stack| stack.borrow_mut().pop().unwrap()); + let duration = start.elapsed(); + let duration_ms = + duration.as_secs() * 1000 + u64::from(duration.subsec_nanos() / 1_000_000); + + let stack_len = PROFILE_STACK.with(|stack| stack.borrow().len()); + if stack_len == 0 { + fn print(lvl: usize, msgs: &[Message], enabled: usize) { + if lvl > enabled { + return; + } + let mut last = 0; + for (i, &(l, time, ref msg)) in msgs.iter().enumerate() { + if l != lvl { + continue; + } + println!( + "{} {:6}ms - {}", + repeat(" ").take(lvl + 1).collect::(), + time, + msg + ); + + print(lvl + 1, &msgs[last..i], enabled); + last = i; + } + } + MESSAGES.with(|msgs_rc| { + let mut msgs = msgs_rc.borrow_mut(); + msgs.push((0, duration_ms, mem::replace(&mut self.desc, String::new()))); + print(0, &msgs, enabled); + }); + } else { + MESSAGES.with(|msgs| { + let msg = mem::replace(&mut self.desc, String::new()); + msgs.borrow_mut().push((stack_len, duration_ms, msg)); + }); + } + } +} diff --git a/src/cargo/util/progress.rs b/src/cargo/util/progress.rs new file mode 100644 index 000000000..0a5af5b5e --- /dev/null +++ b/src/cargo/util/progress.rs @@ -0,0 +1,136 @@ +use std::cmp; +use std::env; +use std::iter; +use std::time::{Duration, Instant}; + +use core::shell::Verbosity; +use util::{CargoResult, Config}; + +pub struct Progress<'cfg> { + state: Option>, +} + +struct State<'cfg> { + config: &'cfg Config, + width: usize, + first: bool, + last_update: Instant, + name: String, + done: bool, +} + +impl<'cfg> Progress<'cfg> { + pub fn new(name: &str, cfg: &'cfg Config) -> Progress<'cfg> { + // report no progress when -q (for quiet) or TERM=dumb are set + let dumb = match env::var("TERM") { + Ok(term) => term == "dumb", + Err(_) => false, + }; + if cfg.shell().verbosity() == Verbosity::Quiet || dumb { + return Progress { state: None }; + } + + Progress { + state: cfg.shell().err_width().map(|n| State { + config: cfg, + width: cmp::min(n, 80), + first: true, + last_update: Instant::now(), + name: name.to_string(), + done: false, + }), + } + } 
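+
+    // Typical driver loop, as a sketch only (`units` and the surrounding
+    // code are hypothetical, not Cargo's real call site):
+    //
+    //     let mut progress = Progress::new("Building", config);
+    //     for (i, unit) in units.iter().enumerate() {
+    //         progress.tick(i, units.len())?;
+    //         // ... compile `unit` ...
+    //     }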
+ + pub fn tick(&mut self, cur: usize, max: usize) -> CargoResult<()> { + match self.state { + Some(ref mut s) => s.tick(cur, max), + None => Ok(()), + } + } +} + +impl<'cfg> State<'cfg> { + fn tick(&mut self, cur: usize, max: usize) -> CargoResult<()> { + if self.done { + return Ok(()); + } + + // Don't update too often as it can cause excessive performance loss + // just putting stuff onto the terminal. We also want to avoid + // flickering by not drawing anything that goes away too quickly. As a + // result we've got two branches here: + // + // 1. If we haven't drawn anything, we wait for a period of time to + // actually start drawing to the console. This ensures that + // short-lived operations don't flicker on the console. Currently + // there's a 500ms delay to when we first draw something. + // 2. If we've drawn something, then we rate limit ourselves to only + // draw to the console every so often. Currently there's a 100ms + // delay between updates. + if self.first { + let delay = Duration::from_millis(500); + if self.last_update.elapsed() < delay { + return Ok(()); + } + self.first = false; + } else { + let interval = Duration::from_millis(100); + if self.last_update.elapsed() < interval { + return Ok(()); + } + } + self.last_update = Instant::now(); + + // Render the percentage at the far right and then figure how long the + // progress bar is + let pct = (cur as f64) / (max as f64); + let pct = if !pct.is_finite() { 0.0 } else { pct }; + let stats = format!(" {:6.02}%", pct * 100.0); + let extra_len = stats.len() + 2 /* [ and ] */ + 15 /* status header */; + let display_width = match self.width.checked_sub(extra_len) { + Some(n) => n, + None => return Ok(()), + }; + let mut string = String::from("["); + let hashes = display_width as f64 * pct; + let hashes = hashes as usize; + + // Draw the `===>` + if hashes > 0 { + for _ in 0..hashes - 1 { + string.push_str("="); + } + if cur == max { + self.done = true; + string.push_str("="); + } else { + string.push_str(">"); + } + } + + // Draw the empty space we have left to do + for _ in 0..(display_width - hashes) { + string.push_str(" "); + } + string.push_str("]"); + string.push_str(&stats); + + // Write out a pretty header, then the progress bar itself, and then + // return back to the beginning of the line for the next print. 
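+        // (The trailing `\r` below returns the cursor to column 0 without
+        // emitting a newline, so the next `tick` simply overwrites this bar
+        // in place instead of scrolling the terminal.)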
+ self.config.shell().status_header(&self.name)?; + write!(self.config.shell().err(), "{}\r", string)?; + Ok(()) + } +} + +fn clear(width: usize, config: &Config) { + let blank = iter::repeat(" ").take(width).collect::(); + drop(write!(config.shell().err(), "{}\r", blank)); +} + +impl<'cfg> Drop for State<'cfg> { + fn drop(&mut self) { + clear(self.width, self.config); + } +} diff --git a/src/cargo/util/read2.rs b/src/cargo/util/read2.rs new file mode 100644 index 000000000..13a50a724 --- /dev/null +++ b/src/cargo/util/read2.rs @@ -0,0 +1,185 @@ +pub use self::imp::read2; + +#[cfg(unix)] +mod imp { + use std::io::prelude::*; + use std::io; + use std::mem; + use std::os::unix::prelude::*; + use std::process::{ChildStderr, ChildStdout}; + use libc; + + pub fn read2( + mut out_pipe: ChildStdout, + mut err_pipe: ChildStderr, + data: &mut FnMut(bool, &mut Vec, bool), + ) -> io::Result<()> { + unsafe { + libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); + libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); + } + + let mut out_done = false; + let mut err_done = false; + let mut out = Vec::new(); + let mut err = Vec::new(); + + let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() }; + fds[0].fd = out_pipe.as_raw_fd(); + fds[0].events = libc::POLLIN; + fds[1].fd = err_pipe.as_raw_fd(); + fds[1].events = libc::POLLIN; + let mut nfds = 2; + let mut errfd = 1; + + while nfds > 0 { + // wait for either pipe to become readable using `select` + let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) }; + if r == -1 { + let err = io::Error::last_os_error(); + if err.kind() == io::ErrorKind::Interrupted { + continue; + } + return Err(err); + } + + // Read as much as we can from each pipe, ignoring EWOULDBLOCK or + // EAGAIN. If we hit EOF, then this will happen because the underlying + // reader will return Ok(0), in which case we'll see `Ok` ourselves. In + // this case we flip the other fd back into blocking mode and read + // whatever's leftover on that file descriptor. + let handle = |res: io::Result<_>| match res { + Ok(_) => Ok(true), + Err(e) => { + if e.kind() == io::ErrorKind::WouldBlock { + Ok(false) + } else { + Err(e) + } + } + }; + if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? { + err_done = true; + nfds -= 1; + } + data(false, &mut err, err_done); + if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? 
{ + out_done = true; + fds[0].fd = err_pipe.as_raw_fd(); + errfd = 0; + nfds -= 1; + } + data(true, &mut out, out_done); + } + Ok(()) + } +} + +#[cfg(windows)] +mod imp { + extern crate miow; + extern crate winapi; + + use std::io; + use std::os::windows::prelude::*; + use std::process::{ChildStderr, ChildStdout}; + use std::slice; + + use self::miow::iocp::{CompletionPort, CompletionStatus}; + use self::miow::pipe::NamedPipe; + use self::miow::Overlapped; + use self::winapi::shared::winerror::ERROR_BROKEN_PIPE; + + struct Pipe<'a> { + dst: &'a mut Vec, + overlapped: Overlapped, + pipe: NamedPipe, + done: bool, + } + + pub fn read2( + out_pipe: ChildStdout, + err_pipe: ChildStderr, + data: &mut FnMut(bool, &mut Vec, bool), + ) -> io::Result<()> { + let mut out = Vec::new(); + let mut err = Vec::new(); + + let port = CompletionPort::new(1)?; + port.add_handle(0, &out_pipe)?; + port.add_handle(1, &err_pipe)?; + + unsafe { + let mut out_pipe = Pipe::new(out_pipe, &mut out); + let mut err_pipe = Pipe::new(err_pipe, &mut err); + + out_pipe.read()?; + err_pipe.read()?; + + let mut status = [CompletionStatus::zero(), CompletionStatus::zero()]; + + while !out_pipe.done || !err_pipe.done { + for status in port.get_many(&mut status, None)? { + if status.token() == 0 { + out_pipe.complete(status); + data(true, out_pipe.dst, out_pipe.done); + out_pipe.read()?; + } else { + err_pipe.complete(status); + data(false, err_pipe.dst, err_pipe.done); + err_pipe.read()?; + } + } + } + + Ok(()) + } + } + + impl<'a> Pipe<'a> { + unsafe fn new(p: P, dst: &'a mut Vec) -> Pipe<'a> { + Pipe { + dst, + pipe: NamedPipe::from_raw_handle(p.into_raw_handle()), + overlapped: Overlapped::zero(), + done: false, + } + } + + unsafe fn read(&mut self) -> io::Result<()> { + let dst = slice_to_end(self.dst); + match self.pipe.read_overlapped(dst, self.overlapped.raw()) { + Ok(_) => Ok(()), + Err(e) => { + if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) { + self.done = true; + Ok(()) + } else { + Err(e) + } + } + } + } + + unsafe fn complete(&mut self, status: &CompletionStatus) { + let prev = self.dst.len(); + self.dst.set_len(prev + status.bytes_transferred() as usize); + if status.bytes_transferred() == 0 { + self.done = true; + } + } + } + + unsafe fn slice_to_end(v: &mut Vec) -> &mut [u8] { + if v.capacity() == 0 { + v.reserve(16); + } + if v.capacity() == v.len() { + v.reserve(1); + } + slice::from_raw_parts_mut( + v.as_mut_ptr().offset(v.len() as isize), + v.capacity() - v.len(), + ) + } +} diff --git a/src/cargo/util/rustc.rs b/src/cargo/util/rustc.rs new file mode 100644 index 000000000..147212048 --- /dev/null +++ b/src/cargo/util/rustc.rs @@ -0,0 +1,63 @@ +use std::path::PathBuf; + +use util::{self, internal, CargoResult, ProcessBuilder}; + +/// Information on the `rustc` executable +#[derive(Debug)] +pub struct Rustc { + /// The location of the exe + pub path: PathBuf, + /// An optional program that will be passed the path of the rust exe as its first argument, and + /// rustc args following this. + pub wrapper: Option, + /// Verbose version information (the output of `rustc -vV`) + pub verbose_version: String, + /// The host triple (arch-platform-OS), this comes from verbose_version. + pub host: String, +} + +impl Rustc { + /// Run the compiler at `path` to learn various pieces of information about + /// it, with an optional wrapper. + /// + /// If successful this function returns a description of the compiler along + /// with a list of its capabilities. 
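+    ///
+    /// # Example
+    ///
+    /// A sketch (assumes a `rustc` binary on `PATH`; error handling elided):
+    ///
+    /// ```ignore
+    /// let rustc = Rustc::new(PathBuf::from("rustc"), None)?;
+    /// assert!(rustc.verbose_version.starts_with("rustc"));
+    /// println!("host triple: {}", rustc.host);
+    /// ```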
+ pub fn new(path: PathBuf, wrapper: Option) -> CargoResult { + let mut cmd = util::process(&path); + cmd.arg("-vV"); + + let output = cmd.exec_with_output()?; + + let verbose_version = String::from_utf8(output.stdout) + .map_err(|_| internal("rustc -v didn't return utf8 output"))?; + + let host = { + let triple = verbose_version + .lines() + .find(|l| l.starts_with("host: ")) + .map(|l| &l[6..]) + .ok_or_else(|| internal("rustc -v didn't have a line for `host:`"))?; + triple.to_string() + }; + + Ok(Rustc { + path, + wrapper, + verbose_version, + host, + }) + } + + /// Get a process builder set up to use the found rustc version, with a wrapper if Some + pub fn process(&self) -> ProcessBuilder { + if let Some(ref wrapper) = self.wrapper { + let mut cmd = util::process(wrapper); + { + cmd.arg(&self.path); + } + cmd + } else { + util::process(&self.path) + } + } +} diff --git a/src/cargo/util/sha256.rs b/src/cargo/util/sha256.rs new file mode 100644 index 000000000..604bb2941 --- /dev/null +++ b/src/cargo/util/sha256.rs @@ -0,0 +1,23 @@ +extern crate crypto_hash; +use self::crypto_hash::{Algorithm, Hasher}; +use std::io::Write; + +pub struct Sha256(Hasher); + +impl Sha256 { + pub fn new() -> Sha256 { + let hasher = Hasher::new(Algorithm::SHA256); + Sha256(hasher) + } + + pub fn update(&mut self, bytes: &[u8]) { + let _ = self.0.write_all(bytes); + } + + pub fn finish(&mut self) -> [u8; 32] { + let mut ret = [0u8; 32]; + let data = self.0.finish(); + ret.copy_from_slice(&data[..]); + ret + } +} diff --git a/src/cargo/util/to_semver.rs b/src/cargo/util/to_semver.rs new file mode 100644 index 000000000..4ffd6e3c0 --- /dev/null +++ b/src/cargo/util/to_semver.rs @@ -0,0 +1,33 @@ +use semver::Version; +use util::errors::CargoResult; + +pub trait ToSemver { + fn to_semver(self) -> CargoResult; +} + +impl ToSemver for Version { + fn to_semver(self) -> CargoResult { + Ok(self) + } +} + +impl<'a> ToSemver for &'a str { + fn to_semver(self) -> CargoResult { + match Version::parse(self) { + Ok(v) => Ok(v), + Err(..) 
=> Err(format_err!("cannot parse '{}' as a semver", self)), + } + } +} + +impl<'a> ToSemver for &'a String { + fn to_semver(self) -> CargoResult { + (**self).to_semver() + } +} + +impl<'a> ToSemver for &'a Version { + fn to_semver(self) -> CargoResult { + Ok(self.clone()) + } +} diff --git a/src/cargo/util/to_url.rs b/src/cargo/util/to_url.rs new file mode 100644 index 000000000..664c2568d --- /dev/null +++ b/src/cargo/util/to_url.rs @@ -0,0 +1,23 @@ +use std::path::Path; + +use url::Url; + +use util::CargoResult; + +/// A type that can be converted to a Url +pub trait ToUrl { + /// Performs the conversion + fn to_url(self) -> CargoResult; +} + +impl<'a> ToUrl for &'a str { + fn to_url(self) -> CargoResult { + Url::parse(self).map_err(|s| format_err!("invalid url `{}`: {}", self, s)) + } +} + +impl<'a> ToUrl for &'a Path { + fn to_url(self) -> CargoResult { + Url::from_file_path(self).map_err(|()| format_err!("invalid path url `{}`", self.display())) + } +} diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs new file mode 100644 index 000000000..092ce8f7f --- /dev/null +++ b/src/cargo/util/toml/mod.rs @@ -0,0 +1,1354 @@ +use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; +use std::fmt; +use std::fs; +use std::path::{Path, PathBuf}; +use std::rc::Rc; +use std::str; + +use semver::{self, VersionReq}; +use serde::ser; +use serde::de::{self, Deserialize}; +use serde_ignored; +use toml; +use url::Url; + +use core::{GitReference, PackageIdSpec, Profiles, SourceId, WorkspaceConfig, WorkspaceRootConfig}; +use core::{Dependency, Manifest, PackageId, Summary, Target}; +use core::{EitherManifest, Epoch, Feature, Features, VirtualManifest}; +use core::dependency::{Kind, Platform}; +use core::manifest::{LibKind, Lto, ManifestMetadata, Profile}; +use sources::CRATES_IO; +use util::paths; +use util::{self, Config, ToUrl}; +use util::errors::{CargoError, CargoResult, CargoResultExt}; + +mod targets; +use self::targets::targets; + +pub fn read_manifest( + path: &Path, + source_id: &SourceId, + config: &Config, +) -> CargoResult<(EitherManifest, Vec)> { + trace!( + "read_manifest; path={}; source-id={}", + path.display(), + source_id + ); + let contents = paths::read(path)?; + + let ret = do_read_manifest(&contents, path, source_id, config) + .chain_err(|| format!("failed to parse manifest at `{}`", path.display()))?; + Ok(ret) +} + +fn do_read_manifest( + contents: &str, + manifest_file: &Path, + source_id: &SourceId, + config: &Config, +) -> CargoResult<(EitherManifest, Vec)> { + let package_root = manifest_file.parent().unwrap(); + + let toml = { + let pretty_filename = + util::without_prefix(manifest_file, config.cwd()).unwrap_or(manifest_file); + parse(contents, pretty_filename, config)? 
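+        // (Note: `parse`, defined below, falls back to a second, more
+        // lenient TOML parser so that manifests with a missing newline
+        // after a table header still load with a deprecation warning
+        // instead of failing outright.)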
+ }; + + let mut unused = BTreeSet::new(); + let manifest: TomlManifest = serde_ignored::deserialize(toml, |path| { + let mut key = String::new(); + stringify(&mut key, &path); + unused.insert(key); + })?; + + let manifest = Rc::new(manifest); + return if manifest.project.is_some() || manifest.package.is_some() { + let (mut manifest, paths) = + TomlManifest::to_real_manifest(&manifest, source_id, package_root, config)?; + for key in unused { + manifest.add_warning(format!("unused manifest key: {}", key)); + } + if !manifest.targets().iter().any(|t| !t.is_custom_build()) { + bail!( + "no targets specified in the manifest\n \ + either src/lib.rs, src/main.rs, a [lib] section, or \ + [[bin]] section must be present" + ) + } + Ok((EitherManifest::Real(manifest), paths)) + } else { + let (m, paths) = + TomlManifest::to_virtual_manifest(&manifest, source_id, package_root, config)?; + Ok((EitherManifest::Virtual(m), paths)) + }; + + fn stringify(dst: &mut String, path: &serde_ignored::Path) { + use serde_ignored::Path; + + match *path { + Path::Root => {} + Path::Seq { parent, index } => { + stringify(dst, parent); + if !dst.is_empty() { + dst.push('.'); + } + dst.push_str(&index.to_string()); + } + Path::Map { parent, ref key } => { + stringify(dst, parent); + if !dst.is_empty() { + dst.push('.'); + } + dst.push_str(key); + } + Path::Some { parent } + | Path::NewtypeVariant { parent } + | Path::NewtypeStruct { parent } => stringify(dst, parent), + } + } +} + +pub fn parse(toml: &str, file: &Path, config: &Config) -> CargoResult { + let first_error = match toml.parse() { + Ok(ret) => return Ok(ret), + Err(e) => e, + }; + + let mut second_parser = toml::de::Deserializer::new(toml); + second_parser.set_require_newline_after_table(false); + if let Ok(ret) = toml::Value::deserialize(&mut second_parser) { + let msg = format!( + "\ +TOML file found which contains invalid syntax and will soon not parse +at `{}`. + +The TOML spec requires newlines after table definitions (e.g. `[a] b = 1` is +invalid), but this file has a table header which does not have a newline after +it. 
A newline needs to be added and this warning will soon become a hard error +in the future.", + file.display() + ); + config.shell().warn(&msg)?; + return Ok(ret); + } + + let first_error = CargoError::from(first_error); + Err(first_error.context("could not parse input as TOML").into()) +} + +type TomlLibTarget = TomlTarget; +type TomlBinTarget = TomlTarget; +type TomlExampleTarget = TomlTarget; +type TomlTestTarget = TomlTarget; +type TomlBenchTarget = TomlTarget; + +#[derive(Debug, Serialize)] +#[serde(untagged)] +pub enum TomlDependency { + Simple(String), + Detailed(DetailedTomlDependency), +} + +impl<'de> de::Deserialize<'de> for TomlDependency { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + struct TomlDependencyVisitor; + + impl<'de> de::Visitor<'de> for TomlDependencyVisitor { + type Value = TomlDependency; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str( + "a version string like \"0.9.8\" or a \ + detailed dependency like { version = \"0.9.8\" }", + ) + } + + fn visit_str(self, s: &str) -> Result + where + E: de::Error, + { + Ok(TomlDependency::Simple(s.to_owned())) + } + + fn visit_map(self, map: V) -> Result + where + V: de::MapAccess<'de>, + { + let mvd = de::value::MapAccessDeserializer::new(map); + DetailedTomlDependency::deserialize(mvd).map(TomlDependency::Detailed) + } + } + + deserializer.deserialize_any(TomlDependencyVisitor) + } +} + +#[derive(Deserialize, Serialize, Clone, Debug, Default)] +#[serde(rename_all = "kebab-case")] +pub struct DetailedTomlDependency { + version: Option, + registry: Option, + registry_index: Option, + path: Option, + git: Option, + branch: Option, + tag: Option, + rev: Option, + features: Option>, + optional: Option, + default_features: Option, + #[serde(rename = "default_features")] default_features2: Option, + package: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct TomlManifest { + cargo_features: Option>, + package: Option>, + project: Option>, + profile: Option, + lib: Option, + bin: Option>, + example: Option>, + test: Option>, + bench: Option>, + dependencies: Option>, + dev_dependencies: Option>, + #[serde(rename = "dev_dependencies")] + dev_dependencies2: Option>, + build_dependencies: Option>, + #[serde(rename = "build_dependencies")] + build_dependencies2: Option>, + features: Option>>, + target: Option>, + replace: Option>, + patch: Option>>, + workspace: Option, + badges: Option>>, +} + +#[derive(Deserialize, Serialize, Clone, Debug, Default)] +pub struct TomlProfiles { + test: Option, + doc: Option, + bench: Option, + dev: Option, + release: Option, +} + +#[derive(Clone, Debug)] +pub struct TomlOptLevel(String); + +impl<'de> de::Deserialize<'de> for TomlOptLevel { + fn deserialize(d: D) -> Result + where + D: de::Deserializer<'de>, + { + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = TomlOptLevel; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("an optimization level") + } + + fn visit_i64(self, value: i64) -> Result + where + E: de::Error, + { + Ok(TomlOptLevel(value.to_string())) + } + + fn visit_str(self, value: &str) -> Result + where + E: de::Error, + { + if value == "s" || value == "z" { + Ok(TomlOptLevel(value.to_string())) + } else { + Err(E::custom(format!( + "must be an integer, `z`, or `s`, \ + but found: {}", + value + ))) + } + } + } + + d.deserialize_u32(Visitor) + } +} + +impl ser::Serialize for 
TomlOptLevel { + fn serialize(&self, serializer: S) -> Result + where + S: ser::Serializer, + { + match self.0.parse::() { + Ok(n) => n.serialize(serializer), + Err(_) => self.0.serialize(serializer), + } + } +} + +#[derive(Clone, Debug, Serialize)] +#[serde(untagged)] +pub enum U32OrBool { + U32(u32), + Bool(bool), +} + +impl<'de> de::Deserialize<'de> for U32OrBool { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = U32OrBool; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a boolean or an integer") + } + + fn visit_bool(self, b: bool) -> Result + where + E: de::Error, + { + Ok(U32OrBool::Bool(b)) + } + + fn visit_i64(self, u: i64) -> Result + where + E: de::Error, + { + Ok(U32OrBool::U32(u as u32)) + } + + fn visit_u64(self, u: u64) -> Result + where + E: de::Error, + { + Ok(U32OrBool::U32(u as u32)) + } + } + + deserializer.deserialize_any(Visitor) + } +} + +#[derive(Deserialize, Serialize, Clone, Debug, Default)] +pub struct TomlProfile { + #[serde(rename = "opt-level")] opt_level: Option, + lto: Option, + #[serde(rename = "codegen-units")] codegen_units: Option, + debug: Option, + #[serde(rename = "debug-assertions")] debug_assertions: Option, + rpath: Option, + panic: Option, + #[serde(rename = "overflow-checks")] overflow_checks: Option, + incremental: Option, +} + +#[derive(Clone, Debug, Serialize)] +#[serde(untagged)] +pub enum StringOrBool { + String(String), + Bool(bool), +} + +impl<'de> de::Deserialize<'de> for StringOrBool { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = StringOrBool; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a boolean or a string") + } + + fn visit_bool(self, b: bool) -> Result + where + E: de::Error, + { + Ok(StringOrBool::Bool(b)) + } + + fn visit_str(self, s: &str) -> Result + where + E: de::Error, + { + Ok(StringOrBool::String(s.to_string())) + } + } + + deserializer.deserialize_any(Visitor) + } +} + +#[derive(Clone, Debug, Serialize)] +#[serde(untagged)] +pub enum VecStringOrBool { + VecString(Vec), + Bool(bool), +} + +impl<'de> de::Deserialize<'de> for VecStringOrBool { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = VecStringOrBool; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a boolean or vector of strings") + } + + fn visit_seq(self, v: V) -> Result + where + V: de::SeqAccess<'de>, + { + let seq = de::value::SeqAccessDeserializer::new(v); + Vec::deserialize(seq).map(VecStringOrBool::VecString) + } + + fn visit_bool(self, b: bool) -> Result + where + E: de::Error, + { + Ok(VecStringOrBool::Bool(b)) + } + } + + deserializer.deserialize_any(Visitor) + } +} + +#[derive(Deserialize, Serialize, Clone, Debug)] +pub struct TomlProject { + name: String, + version: semver::Version, + authors: Option>, + build: Option, + links: Option, + exclude: Option>, + include: Option>, + publish: Option, + #[serde(rename = "publish-lockfile")] publish_lockfile: Option, + workspace: Option, + #[serde(rename = "im-a-teapot")] im_a_teapot: Option, + + // package metadata + description: Option, + homepage: Option, + documentation: Option, + readme: Option, + keywords: Option>, + categories: 
Option>, + license: Option, + #[serde(rename = "license-file")] license_file: Option, + repository: Option, + metadata: Option, + rust: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct TomlWorkspace { + members: Option>, + #[serde(rename = "default-members")] default_members: Option>, + exclude: Option>, +} + +impl TomlProject { + pub fn to_package_id(&self, source_id: &SourceId) -> CargoResult { + PackageId::new(&self.name, self.version.clone(), source_id) + } +} + +struct Context<'a, 'b> { + pkgid: Option<&'a PackageId>, + deps: &'a mut Vec, + source_id: &'a SourceId, + nested_paths: &'a mut Vec, + config: &'b Config, + warnings: &'a mut Vec, + platform: Option, + root: &'a Path, + features: &'a Features, +} + +impl TomlManifest { + pub fn prepare_for_publish(&self, config: &Config) -> CargoResult { + let mut package = self.package + .as_ref() + .or_else(|| self.project.as_ref()) + .unwrap() + .clone(); + package.workspace = None; + return Ok(TomlManifest { + package: Some(package), + project: None, + profile: self.profile.clone(), + lib: self.lib.clone(), + bin: self.bin.clone(), + example: self.example.clone(), + test: self.test.clone(), + bench: self.bench.clone(), + dependencies: map_deps(config, self.dependencies.as_ref())?, + dev_dependencies: map_deps( + config, + self.dev_dependencies + .as_ref() + .or_else(|| self.dev_dependencies2.as_ref()), + )?, + dev_dependencies2: None, + build_dependencies: map_deps( + config, + self.build_dependencies + .as_ref() + .or_else(|| self.build_dependencies2.as_ref()), + )?, + build_dependencies2: None, + features: self.features.clone(), + target: match self.target.as_ref().map(|target_map| { + target_map + .iter() + .map(|(k, v)| { + Ok(( + k.clone(), + TomlPlatform { + dependencies: map_deps(config, v.dependencies.as_ref())?, + dev_dependencies: map_deps( + config, + v.dev_dependencies + .as_ref() + .or_else(|| v.dev_dependencies2.as_ref()), + )?, + dev_dependencies2: None, + build_dependencies: map_deps( + config, + v.build_dependencies + .as_ref() + .or_else(|| v.build_dependencies2.as_ref()), + )?, + build_dependencies2: None, + }, + )) + }) + .collect() + }) { + Some(Ok(v)) => Some(v), + Some(Err(e)) => return Err(e), + None => None, + }, + replace: None, + patch: None, + workspace: None, + badges: self.badges.clone(), + cargo_features: self.cargo_features.clone(), + }); + + fn map_deps( + config: &Config, + deps: Option<&BTreeMap>, + ) -> CargoResult>> { + let deps = match deps { + Some(deps) => deps, + None => return Ok(None), + }; + let deps = deps.iter() + .map(|(k, v)| Ok((k.clone(), map_dependency(config, v)?))) + .collect::>>()?; + Ok(Some(deps)) + } + + fn map_dependency(config: &Config, dep: &TomlDependency) -> CargoResult { + match *dep { + TomlDependency::Detailed(ref d) => { + let mut d = d.clone(); + d.path.take(); // path dependencies become crates.io deps + // registry specifications are elaborated to the index URL + if let Some(registry) = d.registry.take() { + let src = SourceId::alt_registry(config, ®istry)?; + d.registry_index = Some(src.url().to_string()); + } + Ok(TomlDependency::Detailed(d)) + } + TomlDependency::Simple(ref s) => { + Ok(TomlDependency::Detailed(DetailedTomlDependency { + version: Some(s.clone()), + ..Default::default() + })) + } + } + } + } + + fn to_real_manifest( + me: &Rc, + source_id: &SourceId, + package_root: &Path, + config: &Config, + ) -> CargoResult<(Manifest, Vec)> { + let mut nested_paths = vec![]; + let mut warnings = vec![]; + let mut errors = vec![]; + + // Parse 
features first so they will be available when parsing other parts of the toml + let empty = Vec::new(); + let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); + let features = Features::new(&cargo_features, &mut warnings)?; + + let project = me.project.as_ref().or_else(|| me.package.as_ref()); + let project = project.ok_or_else(|| format_err!("no `package` section found"))?; + + let package_name = project.name.trim(); + if package_name.is_empty() { + bail!("package name cannot be an empty string") + } + + let pkgid = project.to_package_id(source_id)?; + + // If we have no lib at all, use the inferred lib if available + // If we have a lib with a path, we're done + // If we have a lib with no path, use the inferred lib or_else package name + let targets = targets( + me, + package_name, + package_root, + &project.build, + &mut warnings, + &mut errors, + )?; + + if targets.is_empty() { + debug!("manifest has no build targets"); + } + + if let Err(e) = unique_build_targets(&targets, package_root) { + warnings.push(format!( + "file found to be present in multiple \ + build targets: {}", + e + )); + } + + let mut deps = Vec::new(); + let replace; + let patch; + + { + let mut cx = Context { + pkgid: Some(&pkgid), + deps: &mut deps, + source_id, + nested_paths: &mut nested_paths, + config, + warnings: &mut warnings, + features: &features, + platform: None, + root: package_root, + }; + + fn process_dependencies( + cx: &mut Context, + new_deps: Option<&BTreeMap>, + kind: Option, + ) -> CargoResult<()> { + let dependencies = match new_deps { + Some(dependencies) => dependencies, + None => return Ok(()), + }; + for (n, v) in dependencies.iter() { + let dep = v.to_dependency(n, cx, kind)?; + cx.deps.push(dep); + } + + Ok(()) + } + + // Collect the deps + process_dependencies(&mut cx, me.dependencies.as_ref(), None)?; + let dev_deps = me.dev_dependencies + .as_ref() + .or_else(|| me.dev_dependencies2.as_ref()); + process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?; + let build_deps = me.build_dependencies + .as_ref() + .or_else(|| me.build_dependencies2.as_ref()); + process_dependencies(&mut cx, build_deps, Some(Kind::Build))?; + + for (name, platform) in me.target.iter().flat_map(|t| t) { + cx.platform = Some(name.parse()?); + process_dependencies(&mut cx, platform.dependencies.as_ref(), None)?; + let build_deps = platform + .build_dependencies + .as_ref() + .or_else(|| platform.build_dependencies2.as_ref()); + process_dependencies(&mut cx, build_deps, Some(Kind::Build))?; + let dev_deps = platform + .dev_dependencies + .as_ref() + .or_else(|| platform.dev_dependencies2.as_ref()); + process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?; + } + + replace = me.replace(&mut cx)?; + patch = me.patch(&mut cx)?; + } + + { + let mut names_sources = BTreeMap::new(); + for dep in &deps { + let name = dep.name(); + let prev = names_sources.insert(name, dep.source_id()); + if prev.is_some() && prev != Some(dep.source_id()) { + bail!( + "Dependency '{}' has different source paths depending on the build \ + target. 
Each dependency must have a single canonical source path \ + irrespective of build target.", + name + ); + } + } + } + + let exclude = project.exclude.clone().unwrap_or_default(); + let include = project.include.clone().unwrap_or_default(); + + let summary = Summary::new( + pkgid, + deps, + me.features.clone().unwrap_or_else(BTreeMap::new), + project.links.clone(), + )?; + let metadata = ManifestMetadata { + description: project.description.clone(), + homepage: project.homepage.clone(), + documentation: project.documentation.clone(), + readme: project.readme.clone(), + authors: project.authors.clone().unwrap_or_default(), + license: project.license.clone(), + license_file: project.license_file.clone(), + repository: project.repository.clone(), + keywords: project.keywords.clone().unwrap_or_default(), + categories: project.categories.clone().unwrap_or_default(), + badges: me.badges.clone().unwrap_or_default(), + links: project.links.clone(), + }; + + let workspace_config = match (me.workspace.as_ref(), project.workspace.as_ref()) { + (Some(config), None) => WorkspaceConfig::Root(WorkspaceRootConfig::new( + &package_root, + &config.members, + &config.default_members, + &config.exclude, + )), + (None, root) => WorkspaceConfig::Member { + root: root.cloned(), + }, + (Some(..), Some(..)) => bail!( + "cannot configure both `package.workspace` and \ + `[workspace]`, only one can be specified" + ), + }; + let profiles = build_profiles(&me.profile); + let publish = match project.publish { + Some(VecStringOrBool::VecString(ref vecstring)) => { + features + .require(Feature::alternative_registries()) + .chain_err(|| { + "the `publish` manifest key is unstable for anything other than a value of true or false" + })?; + Some(vecstring.clone()) + } + Some(VecStringOrBool::Bool(false)) => Some(vec![]), + None | Some(VecStringOrBool::Bool(true)) => None, + }; + + let publish_lockfile = match project.publish_lockfile { + Some(b) => { + features.require(Feature::publish_lockfile())?; + b + } + None => false, + }; + + let epoch = if let Some(ref epoch) = project.rust { + features + .require(Feature::epoch()) + .chain_err(|| "epoches are unstable")?; + if let Ok(epoch) = epoch.parse() { + epoch + } else { + bail!("the `rust` key must be one of: `2015`, `2018`") + } + } else { + Epoch::Epoch2015 + }; + let mut manifest = Manifest::new( + summary, + targets, + exclude, + include, + project.links.clone(), + metadata, + profiles, + publish, + publish_lockfile, + replace, + patch, + workspace_config, + features, + epoch, + project.im_a_teapot, + Rc::clone(me), + ); + if project.license_file.is_some() && project.license.is_some() { + manifest.add_warning( + "only one of `license` or \ + `license-file` is necessary" + .to_string(), + ); + } + for warning in warnings { + manifest.add_warning(warning); + } + for error in errors { + manifest.add_critical_warning(error); + } + + manifest.feature_gate()?; + + Ok((manifest, nested_paths)) + } + + fn to_virtual_manifest( + me: &Rc, + source_id: &SourceId, + root: &Path, + config: &Config, + ) -> CargoResult<(VirtualManifest, Vec)> { + if me.project.is_some() { + bail!("virtual manifests do not define [project]"); + } + if me.package.is_some() { + bail!("virtual manifests do not define [package]"); + } + if me.lib.is_some() { + bail!("virtual manifests do not specify [lib]"); + } + if me.bin.is_some() { + bail!("virtual manifests do not specify [[bin]]"); + } + if me.example.is_some() { + bail!("virtual manifests do not specify [[example]]"); + } + if me.test.is_some() { + 
bail!("virtual manifests do not specify [[test]]"); + } + if me.bench.is_some() { + bail!("virtual manifests do not specify [[bench]]"); + } + + let mut nested_paths = Vec::new(); + let mut warnings = Vec::new(); + let mut deps = Vec::new(); + let empty = Vec::new(); + let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); + let features = Features::new(&cargo_features, &mut warnings)?; + + let (replace, patch) = { + let mut cx = Context { + pkgid: None, + deps: &mut deps, + source_id, + nested_paths: &mut nested_paths, + config, + warnings: &mut warnings, + platform: None, + features: &features, + root, + }; + (me.replace(&mut cx)?, me.patch(&mut cx)?) + }; + let profiles = build_profiles(&me.profile); + let workspace_config = match me.workspace { + Some(ref config) => WorkspaceConfig::Root(WorkspaceRootConfig::new( + &root, + &config.members, + &config.default_members, + &config.exclude, + )), + None => { + bail!("virtual manifests must be configured with [workspace]"); + } + }; + Ok(( + VirtualManifest::new(replace, patch, workspace_config, profiles), + nested_paths, + )) + } + + fn replace(&self, cx: &mut Context) -> CargoResult> { + if self.patch.is_some() && self.replace.is_some() { + bail!("cannot specify both [replace] and [patch]"); + } + let mut replace = Vec::new(); + for (spec, replacement) in self.replace.iter().flat_map(|x| x) { + let mut spec = PackageIdSpec::parse(spec).chain_err(|| { + format!( + "replacements must specify a valid semver \ + version to replace, but `{}` does not", + spec + ) + })?; + if spec.url().is_none() { + spec.set_url(CRATES_IO.parse().unwrap()); + } + + let version_specified = match *replacement { + TomlDependency::Detailed(ref d) => d.version.is_some(), + TomlDependency::Simple(..) => true, + }; + if version_specified { + bail!( + "replacements cannot specify a version \ + requirement, but found one for `{}`", + spec + ); + } + + let mut dep = replacement.to_dependency(spec.name(), cx, None)?; + { + let version = spec.version().ok_or_else(|| { + format_err!( + "replacements must specify a version \ + to replace, but `{}` does not", + spec + ) + })?; + dep.set_version_req(VersionReq::exact(version)); + } + replace.push((spec, dep)); + } + Ok(replace) + } + + fn patch(&self, cx: &mut Context) -> CargoResult>> { + let mut patch = HashMap::new(); + for (url, deps) in self.patch.iter().flat_map(|x| x) { + let url = match &url[..] { + "crates-io" => CRATES_IO.parse().unwrap(), + _ => url.to_url()?, + }; + patch.insert( + url, + deps.iter() + .map(|(name, dep)| dep.to_dependency(name, cx, None)) + .collect::>>()?, + ); + } + Ok(patch) + } + + fn maybe_custom_build( + &self, + build: &Option, + package_root: &Path, + ) -> Option { + let build_rs = package_root.join("build.rs"); + match *build { + Some(StringOrBool::Bool(false)) => None, // explicitly no build script + Some(StringOrBool::Bool(true)) => Some(build_rs.into()), + Some(StringOrBool::String(ref s)) => Some(PathBuf::from(s)), + None => { + match fs::metadata(&build_rs) { + // If there is a build.rs file next to the Cargo.toml, assume it is + // a build script + Ok(ref e) if e.is_file() => Some(build_rs.into()), + Ok(_) | Err(_) => None, + } + } + } + } +} + +/// Will check a list of build targets, and make sure the target names are unique within a vector. +/// If not, the name of the offending build target is returned. 
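+///
+/// Uniqueness is keyed on each target's resolved `src_path`. For example
+/// (illustrative), two `[[bin]]` sections that both point at `src/main.rs`
+/// would make this return `Err("<package-root>/src/main.rs")`.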
+fn unique_build_targets(targets: &[Target], package_root: &Path) -> Result<(), String> { + let mut seen = HashSet::new(); + for v in targets.iter().map(|e| package_root.join(e.src_path())) { + if !seen.insert(v.clone()) { + return Err(v.display().to_string()); + } + } + Ok(()) +} + +impl TomlDependency { + fn to_dependency( + &self, + name: &str, + cx: &mut Context, + kind: Option, + ) -> CargoResult { + match *self { + TomlDependency::Simple(ref version) => DetailedTomlDependency { + version: Some(version.clone()), + ..Default::default() + }.to_dependency(name, cx, kind), + TomlDependency::Detailed(ref details) => details.to_dependency(name, cx, kind), + } + } +} + +impl DetailedTomlDependency { + fn to_dependency( + &self, + name: &str, + cx: &mut Context, + kind: Option, + ) -> CargoResult { + if self.version.is_none() && self.path.is_none() && self.git.is_none() { + let msg = format!( + "dependency ({}) specified without \ + providing a local path, Git repository, or \ + version to use. This will be considered an \ + error in future versions", + name + ); + cx.warnings.push(msg); + } + + if self.git.is_none() { + let git_only_keys = [ + (&self.branch, "branch"), + (&self.tag, "tag"), + (&self.rev, "rev"), + ]; + + for &(key, key_name) in &git_only_keys { + if key.is_some() { + let msg = format!( + "key `{}` is ignored for dependency ({}). \ + This will be considered an error in future versions", + key_name, name + ); + cx.warnings.push(msg) + } + } + } + + let registry_id = match self.registry { + Some(ref registry) => { + cx.features.require(Feature::alternative_registries())?; + SourceId::alt_registry(cx.config, registry)? + } + None => SourceId::crates_io(cx.config)?, + }; + + let new_source_id = match ( + self.git.as_ref(), + self.path.as_ref(), + self.registry.as_ref(), + self.registry_index.as_ref(), + ) { + (Some(_), _, Some(_), _) | (Some(_), _, _, Some(_)) => bail!( + "dependency ({}) specification is ambiguous. \ + Only one of `git` or `registry` is allowed.", + name + ), + (_, _, Some(_), Some(_)) => bail!( + "dependency ({}) specification is ambiguous. \ + Only one of `registry` or `registry-index` is allowed.", + name + ), + (Some(git), maybe_path, _, _) => { + if maybe_path.is_some() { + let msg = format!( + "dependency ({}) specification is ambiguous. \ + Only one of `git` or `path` is allowed. \ + This will be considered an error in future versions", + name + ); + cx.warnings.push(msg) + } + + let n_details = [&self.branch, &self.tag, &self.rev] + .iter() + .filter(|d| d.is_some()) + .count(); + + if n_details > 1 { + let msg = format!( + "dependency ({}) specification is ambiguous. \ + Only one of `branch`, `tag` or `rev` is allowed. \ + This will be considered an error in future versions", + name + ); + cx.warnings.push(msg) + } + + let reference = self.branch + .clone() + .map(GitReference::Branch) + .or_else(|| self.tag.clone().map(GitReference::Tag)) + .or_else(|| self.rev.clone().map(GitReference::Rev)) + .unwrap_or_else(|| GitReference::Branch("master".to_string())); + let loc = git.to_url()?; + SourceId::for_git(&loc, reference)? + } + (None, Some(path), _, _) => { + cx.nested_paths.push(PathBuf::from(path)); + // If the source id for the package we're parsing is a path + // source, then we normalize the path here to get rid of + // components like `..`. 
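+                    // (Illustrative: `path = "../util"` seen from
+                    // `/ws/app/Cargo.toml` normalizes to `/ws/util`, the
+                    // same `SourceId` a sibling crate referencing it
+                    // directly would get.)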
+                //
+                // The purpose of this is to get a canonical id for the package
+                // that we're depending on to ensure that builds of this package
+                // always end up hashing to the same value no matter where it's
+                // built from.
+                if cx.source_id.is_path() {
+                    let path = cx.root.join(path);
+                    let path = util::normalize_path(&path);
+                    SourceId::for_path(&path)?
+                } else {
+                    cx.source_id.clone()
+                }
+            }
+            (None, None, Some(registry), None) => SourceId::alt_registry(cx.config, registry)?,
+            (None, None, None, Some(registry_index)) => {
+                let url = registry_index.to_url()?;
+                SourceId::for_registry(&url)?
+            }
+            (None, None, None, None) => SourceId::crates_io(cx.config)?,
+        };
+
+        let (pkg_name, rename) = match self.package {
+            Some(ref s) => (&s[..], Some(name)),
+            None => (name, None),
+        };
+
+        let version = self.version.as_ref().map(|v| &v[..]);
+        let mut dep = match cx.pkgid {
+            Some(id) => Dependency::parse(pkg_name, version, &new_source_id, id, cx.config)?,
+            None => Dependency::parse_no_deprecated(name, version, &new_source_id)?,
+        };
+        dep.set_features(self.features.clone().unwrap_or_default())
+            .set_default_features(
+                self.default_features
+                    .or(self.default_features2)
+                    .unwrap_or(true),
+            )
+            .set_optional(self.optional.unwrap_or(false))
+            .set_platform(cx.platform.clone())
+            .set_registry_id(&registry_id);
+        if let Some(kind) = kind {
+            dep.set_kind(kind);
+        }
+        if let Some(rename) = rename {
+            cx.features.require(Feature::rename_dependency())?;
+            dep.set_rename(rename);
+        }
+        Ok(dep)
+    }
+}
+
+#[derive(Default, Serialize, Deserialize, Debug, Clone)]
+struct TomlTarget {
+    name: Option<String>,
+
+    // The intention was to only accept `crate-type` here but historical
+    // versions of Cargo also accepted `crate_type`, so look for both.
+    #[serde(rename = "crate-type")] crate_type: Option<Vec<String>>,
+    #[serde(rename = "crate_type")] crate_type2: Option<Vec<String>>,
+
+    path: Option<PathValue>,
+    test: Option<bool>,
+    doctest: Option<bool>,
+    bench: Option<bool>,
+    doc: Option<bool>,
+    plugin: Option<bool>,
+    #[serde(rename = "proc-macro")] proc_macro: Option<bool>,
+    #[serde(rename = "proc_macro")] proc_macro2: Option<bool>,
+    harness: Option<bool>,
+    #[serde(rename = "required-features")] required_features: Option<Vec<String>>,
+}
+
+#[derive(Clone)]
+struct PathValue(PathBuf);
+
+impl<'de> de::Deserialize<'de> for PathValue {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        Ok(PathValue(String::deserialize(deserializer)?.into()))
+    }
+}
+
+impl ser::Serialize for PathValue {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        self.0.serialize(serializer)
+    }
+}
+
+/// Corresponds to a `target` entry, but `TomlTarget` is already used.
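+/// (It holds the dependency tables found under `[target.'cfg(...)']`, with
+/// both dashed and underscored key spellings accepted, as the serde renames
+/// below show.)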
+#[derive(Serialize, Deserialize, Debug)] +struct TomlPlatform { + dependencies: Option>, + #[serde(rename = "build-dependencies")] + build_dependencies: Option>, + #[serde(rename = "build_dependencies")] + build_dependencies2: Option>, + #[serde(rename = "dev-dependencies")] + dev_dependencies: Option>, + #[serde(rename = "dev_dependencies")] + dev_dependencies2: Option>, +} + +impl TomlTarget { + fn new() -> TomlTarget { + TomlTarget::default() + } + + fn name(&self) -> String { + match self.name { + Some(ref name) => name.clone(), + None => panic!("target name is required"), + } + } + + fn proc_macro(&self) -> Option { + self.proc_macro.or(self.proc_macro2) + } + + fn crate_types(&self) -> Option<&Vec> { + self.crate_type + .as_ref() + .or_else(|| self.crate_type2.as_ref()) + } +} + +impl fmt::Debug for PathValue { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.0.fmt(f) + } +} + +fn build_profiles(profiles: &Option) -> Profiles { + let profiles = profiles.as_ref(); + let mut profiles = Profiles { + release: merge( + Profile::default_release(), + profiles.and_then(|p| p.release.as_ref()), + ), + dev: merge( + Profile::default_dev(), + profiles.and_then(|p| p.dev.as_ref()), + ), + test: merge( + Profile::default_test(), + profiles.and_then(|p| p.test.as_ref()), + ), + test_deps: merge( + Profile::default_dev(), + profiles.and_then(|p| p.dev.as_ref()), + ), + bench: merge( + Profile::default_bench(), + profiles.and_then(|p| p.bench.as_ref()), + ), + bench_deps: merge( + Profile::default_release(), + profiles.and_then(|p| p.release.as_ref()), + ), + doc: merge( + Profile::default_doc(), + profiles.and_then(|p| p.doc.as_ref()), + ), + custom_build: Profile::default_custom_build(), + check: merge( + Profile::default_check(), + profiles.and_then(|p| p.dev.as_ref()), + ), + check_test: merge( + Profile::default_check_test(), + profiles.and_then(|p| p.dev.as_ref()), + ), + doctest: Profile::default_doctest(), + }; + // The test/bench targets cannot have panic=abort because they'll all get + // compiled with --test which requires the unwind runtime currently + profiles.test.panic = None; + profiles.bench.panic = None; + profiles.test_deps.panic = None; + profiles.bench_deps.panic = None; + return profiles; + + fn merge(profile: Profile, toml: Option<&TomlProfile>) -> Profile { + let &TomlProfile { + ref opt_level, + ref lto, + codegen_units, + ref debug, + debug_assertions, + rpath, + ref panic, + ref overflow_checks, + ref incremental, + } = match toml { + Some(toml) => toml, + None => return profile, + }; + let debug = match *debug { + Some(U32OrBool::U32(debug)) => Some(Some(debug)), + Some(U32OrBool::Bool(true)) => Some(Some(2)), + Some(U32OrBool::Bool(false)) => Some(None), + None => None, + }; + Profile { + opt_level: opt_level + .clone() + .unwrap_or(TomlOptLevel(profile.opt_level)) + .0, + lto: match *lto { + Some(StringOrBool::Bool(b)) => Lto::Bool(b), + Some(StringOrBool::String(ref n)) => Lto::Named(n.clone()), + None => profile.lto, + }, + codegen_units, + rustc_args: None, + rustdoc_args: None, + debuginfo: debug.unwrap_or(profile.debuginfo), + debug_assertions: debug_assertions.unwrap_or(profile.debug_assertions), + overflow_checks: overflow_checks.unwrap_or(profile.overflow_checks), + rpath: rpath.unwrap_or(profile.rpath), + test: profile.test, + doc: profile.doc, + run_custom_build: profile.run_custom_build, + check: profile.check, + panic: panic.clone().or(profile.panic), + incremental: incremental.unwrap_or(profile.incremental), + } + } +} diff --git 
a/src/cargo/util/toml/targets.rs b/src/cargo/util/toml/targets.rs new file mode 100644 index 000000000..fbba46453 --- /dev/null +++ b/src/cargo/util/toml/targets.rs @@ -0,0 +1,565 @@ +//! This module implements Cargo conventions for directory layout: +//! +//! * `src/lib.rs` is a library +//! * `src/main.rs` is a binary +//! * `src/bin/*.rs` are binaries +//! * `examples/*.rs` are examples +//! * `tests/*.rs` are integration tests +//! * `benches/*.rs` are benchmarks +//! +//! It is a bit tricky because we need match explicit information from `Cargo.toml` +//! with implicit info in directory layout. + +use std::path::{Path, PathBuf}; +use std::fs::{self, DirEntry}; +use std::collections::HashSet; + +use core::Target; +use ops::is_bad_artifact_name; +use util::errors::CargoResult; +use super::{LibKind, PathValue, StringOrBool, TomlBenchTarget, TomlBinTarget, TomlExampleTarget, + TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget}; + +pub fn targets( + manifest: &TomlManifest, + package_name: &str, + package_root: &Path, + custom_build: &Option, + warnings: &mut Vec, + errors: &mut Vec, +) -> CargoResult> { + let mut targets = Vec::new(); + + let has_lib; + + if let Some(target) = clean_lib(manifest.lib.as_ref(), package_root, package_name, warnings)? { + targets.push(target); + has_lib = true; + } else { + has_lib = false; + } + + targets.extend(clean_bins( + manifest.bin.as_ref(), + package_root, + package_name, + warnings, + errors, + has_lib, + )?); + + targets.extend(clean_examples( + manifest.example.as_ref(), + package_root, + errors, + )?); + + targets.extend(clean_tests(manifest.test.as_ref(), package_root, errors)?); + + targets.extend(clean_benches( + manifest.bench.as_ref(), + package_root, + warnings, + errors, + )?); + + // processing the custom build script + if let Some(custom_build) = manifest.maybe_custom_build(custom_build, package_root) { + let name = format!( + "build-script-{}", + custom_build + .file_stem() + .and_then(|s| s.to_str()) + .unwrap_or("") + ); + targets.push(Target::custom_build_target( + &name, + package_root.join(custom_build), + )); + } + + Ok(targets) +} + +fn clean_lib( + toml_lib: Option<&TomlLibTarget>, + package_root: &Path, + package_name: &str, + warnings: &mut Vec, +) -> CargoResult> { + let inferred = inferred_lib(package_root); + let lib = match toml_lib { + Some(lib) => { + if let Some(ref name) = lib.name { + // XXX: other code paths dodge this validation + if name.contains('-') { + bail!("library target names cannot contain hyphens: {}", name) + } + } + Some(TomlTarget { + name: lib.name.clone().or_else(|| Some(package_name.to_owned())), + ..lib.clone() + }) + } + None => inferred.as_ref().map(|lib| TomlTarget { + name: Some(package_name.to_string()), + path: Some(PathValue(lib.clone())), + ..TomlTarget::new() + }), + }; + + let lib = match lib { + Some(ref lib) => lib, + None => return Ok(None), + }; + + validate_has_name(lib, "library", "lib")?; + + let path = match (lib.path.as_ref(), inferred) { + (Some(path), _) => package_root.join(&path.0), + (None, Some(path)) => path, + (None, None) => { + let legacy_path = package_root.join("src").join(format!("{}.rs", lib.name())); + if legacy_path.exists() { + warnings.push(format!( + "path `{}` was erroneously implicitly accepted for library `{}`,\n\ + please rename the file to `src/lib.rs` or set lib.path in Cargo.toml", + legacy_path.display(), + lib.name() + )); + legacy_path + } else { + bail!( + "can't find library `{}`, \ + rename file to `src/lib.rs` or specify lib.path", + 
lib.name() + ) + } + } + }; + + // Per the Macros 1.1 RFC: + // + // > Initially if a crate is compiled with the proc-macro crate type + // > (and possibly others) it will forbid exporting any items in the + // > crate other than those functions tagged #[proc_macro_derive] and + // > those functions must also be placed at the crate root. + // + // A plugin requires exporting plugin_registrar so a crate cannot be + // both at once. + let crate_types = match (lib.crate_types(), lib.plugin, lib.proc_macro()) { + (_, Some(true), Some(true)) => bail!("lib.plugin and lib.proc-macro cannot both be true"), + (Some(kinds), _, _) => kinds.iter().map(|s| LibKind::from_str(s)).collect(), + (None, Some(true), _) => vec![LibKind::Dylib], + (None, _, Some(true)) => vec![LibKind::ProcMacro], + (None, _, _) => vec![LibKind::Lib], + }; + + let mut target = Target::lib_target(&lib.name(), crate_types, path); + configure(lib, &mut target); + Ok(Some(target)) +} + +fn clean_bins( + toml_bins: Option<&Vec>, + package_root: &Path, + package_name: &str, + warnings: &mut Vec, + errors: &mut Vec, + has_lib: bool, +) -> CargoResult> { + let inferred = inferred_bins(package_root, package_name); + let bins = match toml_bins { + Some(bins) => bins.clone(), + None => inferred + .iter() + .map(|&(ref name, ref path)| TomlTarget { + name: Some(name.clone()), + path: Some(PathValue(path.clone())), + ..TomlTarget::new() + }) + .collect(), + }; + + for bin in &bins { + validate_has_name(bin, "binary", "bin")?; + + let name = bin.name(); + + if let Some(crate_types) = bin.crate_types() { + if !crate_types.is_empty() { + errors.push(format!( + "the target `{}` is a binary and can't have any \ + crate-types set (currently \"{}\")", + name, + crate_types.join(", ") + )); + } + } + + if bin.proc_macro() == Some(true) { + errors.push(format!( + "the target `{}` is a binary and can't have `proc-macro` \ + set `true`", + name + )); + } + + if is_bad_artifact_name(&name) { + bail!("the binary target name `{}` is forbidden", name) + } + } + + validate_unique_names(&bins, "binary")?; + + let mut result = Vec::new(); + for bin in &bins { + let path = target_path(bin, &inferred, "bin", package_root, &mut |_| { + if let Some(legacy_path) = legacy_bin_path(package_root, &bin.name(), has_lib) { + warnings.push(format!( + "path `{}` was erroneously implicitly accepted for binary `{}`,\n\ + please set bin.path in Cargo.toml", + legacy_path.display(), + bin.name() + )); + Some(legacy_path) + } else { + None + } + }); + let path = match path { + Ok(path) => path, + Err(e) => bail!("{}", e), + }; + + let mut target = Target::bin_target(&bin.name(), path, bin.required_features.clone()); + configure(bin, &mut target); + result.push(target); + } + return Ok(result); + + fn legacy_bin_path(package_root: &Path, name: &str, has_lib: bool) -> Option { + if !has_lib { + let path = package_root.join("src").join(format!("{}.rs", name)); + if path.exists() { + return Some(path); + } + } + let path = package_root.join("src").join("main.rs"); + if path.exists() { + return Some(path); + } + + let path = package_root.join("src").join("bin").join("main.rs"); + if path.exists() { + return Some(path); + } + None + } +} + +fn clean_examples( + toml_examples: Option<&Vec>, + package_root: &Path, + errors: &mut Vec, +) -> CargoResult> { + let inferred = infer_from_directory(&package_root.join("examples")); + + let targets = clean_targets( + "example", + "example", + toml_examples, + &inferred, + package_root, + errors, + )?; + + let mut result = Vec::new(); + for 
(path, toml) in targets { + let crate_types = match toml.crate_types() { + Some(kinds) => kinds.iter().map(|s| LibKind::from_str(s)).collect(), + None => Vec::new(), + }; + + let mut target = Target::example_target( + &toml.name(), + crate_types, + path, + toml.required_features.clone(), + ); + configure(&toml, &mut target); + result.push(target); + } + + Ok(result) +} + +fn clean_tests( + toml_tests: Option<&Vec>, + package_root: &Path, + errors: &mut Vec, +) -> CargoResult> { + let inferred = infer_from_directory(&package_root.join("tests")); + + let targets = clean_targets("test", "test", toml_tests, &inferred, package_root, errors)?; + + let mut result = Vec::new(); + for (path, toml) in targets { + let mut target = Target::test_target(&toml.name(), path, toml.required_features.clone()); + configure(&toml, &mut target); + result.push(target); + } + Ok(result) +} + +fn clean_benches( + toml_benches: Option<&Vec>, + package_root: &Path, + warnings: &mut Vec, + errors: &mut Vec, +) -> CargoResult> { + let mut legacy_bench_path = |bench: &TomlTarget| { + let legacy_path = package_root.join("src").join("bench.rs"); + if !(bench.name() == "bench" && legacy_path.exists()) { + return None; + } + warnings.push(format!( + "path `{}` was erroneously implicitly accepted for benchmark `{}`,\n\ + please set bench.path in Cargo.toml", + legacy_path.display(), + bench.name() + )); + Some(legacy_path) + }; + + let inferred = infer_from_directory(&package_root.join("benches")); + + let targets = clean_targets_with_legacy_path( + "benchmark", + "bench", + toml_benches, + &inferred, + package_root, + errors, + &mut legacy_bench_path, + )?; + + let mut result = Vec::new(); + for (path, toml) in targets { + let mut target = Target::bench_target(&toml.name(), path, toml.required_features.clone()); + configure(&toml, &mut target); + result.push(target); + } + + Ok(result) +} + +fn clean_targets( + target_kind_human: &str, + target_kind: &str, + toml_targets: Option<&Vec>, + inferred: &[(String, PathBuf)], + package_root: &Path, + errors: &mut Vec, +) -> CargoResult> { + clean_targets_with_legacy_path( + target_kind_human, + target_kind, + toml_targets, + inferred, + package_root, + errors, + &mut |_| None, + ) +} + +fn clean_targets_with_legacy_path( + target_kind_human: &str, + target_kind: &str, + toml_targets: Option<&Vec>, + inferred: &[(String, PathBuf)], + package_root: &Path, + errors: &mut Vec, + legacy_path: &mut FnMut(&TomlTarget) -> Option, +) -> CargoResult> { + let toml_targets = match toml_targets { + Some(targets) => targets.clone(), + None => inferred + .iter() + .map(|&(ref name, ref path)| TomlTarget { + name: Some(name.clone()), + path: Some(PathValue(path.clone())), + ..TomlTarget::new() + }) + .collect(), + }; + + for target in &toml_targets { + validate_has_name(target, target_kind_human, target_kind)?; + } + + validate_unique_names(&toml_targets, target_kind)?; + let mut result = Vec::new(); + for target in toml_targets { + let path = target_path(&target, inferred, target_kind, package_root, legacy_path); + let path = match path { + Ok(path) => path, + Err(e) => { + errors.push(e); + continue; + } + }; + result.push((path, target)); + } + Ok(result) +} + +fn inferred_lib(package_root: &Path) -> Option { + let lib = package_root.join("src").join("lib.rs"); + if fs::metadata(&lib).is_ok() { + Some(lib) + } else { + None + } +} + +fn inferred_bins(package_root: &Path, package_name: &str) -> Vec<(String, PathBuf)> { + let main = package_root.join("src").join("main.rs"); + let mut result = 
Vec::new(); + if main.exists() { + result.push((package_name.to_string(), main)); + } + result.extend(infer_from_directory(&package_root.join("src").join("bin"))); + + result +} + +fn infer_from_directory(directory: &Path) -> Vec<(String, PathBuf)> { + let entries = match fs::read_dir(directory) { + Err(_) => return Vec::new(), + Ok(dir) => dir, + }; + + entries + .filter_map(|e| e.ok()) + .filter(is_not_dotfile) + .filter_map(|d| infer_any(&d)) + .collect() +} + +fn infer_any(entry: &DirEntry) -> Option<(String, PathBuf)> { + if entry.path().extension().and_then(|p| p.to_str()) == Some("rs") { + infer_file(entry) + } else if entry.file_type().map(|t| t.is_dir()).ok() == Some(true) { + infer_subdirectory(entry) + } else { + None + } +} + +fn infer_file(entry: &DirEntry) -> Option<(String, PathBuf)> { + let path = entry.path(); + path.file_stem() + .and_then(|p| p.to_str()) + .map(|p| (p.to_owned(), path.clone())) +} + +fn infer_subdirectory(entry: &DirEntry) -> Option<(String, PathBuf)> { + let path = entry.path(); + let main = path.join("main.rs"); + let name = path.file_name().and_then(|n| n.to_str()); + match (name, main.exists()) { + (Some(name), true) => Some((name.to_owned(), main)), + _ => None, + } +} + +fn is_not_dotfile(entry: &DirEntry) -> bool { + entry.file_name().to_str().map(|s| s.starts_with('.')) == Some(false) +} + +fn validate_has_name( + target: &TomlTarget, + target_kind_human: &str, + target_kind: &str, +) -> CargoResult<()> { + match target.name { + Some(ref name) => if name.trim().is_empty() { + bail!("{} target names cannot be empty", target_kind_human) + }, + None => bail!( + "{} target {}.name is required", + target_kind_human, + target_kind + ), + } + + Ok(()) +} + +/// Will check a list of toml targets, and make sure the target names are unique within a vector. +fn validate_unique_names(targets: &[TomlTarget], target_kind: &str) -> CargoResult<()> { + let mut seen = HashSet::new(); + for name in targets.iter().map(|e| e.name()) { + if !seen.insert(name.clone()) { + bail!( + "found duplicate {target_kind} name {name}, \ + but all {target_kind} targets must have a unique name", + target_kind = target_kind, + name = name + ); + } + } + Ok(()) +} + +fn configure(toml: &TomlTarget, target: &mut Target) { + let t2 = target.clone(); + target + .set_tested(toml.test.unwrap_or_else(|| t2.tested())) + .set_doc(toml.doc.unwrap_or_else(|| t2.documented())) + .set_doctest(toml.doctest.unwrap_or_else(|| t2.doctested())) + .set_benched(toml.bench.unwrap_or_else(|| t2.benched())) + .set_harness(toml.harness.unwrap_or_else(|| t2.harness())) + .set_for_host(match (toml.plugin, toml.proc_macro()) { + (None, None) => t2.for_host(), + (Some(true), _) | (_, Some(true)) => true, + (Some(false), _) | (_, Some(false)) => false, + }); +} + +fn target_path( + target: &TomlTarget, + inferred: &[(String, PathBuf)], + target_kind: &str, + package_root: &Path, + legacy_path: &mut FnMut(&TomlTarget) -> Option, +) -> Result { + if let Some(ref path) = target.path { + // Should we verify that this path exists here? 
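+        // (As written it is not checked here; a nonexistent explicit path
+        // only surfaces as an error later, when the target is built.)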
+ return Ok(package_root.join(&path.0)); + } + let name = target.name(); + + let mut matching = inferred + .iter() + .filter(|&&(ref n, _)| n == &name) + .map(|&(_, ref p)| p.clone()); + + let first = matching.next(); + let second = matching.next(); + match (first, second) { + (Some(path), None) => Ok(path), + (None, None) | (Some(_), Some(_)) => { + if let Some(path) = legacy_path(target) { + return Ok(path); + } + Err(format!( + "can't find `{name}` {target_kind}, specify {target_kind}.path", + name = name, + target_kind = target_kind + )) + } + (None, Some(_)) => unreachable!(), + } +} diff --git a/src/cargo/util/vcs.rs b/src/cargo/util/vcs.rs new file mode 100644 index 000000000..1eb447a59 --- /dev/null +++ b/src/cargo/util/vcs.rs @@ -0,0 +1,78 @@ +use std::path::Path; +use std::fs::create_dir; + +use git2; + +use util::{process, CargoResult}; + +pub struct HgRepo; +pub struct GitRepo; +pub struct PijulRepo; +pub struct FossilRepo; + +impl GitRepo { + pub fn init(path: &Path, _: &Path) -> CargoResult { + git2::Repository::init(path)?; + Ok(GitRepo) + } + pub fn discover(path: &Path, _: &Path) -> Result { + git2::Repository::discover(path) + } +} + +impl HgRepo { + pub fn init(path: &Path, cwd: &Path) -> CargoResult { + process("hg").cwd(cwd).arg("init").arg(path).exec()?; + Ok(HgRepo) + } + pub fn discover(path: &Path, cwd: &Path) -> CargoResult { + process("hg") + .cwd(cwd) + .arg("root") + .cwd(path) + .exec_with_output()?; + Ok(HgRepo) + } +} + +impl PijulRepo { + pub fn init(path: &Path, cwd: &Path) -> CargoResult { + process("pijul").cwd(cwd).arg("init").arg(path).exec()?; + Ok(PijulRepo) + } +} + +impl FossilRepo { + pub fn init(path: &Path, cwd: &Path) -> CargoResult { + // fossil doesn't create the directory so we'll do that first + create_dir(path)?; + + // set up the paths we'll use + let db_fname = ".fossil"; + let mut db_path = path.to_owned(); + db_path.push(db_fname); + + // then create the fossil DB in that location + process("fossil").cwd(cwd).arg("init").arg(&db_path).exec()?; + + // open it in that new directory + process("fossil") + .cwd(&path) + .arg("open") + .arg(db_fname) + .exec()?; + + // set `target` as ignoreable and cleanable + process("fossil") + .cwd(cwd) + .arg("settings") + .arg("ignore-glob") + .arg("target"); + process("fossil") + .cwd(cwd) + .arg("settings") + .arg("clean-glob") + .arg("target"); + Ok(FossilRepo) + } +} diff --git a/src/crates-io/Cargo.toml b/src/crates-io/Cargo.toml new file mode 100644 index 000000000..2c8aa8f34 --- /dev/null +++ b/src/crates-io/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "crates-io" +version = "0.16.0" +authors = ["Alex Crichton "] +license = "MIT OR Apache-2.0" +repository = "https://github.com/rust-lang/cargo" +description = """ +Helpers for interacting with crates.io +""" + +[lib] +name = "crates_io" +path = "lib.rs" + +[dependencies] +curl = "0.4" +failure = "0.1.1" +serde = "1.0" +serde_derive = "1.0" +serde_json = "1.0" +url = "1.0" diff --git a/src/crates-io/LICENSE-APACHE b/src/crates-io/LICENSE-APACHE new file mode 120000 index 000000000..1cd601d0a --- /dev/null +++ b/src/crates-io/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/src/crates-io/LICENSE-MIT b/src/crates-io/LICENSE-MIT new file mode 120000 index 000000000..b2cfbdc7b --- /dev/null +++ b/src/crates-io/LICENSE-MIT @@ -0,0 +1 @@ +../../LICENSE-MIT \ No newline at end of file diff --git a/src/crates-io/lib.rs b/src/crates-io/lib.rs new file mode 100644 index 000000000..54cd7f242 --- /dev/null +++ 
b/src/crates-io/lib.rs @@ -0,0 +1,340 @@ +#![allow(unknown_lints)] + +extern crate curl; +#[macro_use] +extern crate failure; +#[macro_use] +extern crate serde_derive; +extern crate serde_json; +extern crate url; + +use std::collections::BTreeMap; +use std::fs::File; +use std::io::prelude::*; +use std::io::Cursor; + +use curl::easy::{Easy, List}; +use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET}; + +pub type Result = std::result::Result; + +pub struct Registry { + host: String, + token: Option, + handle: Easy, +} + +#[derive(PartialEq, Clone, Copy)] +pub enum Auth { + Authorized, + Unauthorized, +} + +#[derive(Deserialize)] +pub struct Crate { + pub name: String, + pub description: Option, + pub max_version: String, +} + +#[derive(Serialize)] +pub struct NewCrate { + pub name: String, + pub vers: String, + pub deps: Vec, + pub features: BTreeMap>, + pub authors: Vec, + pub description: Option, + pub documentation: Option, + pub homepage: Option, + pub readme: Option, + pub readme_file: Option, + pub keywords: Vec, + pub categories: Vec, + pub license: Option, + pub license_file: Option, + pub repository: Option, + pub badges: BTreeMap>, + #[serde(default)] pub links: Option, +} + +#[derive(Serialize)] +pub struct NewCrateDependency { + pub optional: bool, + pub default_features: bool, + pub name: String, + pub features: Vec, + pub version_req: String, + pub target: Option, + pub kind: String, + #[serde(skip_serializing_if = "Option::is_none")] pub registry: Option, +} + +#[derive(Deserialize)] +pub struct User { + pub id: u32, + pub login: String, + pub avatar: Option, + pub email: Option, + pub name: Option, +} + +pub struct Warnings { + pub invalid_categories: Vec, + pub invalid_badges: Vec, +} + +#[derive(Deserialize)] +struct R { + ok: bool, +} +#[derive(Deserialize)] +struct OwnerResponse { + ok: bool, + msg: String, +} +#[derive(Deserialize)] +struct ApiErrorList { + errors: Vec, +} +#[derive(Deserialize)] +struct ApiError { + detail: String, +} +#[derive(Serialize)] +struct OwnersReq<'a> { + users: &'a [&'a str], +} +#[derive(Deserialize)] +struct Users { + users: Vec, +} +#[derive(Deserialize)] +struct TotalCrates { + total: u32, +} +#[derive(Deserialize)] +struct Crates { + crates: Vec, + meta: TotalCrates, +} +impl Registry { + pub fn new(host: String, token: Option) -> Registry { + Registry::new_handle(host, token, Easy::new()) + } + + pub fn new_handle(host: String, token: Option, handle: Easy) -> Registry { + Registry { + host, + token, + handle, + } + } + + pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result { + let body = serde_json::to_string(&OwnersReq { users: owners })?; + let body = self.put(format!("/crates/{}/owners", krate), body.as_bytes())?; + assert!(serde_json::from_str::(&body)?.ok); + Ok(serde_json::from_str::(&body)?.msg) + } + + pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> { + let body = serde_json::to_string(&OwnersReq { users: owners })?; + let body = self.delete(format!("/crates/{}/owners", krate), Some(body.as_bytes()))?; + assert!(serde_json::from_str::(&body)?.ok); + Ok(()) + } + + pub fn list_owners(&mut self, krate: &str) -> Result> { + let body = self.get(format!("/crates/{}/owners", krate))?; + Ok(serde_json::from_str::(&body)?.users) + } + + pub fn publish(&mut self, krate: &NewCrate, tarball: &File) -> Result { + let json = serde_json::to_string(krate)?; + // Prepare the body. 
The format of the upload request is: + // + // + // (metadata for the package) + // + // + let stat = tarball.metadata()?; + let header = { + let mut w = Vec::new(); + w.extend( + [ + (json.len() >> 0) as u8, + (json.len() >> 8) as u8, + (json.len() >> 16) as u8, + (json.len() >> 24) as u8, + ].iter() + .map(|x| *x), + ); + w.extend(json.as_bytes().iter().map(|x| *x)); + w.extend( + [ + (stat.len() >> 0) as u8, + (stat.len() >> 8) as u8, + (stat.len() >> 16) as u8, + (stat.len() >> 24) as u8, + ].iter() + .map(|x| *x), + ); + w + }; + let size = stat.len() as usize + header.len(); + let mut body = Cursor::new(header).chain(tarball); + + let url = format!("{}/api/v1/crates/new", self.host); + + let token = match self.token.as_ref() { + Some(s) => s, + None => bail!("no upload token found, please run `cargo login`"), + }; + self.handle.put(true)?; + self.handle.url(&url)?; + self.handle.in_filesize(size as u64)?; + let mut headers = List::new(); + headers.append("Accept: application/json")?; + headers.append(&format!("Authorization: {}", token))?; + self.handle.http_headers(headers)?; + + let body = handle(&mut self.handle, &mut |buf| body.read(buf).unwrap_or(0))?; + + let response = if body.len() > 0 { + body.parse::()? + } else { + "{}".parse()? + }; + + let invalid_categories: Vec = response + .get("warnings") + .and_then(|j| j.get("invalid_categories")) + .and_then(|j| j.as_array()) + .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect()) + .unwrap_or_else(Vec::new); + + let invalid_badges: Vec = response + .get("warnings") + .and_then(|j| j.get("invalid_badges")) + .and_then(|j| j.as_array()) + .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect()) + .unwrap_or_else(Vec::new); + + Ok(Warnings { + invalid_categories, + invalid_badges, + }) + } + + pub fn search(&mut self, query: &str, limit: u32) -> Result<(Vec, u32)> { + let formatted_query = percent_encode(query.as_bytes(), QUERY_ENCODE_SET); + let body = self.req( + format!("/crates?q={}&per_page={}", formatted_query, limit), + None, + Auth::Unauthorized, + )?; + + let crates = serde_json::from_str::(&body)?; + Ok((crates.crates, crates.meta.total)) + } + + pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> { + let body = self.delete(format!("/crates/{}/{}/yank", krate, version), None)?; + assert!(serde_json::from_str::(&body)?.ok); + Ok(()) + } + + pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> { + let body = self.put(format!("/crates/{}/{}/unyank", krate, version), &[])?; + assert!(serde_json::from_str::(&body)?.ok); + Ok(()) + } + + fn put(&mut self, path: String, b: &[u8]) -> Result { + self.handle.put(true)?; + self.req(path, Some(b), Auth::Authorized) + } + + fn get(&mut self, path: String) -> Result { + self.handle.get(true)?; + self.req(path, None, Auth::Authorized) + } + + fn delete(&mut self, path: String, b: Option<&[u8]>) -> Result { + self.handle.custom_request("DELETE")?; + self.req(path, b, Auth::Authorized) + } + + fn req(&mut self, path: String, body: Option<&[u8]>, authorized: Auth) -> Result { + self.handle.url(&format!("{}/api/v1{}", self.host, path))?; + let mut headers = List::new(); + headers.append("Accept: application/json")?; + headers.append("Content-Type: application/json")?; + + if authorized == Auth::Authorized { + let token = match self.token.as_ref() { + Some(s) => s, + None => bail!("no upload token found, please run `cargo login`"), + }; + headers.append(&format!("Authorization: {}", token))?; + } + self.handle.http_headers(headers)?; + 
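+        // With a body, curl is switched into upload mode and fed from the
+        // slice via the read callback passed to `handle` below; with no
+        // body, the callback simply reports end-of-input.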
match body { + Some(mut body) => { + self.handle.upload(true)?; + self.handle.in_filesize(body.len() as u64)?; + handle(&mut self.handle, &mut |buf| body.read(buf).unwrap_or(0)) + } + None => handle(&mut self.handle, &mut |_| 0), + } + } +} + +fn handle(handle: &mut Easy, read: &mut FnMut(&mut [u8]) -> usize) -> Result { + let mut headers = Vec::new(); + let mut body = Vec::new(); + { + let mut handle = handle.transfer(); + handle.read_function(|buf| Ok(read(buf)))?; + handle.write_function(|data| { + body.extend_from_slice(data); + Ok(data.len()) + })?; + handle.header_function(|data| { + headers.push(String::from_utf8_lossy(data).into_owned()); + true + })?; + handle.perform()?; + } + + match handle.response_code()? { + 0 => {} // file upload url sometimes + 200 => {} + 403 => bail!("received 403 unauthorized response code"), + 404 => bail!("received 404 not found response code"), + code => bail!( + "failed to get a 200 OK response, got {}\n\ + headers:\n\ + \t{}\n\ + body:\n\ + {}", + code, + headers.join("\n\t"), + String::from_utf8_lossy(&body) + ), + } + + let body = match String::from_utf8(body) { + Ok(body) => body, + Err(..) => bail!("response body was not valid utf-8"), + }; + match serde_json::from_str::(&body) { + Ok(errors) => { + let errors = errors.errors.into_iter().map(|s| s.detail); + bail!("api errors: {}", errors.collect::>().join(", ")) + } + Err(..) => {} + } + Ok(body) +} diff --git a/src/doc/README.md b/src/doc/README.md new file mode 100644 index 000000000..983c96693 --- /dev/null +++ b/src/doc/README.md @@ -0,0 +1,47 @@ +# The Cargo Book + + +### Requirements + +Building the book requires [mdBook]. To get it: + +[mdBook]: https://github.com/azerupi/mdBook + +```console +$ cargo install mdbook +``` + +### Building + +To build the book: + +```console +$ mdbook build +``` + +The output will be in the `book` subdirectory. To check it out, open it in +your web browser. + +_Firefox:_ +```console +$ firefox book/index.html # Linux +$ open -a "Firefox" book/index.html # OS X +$ Start-Process "firefox.exe" .\book\index.html # Windows (PowerShell) +$ start firefox.exe .\book\index.html # Windows (Cmd) +``` + +_Chrome:_ +```console +$ google-chrome book/index.html # Linux +$ open -a "Google Chrome" book/index.html # OS X +$ Start-Process "chrome.exe" .\book\index.html # Windows (PowerShell) +$ start chrome.exe .\book\index.html # Windows (Cmd) +``` + + +## Contributing + +Given that the book is still in a draft state, we'd love your help! Please feel free to open +issues about anything, and send in PRs for things you'd like to fix or change. If your change is +large, please open an issue first, so we can make sure that it's something we'd accept before you +go through the work of getting a PR together. 
diff --git a/src/doc/book.toml b/src/doc/book.toml new file mode 100644 index 000000000..1f21e1e2e --- /dev/null +++ b/src/doc/book.toml @@ -0,0 +1,2 @@ +title = "The Cargo Book" +author = "Alex Crichton, Steve Klabnik and Carol Nichols, with Contributions from the Rust Community" diff --git a/src/doc/src/SUMMARY.md b/src/doc/src/SUMMARY.md new file mode 100644 index 000000000..f75ac510e --- /dev/null +++ b/src/doc/src/SUMMARY.md @@ -0,0 +1,31 @@ +# Summary + +[Introduction](index.md) + +* [Getting Started](getting-started/index.md) + * [Installation](getting-started/installation.md) + * [First Steps with Cargo](getting-started/first-steps.md) + +* [Cargo Guide](guide/index.md) + * [Why Cargo Exists](guide/why-cargo-exists.md) + * [Creating a New Project](guide/creating-a-new-project.md) + * [Working on an Existing Project](guide/working-on-an-existing-project.md) + * [Dependencies](guide/dependencies.md) + * [Project Layout](guide/project-layout.md) + * [Cargo.toml vs Cargo.lock](guide/cargo-toml-vs-cargo-lock.md) + * [Tests](guide/tests.md) + * [Continuous Integration](guide/continuous-integration.md) + * [Build Cache](guide/build-cache.md) + +* [Cargo Reference](reference/index.md) + * [Specifying Dependencies](reference/specifying-dependencies.md) + * [The Manifest Format](reference/manifest.md) + * [Configuration](reference/config.md) + * [Environment Variables](reference/environment-variables.md) + * [Build Scripts](reference/build-scripts.md) + * [Publishing on crates.io](reference/publishing.md) + * [Package ID Specifications](reference/pkgid-spec.md) + * [Source Replacement](reference/source-replacement.md) + * [External Tools](reference/external-tools.md) + +* [FAQ](faq.md) diff --git a/src/doc/src/faq.md b/src/doc/src/faq.md new file mode 100644 index 000000000..7ee2d2b7c --- /dev/null +++ b/src/doc/src/faq.md @@ -0,0 +1,193 @@ +## Frequently Asked Questions + +### Is the plan to use GitHub as a package repository? + +No. The plan for Cargo is to use [crates.io], like npm or Rubygems do with +npmjs.org and rubygems.org. + +We plan to support git repositories as a source of packages forever, +because they can be used for early development and temporary patches, +even when people use the registry as the primary source of packages. + +### Why build crates.io rather than use GitHub as a registry? + +We think that it’s very important to support multiple ways to download +packages, including downloading from GitHub and copying packages into +your project itself. + +That said, we think that [crates.io] offers a number of important benefits, and +will likely become the primary way that people download packages in Cargo. + +For precedent, both Node.js’s [npm][1] and Ruby’s [bundler][2] support both a +central registry model as well as a Git-based model, and most packages +are downloaded through the registry in those ecosystems, with an +important minority of packages making use of git-based packages. + +[1]: https://www.npmjs.org +[2]: https://bundler.io + +Some of the advantages that make a central registry popular in other +languages include: + +* **Discoverability**. A central registry provides an easy place to look + for existing packages. Combined with tagging, this also makes it + possible for a registry to provide ecosystem-wide information, such as a + list of the most popular or most-depended-on packages. +* **Speed**. 
A central registry makes it possible to easily fetch just + the metadata for packages quickly and efficiently, and then to + efficiently download just the published package, and not other bloat + that happens to exist in the repository. This adds up to a significant + improvement in the speed of dependency resolution and fetching. As + dependency graphs scale up, downloading all of the git repositories bogs + down fast. Also remember that not everybody has a high-speed, + low-latency Internet connection. + +### Will Cargo work with C code (or other languages)? + +Yes! + +Cargo handles compiling Rust code, but we know that many Rust projects +link against C code. We also know that there are decades of tooling +built up around compiling languages other than Rust. + +Our solution: Cargo allows a package to [specify a script](reference/build-scripts.html) +(written in Rust) to run before invoking `rustc`. Rust is leveraged to +implement platform-specific configuration and refactor out common build +functionality among packages. + +### Can Cargo be used inside of `make` (or `ninja`, or ...) + +Indeed. While we intend Cargo to be useful as a standalone way to +compile Rust projects at the top-level, we know that some people will +want to invoke Cargo from other build tools. + +We have designed Cargo to work well in those contexts, paying attention +to things like error codes and machine-readable output modes. We still +have some work to do on those fronts, but using Cargo in the context of +conventional scripts is something we designed for from the beginning and +will continue to prioritize. + +### Does Cargo handle multi-platform projects or cross-compilation? + +Rust itself provides facilities for configuring sections of code based +on the platform. Cargo also supports [platform-specific +dependencies][target-deps], and we plan to support more per-platform +configuration in `Cargo.toml` in the future. + +[target-deps]: reference/specifying-dependencies.html#platform-specific-dependencies + +In the longer-term, we’re looking at ways to conveniently cross-compile +projects using Cargo. + +### Does Cargo support environments, like `production` or `test`? + +We support environments through the use of [profiles][profile] to support: + +[profile]: reference/manifest.html#the-profile-sections + +* environment-specific flags (like `-g --opt-level=0` for development + and `--opt-level=3` for production). +* environment-specific dependencies (like `hamcrest` for test assertions). +* environment-specific `#[cfg]` +* a `cargo test` command + +### Does Cargo work on Windows? + +Yes! + +All commits to Cargo are required to pass the local test suite on Windows. +If, however, you find a Windows issue, we consider it a bug, so [please file an +issue][3]. + +[3]: https://github.com/rust-lang/cargo/issues + +### Why do binaries have `Cargo.lock` in version control, but not libraries? + +The purpose of a `Cargo.lock` is to describe the state of the world at the time +of a successful build. It is then used to provide deterministic builds across +whatever machine is building the project by ensuring that the exact same +dependencies are being compiled. + +This property is most desirable from applications and projects which are at the +very end of the dependency chain (binaries). As a result, it is recommended that +all binaries check in their `Cargo.lock`. + +For libraries the situation is somewhat different. A library is not only used by +the library developers, but also any downstream consumers of the library. 
Users
+dependent on the library will not inspect the library’s `Cargo.lock` (even if it
+exists). This is precisely because a library should **not** be deterministically
+recompiled for all users of the library.
+
+If a library ends up being used transitively by several dependencies, it’s
+likely that just a single copy of the library is desired (based on semver
+compatibility). If all libraries were to check in their `Cargo.lock`, then
+multiple copies of the library would be used, and perhaps even a version
+conflict would arise.
+
+In other words, libraries specify semver requirements for their dependencies but
+cannot see the full picture. Only end products like binaries have a full
+picture to decide what versions of dependencies should be used.
+
+### Can libraries use `*` as a version for their dependencies?
+
+**As of January 22nd, 2016, [crates.io] rejects all packages (not just libraries)
+with wildcard dependency constraints.**
+
+While libraries _can_, strictly speaking, they should not. A version requirement
+of `*` says “This will work with every version ever,” which is never going
+to be true. Libraries should always specify the range that they do work with,
+even if it’s something as general as “every 1.x.y version.”
+
+### Why `Cargo.toml`?
+
+As one of the most frequent interactions with Cargo, the question of why the
+configuration file is named `Cargo.toml` arises from time to time. The leading
+capital-`C` was chosen to ensure that the manifest was grouped with other
+similar configuration files in directory listings. Sorting files often puts
+capital letters before lowercase letters, ensuring files like `Makefile` and
+`Cargo.toml` are placed together. The trailing `.toml` was chosen to emphasize
+the fact that the file is in the [TOML configuration
+format](https://github.com/toml-lang/toml).
+
+Cargo does not allow other names such as `cargo.toml` or `Cargofile`, to
+emphasize the ease with which a Cargo repository can be identified. An option of
+many possible names has historically led to confusion where one case was handled
+but others were accidentally forgotten.
+
+[crates.io]: https://crates.io/
+
+### How can Cargo work offline?
+
+Cargo is often used in situations with limited or no network access, such as
+airplanes, CI environments, or embedded in large production deployments. Users
+are often surprised when Cargo attempts to fetch resources from the network, and
+hence the request for Cargo to work offline comes up frequently.
+
+Cargo, at its heart, will not attempt to access the network unless told to do
+so. That is, if no crates come from crates.io, a git repository, or some other
+network location, Cargo will never attempt to make a network connection. As a
+result, if Cargo attempts to touch the network, then it's because it needs to
+fetch a required resource.
+
+Cargo is also quite aggressive about caching information to minimize the amount
+of network activity. It guarantees, for example, that if `cargo build` (or an
+equivalent) is run to completion then the next `cargo build` will not touch the
+network so long as `Cargo.toml` has not been modified in the
+meantime. This avoidance of the network boils down to a `Cargo.lock` existing
+and a populated cache of the crates reflected in the lock file. If either of
+these components is missing, then it is required for the build to succeed and
+must be fetched remotely.
+
+As of Rust 1.11.0 Cargo understands a new flag, `--frozen`, which is an
+assertion that it shouldn't touch the network.
When passed, Cargo will
+immediately return an error if it would otherwise attempt a network request.
+The error should include contextual information about why the network request
+is being made in the first place, to help with debugging. Note that this flag
+*does not change the behavior of Cargo*; it simply asserts that Cargo shouldn't
+touch the network, as a previous command has been run to ensure that network
+activity shouldn't be necessary.
+
+For more information about vendoring, see documentation on [source
+replacement][replace].
+
+[replace]: reference/source-replacement.html
diff --git a/src/doc/src/getting-started/first-steps.md b/src/doc/src/getting-started/first-steps.md
new file mode 100644
index 000000000..3a0bad356
--- /dev/null
+++ b/src/doc/src/getting-started/first-steps.md
@@ -0,0 +1,70 @@
+## First Steps with Cargo
+
+To start a new project with Cargo, use `cargo new`:
+
+```console
+$ cargo new hello_world --bin
+```
+
+We’re passing `--bin` because we’re making a binary program: if we
+were making a library, we’d pass `--lib`.
+
+Let’s check out what Cargo has generated for us:
+
+```console
+$ cd hello_world
+$ tree .
+.
+├── Cargo.toml
+└── src
+    └── main.rs
+
+1 directory, 2 files
+```
+
+This is all we need to get started. First, let’s check out `Cargo.toml`:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+```
+
+This is called a **manifest**, and it contains all of the metadata that Cargo
+needs to compile your project.
+
+Here’s what’s in `src/main.rs`:
+
+```rust
+fn main() {
+    println!("Hello, world!");
+}
+```
+
+Cargo generated a “hello world” for us. Let’s compile it:
+
+```console
+$ cargo build
+   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
+```
+
+And then run it:
+
+```console
+$ ./target/debug/hello_world
+Hello, world!
+```
+
+We can also use `cargo run` to compile and then run it, all in one step:
+
+```console
+$ cargo run
+     Fresh hello_world v0.1.0 (file:///path/to/project/hello_world)
+   Running `target/hello_world`
+Hello, world!
+```
+
+### Going further
+
+For more details on using Cargo, check out the [Cargo Guide](guide/index.html).
diff --git a/src/doc/src/getting-started/index.md b/src/doc/src/getting-started/index.md
new file mode 100644
index 000000000..22a7315cf
--- /dev/null
+++ b/src/doc/src/getting-started/index.md
@@ -0,0 +1,6 @@
+## Getting Started
+
+To get started with Cargo, install Cargo (and Rust) and set up your first crate.
+
+* [Installation](getting-started/installation.html)
+* [First steps with Cargo](getting-started/first-steps.html)
diff --git a/src/doc/src/getting-started/installation.md b/src/doc/src/getting-started/installation.md
new file mode 100644
index 000000000..186c9daa5
--- /dev/null
+++ b/src/doc/src/getting-started/installation.md
@@ -0,0 +1,37 @@
+## Installation
+
+### Install Rust and Cargo
+
+The easiest way to get Cargo is to install the current stable release of [Rust]
+by using `rustup`.
+
+On Linux and macOS systems, this is done as follows:
+
+```console
+$ curl -sSf https://static.rust-lang.org/rustup.sh | sh
+```
+
+It will download a script and start the installation. If everything goes well,
+you’ll see this appear:
+
+```console
+Rust is installed now. Great!
+```
+
+On Windows, download and run [rustup-init.exe]. It will start the installation
+in a console and present the above message on success.
+
+After this, you can use the `rustup` command to also install `beta` or `nightly`
+channels for Rust and Cargo.
+
+For other installation options and information, visit the
+[install][install-rust] page of the Rust website.
+
+### Build and Install Cargo from Source
+
+Alternatively, you can [build Cargo from source][compiling-from-source].
+
+[rust]: https://www.rust-lang.org/
+[rustup-init.exe]: https://win.rustup.rs/
+[install-rust]: https://www.rust-lang.org/install.html
+[compiling-from-source]: https://github.com/rust-lang/cargo#compiling-from-source
diff --git a/src/doc/src/guide/build-cache.md b/src/doc/src/guide/build-cache.md
new file mode 100644
index 000000000..d253b8acc
--- /dev/null
+++ b/src/doc/src/guide/build-cache.md
@@ -0,0 +1,14 @@
+## Build cache
+
+Cargo shares build artifacts among all the packages of a single workspace.
+Today, Cargo does not share build results across different workspaces, but
+a similar result can be achieved by using a third party tool, [sccache].
+
+To set up `sccache`, install it with `cargo install sccache` and set the
+`RUSTC_WRAPPER` environment variable to `sccache` before invoking Cargo.
+If you use bash, it makes sense to add `export RUSTC_WRAPPER=sccache` to
+`.bashrc`. Refer to the sccache documentation for more details.
+
+[sccache]: https://github.com/mozilla/sccache
+
+
diff --git a/src/doc/src/guide/cargo-toml-vs-cargo-lock.md b/src/doc/src/guide/cargo-toml-vs-cargo-lock.md
new file mode 100644
index 000000000..66d52459c
--- /dev/null
+++ b/src/doc/src/guide/cargo-toml-vs-cargo-lock.md
@@ -0,0 +1,103 @@
+## Cargo.toml vs Cargo.lock
+
+`Cargo.toml` and `Cargo.lock` serve two different purposes. Before we talk
+about them, here’s a summary:
+
+* `Cargo.toml` is about describing your dependencies in a broad sense, and is
+  written by you.
+* `Cargo.lock` contains exact information about your dependencies. It is
+  maintained by Cargo and should not be manually edited.
+
+If you’re building a library that other projects will depend on, put
+`Cargo.lock` in your `.gitignore`. If you’re building an executable like a
+command-line tool or an application, check `Cargo.lock` into `git`. If you're
+curious about why that is, see ["Why do binaries have `Cargo.lock` in version
+control, but not libraries?" in the
+FAQ](faq.html#why-do-binaries-have-cargolock-in-version-control-but-not-libraries).
+
+Let’s dig in a little bit more.
+
+`Cargo.toml` is a **manifest** file in which we can specify a bunch of
+different metadata about our project. For example, we can say that we depend
+on another project:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand.git" }
+```
+
+This project has a single dependency, on the `rand` library. We’ve stated in
+this case that we’re relying on a particular Git repository that lives on
+GitHub. Since we haven’t specified any other information, Cargo assumes that
+we intend to use the latest commit on the `master` branch to build our project.
+
+Sound good? Well, there’s one problem: If you build this project today, and
+then you send a copy to me, and I build this project tomorrow, something bad
+could happen. There could be more commits to `rand` in the meantime, and my
+build would include new commits while yours would not. Therefore, we would
+get different builds. This would be bad because we want reproducible builds.
+ +We could fix this problem by putting a `rev` line in our `Cargo.toml`: + +```toml +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand.git", rev = "9f35b8e" } +``` + +Now our builds will be the same. But there’s a big drawback: now we have to +manually think about SHA-1s every time we want to update our library. This is +both tedious and error prone. + +Enter the `Cargo.lock`. Because of its existence, we don’t need to manually +keep track of the exact revisions: Cargo will do it for us. When we have a +manifest like this: + +```toml +[package] +name = "hello_world" +version = "0.1.0" +authors = ["Your Name "] + +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand.git" } +``` + +Cargo will take the latest commit and write that information out into our +`Cargo.lock` when we build for the first time. That file will look like this: + +```toml +[[package]] +name = "hello_world" +version = "0.1.0" +dependencies = [ + "rand 0.1.0 (git+https://github.com/rust-lang-nursery/rand.git#9f35b8e439eeedd60b9414c58f389bdc6a3284f9)", +] + +[[package]] +name = "rand" +version = "0.1.0" +source = "git+https://github.com/rust-lang-nursery/rand.git#9f35b8e439eeedd60b9414c58f389bdc6a3284f9" +``` + +You can see that there’s a lot more information here, including the exact +revision we used to build. Now when you give your project to someone else, +they’ll use the exact same SHA, even though we didn’t specify it in our +`Cargo.toml`. + +When we’re ready to opt in to a new version of the library, Cargo can +re-calculate the dependencies and update things for us: + +```console +$ cargo update # updates all dependencies +$ cargo update -p rand # updates just “rand” +``` + +This will write out a new `Cargo.lock` with the new version information. Note +that the argument to `cargo update` is actually a +[Package ID Specification](reference/pkgid-spec.html) and `rand` is just a short +specification. diff --git a/src/doc/src/guide/continuous-integration.md b/src/doc/src/guide/continuous-integration.md new file mode 100644 index 000000000..6e5efe72c --- /dev/null +++ b/src/doc/src/guide/continuous-integration.md @@ -0,0 +1,21 @@ +## Continuous Integration + +### Travis CI + +To test your project on Travis CI, here is a sample `.travis.yml` file: + +```yaml +language: rust +rust: + - stable + - beta + - nightly +matrix: + allow_failures: + - rust: nightly +``` + +This will test all three release channels, but any breakage in nightly +will not fail your overall build. Please see the [Travis CI Rust +documentation](https://docs.travis-ci.com/user/languages/rust/) for more +information. diff --git a/src/doc/src/guide/creating-a-new-project.md b/src/doc/src/guide/creating-a-new-project.md new file mode 100644 index 000000000..98f2a65d7 --- /dev/null +++ b/src/doc/src/guide/creating-a-new-project.md @@ -0,0 +1,87 @@ +## Creating a New Project + +To start a new project with Cargo, use `cargo new`: + +```console +$ cargo new hello_world --bin +``` + +We’re passing `--bin` because we’re making a binary program: if we +were making a library, we’d pass `--lib`. This also initializes a new `git` +repository by default. If you don't want it to do that, pass `--vcs none`. + +Let’s check out what Cargo has generated for us: + +```console +$ cd hello_world +$ tree . +. 
+├── Cargo.toml +└── src + └── main.rs + +1 directory, 2 files +``` + +Let’s take a closer look at `Cargo.toml`: + +```toml +[package] +name = "hello_world" +version = "0.1.0" +authors = ["Your Name "] +``` + +This is called a **manifest**, and it contains all of the metadata that Cargo +needs to compile your project. + +Here’s what’s in `src/main.rs`: + +```rust +fn main() { + println!("Hello, world!"); +} +``` + +Cargo generated a “hello world” for us. Let’s compile it: + +```console +$ cargo build + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) +``` + +And then run it: + +```console +$ ./target/debug/hello_world +Hello, world! +``` + +We can also use `cargo run` to compile and then run it, all in one step (You +won't see the `Compiling` line if you have not made any changes since you last +compiled): + +```console +$ cargo run + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) + Running `target/debug/hello_world` +Hello, world! +``` + +You’ll now notice a new file, `Cargo.lock`. It contains information about our +dependencies. Since we don’t have any yet, it’s not very interesting. + +Once you’re ready for release, you can use `cargo build --release` to compile +your files with optimizations turned on: + +```console +$ cargo build --release + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) +``` + +`cargo build --release` puts the resulting binary in `target/release` instead of +`target/debug`. + +Compiling in debug mode is the default for development-- compilation time is +shorter since the compiler doesn't do optimizations, but the code will run +slower. Release mode takes longer to compile, but the code will run faster. diff --git a/src/doc/src/guide/dependencies.md b/src/doc/src/guide/dependencies.md new file mode 100644 index 000000000..5b03a133c --- /dev/null +++ b/src/doc/src/guide/dependencies.md @@ -0,0 +1,90 @@ +## Dependencies + +[crates.io] is the Rust community's central package registry that serves as a +location to discover and download packages. `cargo` is configured to use it by +default to find requested packages. + +To depend on a library hosted on [crates.io], add it to your `Cargo.toml`. + +[crates.io]: https://crates.io/ + +### Adding a dependency + +If your `Cargo.toml` doesn't already have a `[dependencies]` section, add that, +then list the crate name and version that you would like to use. This example +adds a dependency of the `time` crate: + +```toml +[dependencies] +time = "0.1.12" +``` + +The version string is a [semver] version requirement. The [specifying +dependencies](reference/specifying-dependencies.html) docs have more information about +the options you have here. + +[semver]: https://github.com/steveklabnik/semver#requirements + +If we also wanted to add a dependency on the `regex` crate, we would not need +to add `[dependencies]` for each crate listed. 
Here's what your whole +`Cargo.toml` file would look like with dependencies on the `time` and `regex` +crates: + +```toml +[package] +name = "hello_world" +version = "0.1.0" +authors = ["Your Name "] + +[dependencies] +time = "0.1.12" +regex = "0.1.41" +``` + +Re-run `cargo build`, and Cargo will fetch the new dependencies and all of +their dependencies, compile them all, and update the `Cargo.lock`: + +```console +$ cargo build + Updating registry `https://github.com/rust-lang/crates.io-index` + Downloading memchr v0.1.5 + Downloading libc v0.1.10 + Downloading regex-syntax v0.2.1 + Downloading memchr v0.1.5 + Downloading aho-corasick v0.3.0 + Downloading regex v0.1.41 + Compiling memchr v0.1.5 + Compiling libc v0.1.10 + Compiling regex-syntax v0.2.1 + Compiling memchr v0.1.5 + Compiling aho-corasick v0.3.0 + Compiling regex v0.1.41 + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) +``` + +Our `Cargo.lock` contains the exact information about which revision of all of +these dependencies we used. + +Now, if `regex` gets updated, we will still build with the same revision until +we choose to `cargo update`. + +You can now use the `regex` library using `extern crate` in `main.rs`. + +```rust +extern crate regex; + +use regex::Regex; + +fn main() { + let re = Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap(); + println!("Did our date match? {}", re.is_match("2014-01-01")); +} +``` + +Running it will show: + +```console +$ cargo run + Running `target/hello_world` +Did our date match? true +``` diff --git a/src/doc/src/guide/index.md b/src/doc/src/guide/index.md new file mode 100644 index 000000000..c8a61b28d --- /dev/null +++ b/src/doc/src/guide/index.md @@ -0,0 +1,14 @@ +## Cargo Guide + +This guide will give you all that you need to know about how to use Cargo to +develop Rust projects. + +* [Why Cargo Exists](guide/why-cargo-exists.html) +* [Creating a New Project](guide/creating-a-new-project.html) +* [Working on an Existing Cargo Project](guide/working-on-an-existing-project.html) +* [Dependencies](guide/dependencies.html) +* [Project Layout](guide/project-layout.html) +* [Cargo.toml vs Cargo.lock](guide/cargo-toml-vs-cargo-lock.html) +* [Tests](guide/tests.html) +* [Continuous Integration](guide/continuous-integration.html) +* [Build Cache](guide/build-cache.html) diff --git a/src/doc/src/guide/project-layout.md b/src/doc/src/guide/project-layout.md new file mode 100644 index 000000000..300c6e5c9 --- /dev/null +++ b/src/doc/src/guide/project-layout.md @@ -0,0 +1,35 @@ +## Project Layout + +Cargo uses conventions for file placement to make it easy to dive into a new +Cargo project: + +``` +. +├── Cargo.lock +├── Cargo.toml +├── benches +│   └── large-input.rs +├── examples +│   └── simple.rs +├── src +│   ├── bin +│   │   └── another_executable.rs +│   ├── lib.rs +│   └── main.rs +└── tests + └── some-integration-tests.rs +``` + +* `Cargo.toml` and `Cargo.lock` are stored in the root of your project (*package + root*). +* Source code goes in the `src` directory. +* The default library file is `src/lib.rs`. +* The default executable file is `src/main.rs`. +* Other executables can be placed in `src/bin/*.rs`. +* Integration tests go in the `tests` directory (unit tests go in each file + they're testing). +* Examples go in the `examples` directory. +* Benchmarks go in the `benches` directory. + +These are explained in more detail in the [manifest +description](reference/manifest.html#the-project-layout). 
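+
+As an illustrative sketch (the file name here is hypothetical), an extra
+executable under `src/bin/` needs nothing beyond a `main` function; Cargo
+infers the target from the layout above:
+
+```rust
+// src/bin/another_executable.rs
+fn main() {
+    println!("Hello from another executable!");
+}
+```
+
+It can then be run with `cargo run --bin another_executable`.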
diff --git a/src/doc/src/guide/tests.md b/src/doc/src/guide/tests.md new file mode 100644 index 000000000..95d1c2d3a --- /dev/null +++ b/src/doc/src/guide/tests.md @@ -0,0 +1,39 @@ +## Tests + +Cargo can run your tests with the `cargo test` command. Cargo looks for tests +to run in two places: in each of your `src` files and any tests in `tests/`. +Tests in your `src` files should be unit tests, and tests in `tests/` should be +integration-style tests. As such, you’ll need to import your crates into +the files in `tests`. + +Here's an example of running `cargo test` in our project, which currently has +no tests: + +```console +$ cargo test + Compiling rand v0.1.0 (https://github.com/rust-lang-nursery/rand.git#9f35b8e) + Compiling hello_world v0.1.0 (file:///path/to/project/hello_world) + Running target/test/hello_world-9c2b65bbb79eabce + +running 0 tests + +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out +``` + +If our project had tests, we would see more output with the correct number of +tests. + +You can also run a specific test by passing a filter: + +```console +$ cargo test foo +``` + +This will run any test with `foo` in its name. + +`cargo test` runs additional checks as well. For example, it will compile any +examples you’ve included and will also test the examples in your +documentation. Please see the [testing guide][testing] in the Rust +documentation for more details. + +[testing]: https://doc.rust-lang.org/book/testing.html diff --git a/src/doc/src/guide/why-cargo-exists.md b/src/doc/src/guide/why-cargo-exists.md new file mode 100644 index 000000000..9c5d0d2dd --- /dev/null +++ b/src/doc/src/guide/why-cargo-exists.md @@ -0,0 +1,12 @@ +## Why Cargo Exists + +Cargo is a tool that allows Rust projects to declare their various +dependencies and ensure that you’ll always get a repeatable build. + +To accomplish this goal, Cargo does four things: + +* Introduces two metadata files with various bits of project information. +* Fetches and builds your project’s dependencies. +* Invokes `rustc` or another build tool with the correct parameters to build + your project. +* Introduces conventions to make working with Rust projects easier. diff --git a/src/doc/src/guide/working-on-an-existing-project.md b/src/doc/src/guide/working-on-an-existing-project.md new file mode 100644 index 000000000..34ee6c5a8 --- /dev/null +++ b/src/doc/src/guide/working-on-an-existing-project.md @@ -0,0 +1,22 @@ +## Working on an Existing Cargo Project + +If you download an existing project that uses Cargo, it’s really easy +to get going. + +First, get the project from somewhere. In this example, we’ll use `rand` +cloned from its repository on GitHub: + +```console +$ git clone https://github.com/rust-lang-nursery/rand.git +$ cd rand +``` + +To build, use `cargo build`: + +```console +$ cargo build + Compiling rand v0.1.0 (file:///path/to/project/rand) +``` + +This will fetch all of the dependencies and then build them, along with the +project. 
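From here the usual commands work just as they would in a project you created
yourself, for example (a sketch; output omitted):

```console
$ cargo test             # build and run the project's test suite
$ cargo build --release  # optimized build, placed in target/release
```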
diff --git a/src/doc/src/images/Cargo-Logo-Small.png b/src/doc/src/images/Cargo-Logo-Small.png
new file mode 100644
index 000000000..e3a99208c
Binary files /dev/null and b/src/doc/src/images/Cargo-Logo-Small.png differ
diff --git a/src/doc/src/images/auth-level-acl.png b/src/doc/src/images/auth-level-acl.png
new file mode 100644
index 000000000..e7bc25180
Binary files /dev/null and b/src/doc/src/images/auth-level-acl.png differ
diff --git a/src/doc/src/images/org-level-acl.png b/src/doc/src/images/org-level-acl.png
new file mode 100644
index 000000000..ed5aa882a
Binary files /dev/null and b/src/doc/src/images/org-level-acl.png differ
diff --git a/src/doc/src/index.md b/src/doc/src/index.md
new file mode 100644
index 000000000..e3fc99c3a
--- /dev/null
+++ b/src/doc/src/index.md
@@ -0,0 +1,28 @@
+# The Cargo Book

![Cargo Logo](images/Cargo-Logo-Small.png)

Cargo is the [Rust] *package manager*. Cargo downloads your Rust project’s
dependencies, compiles your project, makes packages, and uploads them to
[crates.io], the Rust community’s *package registry*.


### Sections

**[Getting Started](getting-started/index.html)**

To get started with Cargo, install Cargo (and Rust) and set up your first crate.

**[Cargo Guide](guide/index.html)**

The guide will give you all you need to know about how to use Cargo to develop
Rust projects.

**[Cargo Reference](reference/index.html)**

The reference covers the details of various areas of Cargo.

**[Frequently Asked Questions](faq.html)**

[rust]: https://www.rust-lang.org/
[crates.io]: https://crates.io/
diff --git a/src/doc/src/reference/build-scripts.md b/src/doc/src/reference/build-scripts.md
new file mode 100644
index 000000000..35a6d9ea7
--- /dev/null
+++ b/src/doc/src/reference/build-scripts.md
@@ -0,0 +1,565 @@
+## Build Scripts

Some packages need to compile third-party non-Rust code, for example C
libraries. Other packages need to link to C libraries, which can either be
located on the system or possibly need to be built from source. Still others
need facilities for functionality such as code generation before building
(think parser generators).

Cargo does not aim to replace other tools that are well-optimized for
these tasks, but it does integrate with them through the `build` configuration
option.

```toml
[package]
# ...
build = "build.rs"
```

The Rust file designated by the `build` command (relative to the package root)
will be compiled and invoked before anything else is compiled in the package,
allowing your Rust code to depend on the built or generated artifacts.
By default, Cargo looks for a `build.rs` file in the package root (even if you
do not specify a value for `build`). Use `build = "custom_build_name.rs"` to
specify a custom script path or `build = false` to disable automatic detection
of the build script.

Some example use cases of the build command are:

* Building a bundled C library.
* Finding a C library on the host system.
* Generating a Rust module from a specification.
* Performing any platform-specific configuration needed for the crate.

Each of these use cases will be detailed in full below to give examples of how
the build command works.

### Inputs to the Build Script

When the build script is run, there are a number of inputs to the build script,
all passed in the form of [environment variables][env].

In addition to environment variables, the build script’s current directory is
the source directory of the build script’s package.
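As a concrete illustration, a build script might begin by reading a few of
these variables; a minimal sketch (the variables used here are described in the
linked environment-variable documentation):

```rust
// build.rs: a minimal sketch of consuming build-script inputs
use std::env;

fn main() {
    // Both variables are set by Cargo for every build-script invocation.
    let target = env::var("TARGET").unwrap();
    let out_dir = env::var("OUT_DIR").unwrap();

    // `cargo:warning=...` lines are echoed back on the console (see below).
    println!("cargo:warning=building for {} into {}", target, out_dir);
}
```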
+
[env]: reference/environment-variables.html

### Outputs of the Build Script

All the lines printed to stdout by a build script are written to a file like
`target/debug/build/<pkg>/output` (the precise location may depend on your
configuration). If you would like to see such output directly in your terminal,
invoke cargo as 'very verbose' with the `-vv` flag. Note that if neither the
build script nor project source files are modified, subsequent calls to
cargo with `-vv` will **not** print output to the terminal because a
new build is not executed. Run `cargo clean` before each cargo invocation
if you want to ensure that output is always displayed on your terminal.

Any line that starts with `cargo:` is interpreted directly by Cargo.
This line must be of the form `cargo:key=value`, like the examples below:

```
# specially recognized by Cargo
cargo:rustc-link-lib=static=foo
cargo:rustc-link-search=native=/path/to/foo
cargo:rustc-cfg=foo
cargo:rustc-env=FOO=bar
# arbitrary user-defined metadata
cargo:root=/path/to/foo
cargo:libdir=/path/to/foo/lib
cargo:include=/path/to/foo/include
```

On the other hand, lines printed to stderr are written to a file like
`target/debug/build/<pkg>/stderr` but are not interpreted by cargo.

There are a few special keys that Cargo recognizes, some affecting how the
crate is built:

* `rustc-link-lib=[KIND=]NAME` indicates that the specified value is a library
  name and should be passed to the compiler as a `-l` flag. The optional `KIND`
  can be one of `static`, `dylib` (the default), or `framework`; see
  `rustc --help` for more details.
* `rustc-link-search=[KIND=]PATH` indicates the specified value is a library
  search path and should be passed to the compiler as a `-L` flag. The optional
  `KIND` can be one of `dependency`, `crate`, `native`, `framework` or `all`
  (the default); see `rustc --help` for more details.
* `rustc-flags=FLAGS` is a set of flags passed to the compiler; only `-l` and
  `-L` flags are supported.
* `rustc-cfg=FEATURE` indicates that the specified feature will be passed as a
  `--cfg` flag to the compiler. This is often useful for performing compile-time
  detection of various features.
* `rustc-env=VAR=VALUE` indicates that the specified environment variable
  will be added to the environment in which the compiler is run.
  The value can then be retrieved by the `env!` macro in the compiled crate.
  This is useful for embedding additional metadata in the crate's code,
  such as the hash of Git HEAD or the unique identifier of a continuous
  integration server.
* `rerun-if-changed=PATH` is a path to a file or directory which indicates that
  the build script should be re-run if it changes (detected by a more-recent
  last-modified timestamp on the file). Normally build scripts are re-run if
  any file inside the crate root changes, but this can be used to scope changes
  to just a small set of files. (If this path points to a directory, the entire
  directory will not be traversed for changes -- only changes to the timestamp
  of the directory itself (which corresponds to some types of changes within the
  directory, depending on platform) will trigger a rebuild. To request a re-run
  on any changes within an entire directory, print a line for the directory and
  another line for everything inside it, recursively.)
+  Note that if the build script itself (or one of its dependencies) changes,
  then it's rebuilt and rerun unconditionally, so
  `cargo:rerun-if-changed=build.rs` is almost always redundant (unless you
  want to ignore changes in all other files except for `build.rs`).
* `rerun-if-env-changed=VAR` is the name of an environment variable which
  indicates that if the environment variable's value changes, the build script
  should be rerun. This basically behaves the same as `rerun-if-changed` except
  that it works with environment variables instead. Note that the environment
  variables here are intended for global environment variables like `CC` and
  such; it's not necessary to use this for env vars like `TARGET` that Cargo
  sets. Also note that if `rerun-if-env-changed` is printed out, then Cargo will
  *only* rerun the build script if those environment variables change or if
  files printed out by `rerun-if-changed` change.

* `warning=MESSAGE` is a message that will be printed to the main console after
  a build script has finished running. Warnings are only shown for path
  dependencies (that is, those you're working on locally), so, for example,
  warnings printed out in crates.io crates are not emitted by default.

Any other element is user-defined metadata that will be passed to
dependents. More information about this can be found in the [`links`][links]
section.

[links]: #the-links-manifest-key

### Build Dependencies

Build scripts are also allowed to have dependencies on other Cargo-based crates.
Dependencies are declared through the `build-dependencies` section of the
manifest.

```toml
[build-dependencies]
foo = { git = "https://github.com/your-packages/foo" }
```

The build script **does not** have access to the dependencies listed in the
`dependencies` or `dev-dependencies` section (they’re not built yet!). Likewise,
build dependencies are not available to the package itself unless they are also
explicitly listed as regular dependencies.

### The `links` Manifest Key

In addition to the manifest key `build`, Cargo also supports a `links` manifest
key to declare the name of a native library that is being linked to:

```toml
[package]
# ...
links = "foo"
build = "build.rs"
```

This manifest states that the package links to the `libfoo` native library, and
it also has a build script for locating and/or building the library. Cargo
requires that a `build` command is specified if a `links` entry is also
specified.

The purpose of this manifest key is to give Cargo an understanding of the set
of native dependencies that a package has, as well as to provide a principled
system for passing metadata between package build scripts.

Primarily, Cargo requires that there is at most one package per `links` value.
In other words, it’s forbidden to have two packages link to the same native
library. Note, however, that there are [conventions in place][star-sys] to
alleviate this.

[star-sys]: #-sys-packages

As mentioned above in the output format, each build script can generate an
arbitrary set of metadata in the form of key-value pairs. This metadata is
passed to the build scripts of **dependent** packages. For example, if `libbar`
depends on `libfoo` and `libfoo` generates `key=value` as part of its
metadata, then the build script of `libbar` will have the environment variable
`DEP_FOO_KEY=value` set.

Note that metadata is only passed to immediate dependents, not transitive
dependents.
The motivation for this metadata passing is outlined in the linking
to system libraries case study below.

### Overriding Build Scripts

If a manifest contains a `links` key, then Cargo supports overriding the build
script specified with a custom library. The purpose of this functionality is to
prevent running the build script in question altogether and instead supply the
metadata ahead of time.

To override a build script, place the following configuration in any acceptable
Cargo [configuration location](reference/config.html).

```toml
[target.x86_64-unknown-linux-gnu.foo]
rustc-link-search = ["/path/to/foo"]
rustc-link-lib = ["foo"]
root = "/path/to/foo"
key = "value"
```

This section states that for the target `x86_64-unknown-linux-gnu` the library
named `foo` has the metadata specified. This metadata is the same as the
metadata generated as if the build script had run, providing a number of
key/value pairs where the `rustc-flags`, `rustc-link-search`, and
`rustc-link-lib` keys are slightly special.

With this configuration, if a package declares that it links to `foo` then the
build script will **not** be compiled or run, and the metadata specified will
instead be used.

### Case study: Code generation

Some Cargo packages need to have code generated just before they are compiled
for various reasons. Here we’ll walk through a simple example in which a Rust
function is generated as part of the build script.

First, let’s take a look at the directory structure of this package:

```
.
├── Cargo.toml
├── build.rs
└── src
    └── main.rs

1 directory, 3 files
```

Here we can see that we have a `build.rs` build script and our binary in
`main.rs`. Next, let’s take a look at the manifest:

```toml
# Cargo.toml

[package]
name = "hello-from-generated-code"
version = "0.1.0"
authors = ["you@example.com"]
build = "build.rs"
```

Here we can see we’ve got a build script specified which we’ll use to generate
some code. Let’s see what’s inside the build script:

```rust,no_run
// build.rs

use std::env;
use std::fs::File;
use std::io::Write;
use std::path::Path;

fn main() {
    let out_dir = env::var("OUT_DIR").unwrap();
    let dest_path = Path::new(&out_dir).join("hello.rs");
    let mut f = File::create(&dest_path).unwrap();

    f.write_all(b"
        pub fn message() -> &'static str {
            \"Hello, World!\"
        }
    ").unwrap();
}
```

There are a couple of points to note here:

* The script uses the `OUT_DIR` environment variable to discover where the
  output files should be located. It can use the process’ current working
  directory to find where the input files should be located, but in this case we
  don’t have any input files.
* This script is relatively simple as it just writes out a small generated file.
  One could imagine that other more fanciful operations could take place such as
  generating a Rust module from a C header file or another language definition,
  for example.

Next, let’s peek at the crate itself:

```rust,ignore
// src/main.rs

include!(concat!(env!("OUT_DIR"), "/hello.rs"));

fn main() {
    println!("{}", message());
}
```

This is where the real magic happens. The crate is using the rustc-defined
`include!` macro in combination with the `concat!` and `env!` macros to include
the generated file (`hello.rs`) into the crate’s compilation.

Using the structure shown here, crates can include any number of generated files
from the build script itself.
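If everything is wired up correctly, running the package prints the generated
message. A plausible run looks something like this (a sketch; paths will vary):

```console
$ cargo run
   Compiling hello-from-generated-code v0.1.0 (file:///path/to/project/hello-from-generated-code)
     Running `target/debug/hello-from-generated-code`
Hello, World!
```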
+
### Case study: Building some native code

Sometimes it’s necessary to build some native C or C++ code as part of a
package. This is another excellent use case for leveraging the build script to
build a native library before the Rust crate itself. As an example, we’ll create
a Rust library which calls into C to print “Hello, World!”.

Like above, let’s first take a look at the project layout:

```
.
├── Cargo.toml
├── build.rs
└── src
    ├── hello.c
    └── main.rs

1 directory, 4 files
```

Pretty similar to before! Next, the manifest:

```toml
# Cargo.toml

[package]
name = "hello-world-from-c"
version = "0.1.0"
authors = ["you@example.com"]
build = "build.rs"
```

For now we’re not going to use any build dependencies, so let’s take a look at
the build script now:

```rust,no_run
// build.rs

use std::process::Command;
use std::env;
use std::path::Path;

fn main() {
    let out_dir = env::var("OUT_DIR").unwrap();

    // note that there are a number of downsides to this approach, the comments
    // below detail how to improve the portability of these commands.
    Command::new("gcc").args(&["src/hello.c", "-c", "-fPIC", "-o"])
                       .arg(&format!("{}/hello.o", out_dir))
                       .status().unwrap();
    Command::new("ar").args(&["crus", "libhello.a", "hello.o"])
                      .current_dir(&Path::new(&out_dir))
                      .status().unwrap();

    println!("cargo:rustc-link-search=native={}", out_dir);
    println!("cargo:rustc-link-lib=static=hello");
}
```

This build script starts out by compiling our C file into an object file (by
invoking `gcc`) and then converting this object file into a static library (by
invoking `ar`). The final step is to feed back to Cargo that our output was in
`out_dir` and that the compiler should link the crate to `libhello.a`
statically via the `-l static=hello` flag.

Note that there are a number of drawbacks to this hardcoded approach:

* The `gcc` command itself is not portable across platforms. For example it’s
  unlikely that Windows platforms have `gcc`, and not even all Unix platforms
  may have `gcc`. The `ar` command is also in a similar situation.
* These commands do not take cross-compilation into account. If we’re cross
  compiling for a platform such as Android it’s unlikely that `gcc` will produce
  an ARM executable.

Not to fear, though, this is where a `build-dependencies` entry would help! The
Cargo ecosystem has a number of packages to make this sort of task much easier,
portable, and standardized. For example, the build script could be written as:

```rust,ignore
// build.rs

// Bring in a dependency on an externally maintained `cc` package which manages
// invoking the C compiler.
extern crate cc;

fn main() {
    cc::Build::new()
        .file("src/hello.c")
        .compile("hello");
}
```

Add a build-time dependency on the `cc` crate with the following addition to
your `Cargo.toml`:

```toml
[build-dependencies]
cc = "1.0"
```

The [`cc` crate](https://crates.io/crates/cc) abstracts a range of build
script requirements for C code:

* It invokes the appropriate compiler (MSVC for Windows, `gcc` for MinGW, `cc`
  for Unix platforms, etc.).
* It takes the `TARGET` variable into account by passing appropriate flags to
  the compiler being used.
* Other environment variables, such as `OPT_LEVEL`, `DEBUG`, etc., are all
  handled automatically.
* The stdout output and `OUT_DIR` locations are also handled by the `cc`
  library.
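The builder also exposes knobs for common needs. A small sketch, assuming the
`cc` 1.0 API (`include` adds a header search path, and `flag_if_supported`
probes the compiler before passing a flag):

```rust
// build.rs: a slightly fuller sketch of driving the `cc` crate
extern crate cc;

fn main() {
    cc::Build::new()
        .file("src/hello.c")
        .include("src")                // add src/ to the include path (-I)
        .flag_if_supported("-Wall")    // passed only if the compiler accepts it
        .compile("hello");
}
```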
+
Here we can start to see some of the major benefits of farming as much
functionality as possible out to common build dependencies rather than
duplicating logic across all build scripts!

Back to the case study though, let’s take a quick look at the contents of the
`src` directory:

```c
// src/hello.c

#include <stdio.h>

void hello() {
    printf("Hello, World!\n");
}
```

```rust,ignore
// src/main.rs

// Note the lack of the `#[link]` attribute. We’re delegating the responsibility
// of selecting what to link to over to the build script rather than hardcoding
// it in the source file.
extern { fn hello(); }

fn main() {
    unsafe { hello(); }
}
```

And there we go! This completes our example of building some C code from a
Cargo package using the build script itself. It also shows why using a build
dependency can be crucial in many situations, and often much more concise!

We’ve also seen a brief example of how a build script can use a crate as a
dependency purely for the build process and not for the crate itself at runtime.

### Case study: Linking to system libraries

The final case study here will be investigating how a Cargo library links to a
system library and how the build script is leveraged to support this use case.

Quite frequently a Rust crate wants to link to a native library provided on the
system, either to bind its functionality or just to use it as part of an
implementation detail. Performing this in a platform-agnostic fashion is quite
a nuanced problem, and the purpose of a build script is again to farm out as
much of this as possible to make this as easy as possible for consumers.

As an example to follow, let’s take a look at one of [Cargo’s own
dependencies][git2-rs], [libgit2][libgit2]. The C library has a number of
constraints:

[git2-rs]: https://github.com/alexcrichton/git2-rs/tree/master/libgit2-sys
[libgit2]: https://github.com/libgit2/libgit2

* It has an optional dependency on OpenSSL on Unix to implement the https
  transport.
* It has an optional dependency on libssh2 on all platforms to implement the ssh
  transport.
* It is often not installed on all systems by default.
* It can be built from source using `cmake`.

To visualize what’s going on here, let’s take a look at the manifest for the
relevant Cargo package that links to the native C library.

```toml
[package]
name = "libgit2-sys"
version = "0.1.0"
authors = ["..."]
links = "git2"
build = "build.rs"

[dependencies]
libssh2-sys = { git = "https://github.com/alexcrichton/ssh2-rs" }

[target.'cfg(unix)'.dependencies]
openssl-sys = { git = "https://github.com/alexcrichton/openssl-sys" }

# ...
```

As the manifest above shows, we’ve got a `build` script specified, but it’s
worth noting that this example has a `links` entry which indicates that the
crate (`libgit2-sys`) links to the `git2` native library.

Here we also see that we chose to have the Rust crate have an unconditional
dependency on `libssh2` via the `libssh2-sys` crate, as well as a
platform-specific dependency on `openssl-sys` for \*nix (other variants elided
for now). It may seem a little counterintuitive to express *C dependencies* in
the *Cargo manifest*, but this is actually using one of Cargo’s conventions in
this space.

### `*-sys` Packages

To alleviate linking to system libraries, Cargo has a *convention* of package
naming and functionality.
Any package named `foo-sys` will provide two major
pieces of functionality:

* The library crate will link to the native library `libfoo`. This will often
  probe the current system for `libfoo` before resorting to building from
  source.
* The library crate will provide **declarations** for functions in `libfoo`,
  but it does **not** provide bindings or higher-level abstractions.

The set of `*-sys` packages provides a common set of dependencies for linking
to native libraries. There are a number of benefits earned from having this
convention of native-library-related packages:

* Common dependencies on `foo-sys` alleviate the above rule about one package
  per value of `links`.
* A common dependency allows centralizing logic on discovering `libfoo` itself
  (or building it from source).
* These dependencies are easily overridable.

### Building libgit2

Now that we’ve got libgit2’s dependencies sorted out, we need to actually write
the build script. We’re not going to look at specific snippets of code here and
instead only take a look at the high-level details of the build script of
`libgit2-sys`. This is not recommending that all packages follow this strategy,
but rather just outlining one specific strategy.

The first step the build script should take is to query whether libgit2 is
already installed on the host system. To do this we’ll leverage the preexisting
tool `pkg-config` (when it’s available). We’ll also use a `build-dependencies`
section to refactor out all the `pkg-config` related code (or someone’s already
done that!).

If `pkg-config` failed to find libgit2, or if `pkg-config` just wasn’t
installed, the next step is to build libgit2 from bundled source code
(distributed as part of `libgit2-sys` itself). There are a few nuances when
doing so that we need to take into account, however:

* The build system of libgit2, `cmake`, needs to be able to find libgit2’s
  optional dependency of libssh2. We’re sure we’ve already built it (it’s a
  Cargo dependency), we just need to communicate this information. To do this
  we leverage the metadata format to communicate information between build
  scripts. In this example the libssh2 package printed out `cargo:root=...` to
  tell us where libssh2 is installed, and we can then pass this along to
  cmake with the `CMAKE_PREFIX_PATH` environment variable.

* We’ll need to handle some `CFLAGS` values when compiling C code (and tell
  `cmake` about this). Some flags we may want to pass are `-m64` for 64-bit
  code, `-m32` for 32-bit code, or `-fPIC` for 64-bit code as well.

* Finally, we’ll invoke `cmake` to place all output into the `OUT_DIR`
  environment variable, and then we’ll print the necessary metadata to instruct
  rustc how to link to libgit2.

Most of the functionality of this build script is easily refactorable into
common dependencies, so our build script isn’t quite as intimidating as this
description! In reality it’s expected that build scripts are quite succinct,
farming out logic such as the above to build dependencies.
diff --git a/src/doc/src/reference/config.md b/src/doc/src/reference/config.md
new file mode 100644
index 000000000..d72e9a75d
--- /dev/null
+++ b/src/doc/src/reference/config.md
@@ -0,0 +1,141 @@
+## Configuration

This document will explain how Cargo’s configuration system works, as well as
the available configuration keys. For configuration of a project through its
manifest, see the [manifest format](reference/manifest.html).
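For orientation, here is a small sketch of what such a file can contain; the
keys shown here are drawn from the reference below:

```toml
# .cargo/config: a minimal sketch
[build]
jobs = 4       # limit parallel compilation

[alias]
b = "build"    # `cargo b` now runs `cargo build`
```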
+ +### Hierarchical structure + + +Cargo allows local configuration for a particular project as well as global +configuration, like git. Cargo extends this to a hierarchical strategy. +If, for example, Cargo were invoked in `/projects/foo/bar/baz`, then the +following configuration files would be probed for and unified in this order: + +* `/projects/foo/bar/baz/.cargo/config` +* `/projects/foo/bar/.cargo/config` +* `/projects/foo/.cargo/config` +* `/projects/.cargo/config` +* `/.cargo/config` +* `$HOME/.cargo/config` + +With this structure, you can specify configuration per-project, and even +possibly check it into version control. You can also specify personal defaults +with a configuration file in your home directory. + +### Configuration format + +All configuration is currently in the [TOML format][toml] (like the manifest), +with simple key-value pairs inside of sections (tables) which all get merged +together. + +[toml]: https://github.com/toml-lang/toml + +### Configuration keys + +All of the following keys are optional, and their defaults are listed as their +value unless otherwise noted. + +Key values that specify a tool may be given as an absolute path, a relative path +or as a pathless tool name. Absolute paths and pathless tool names are used as +given. Relative paths are resolved relative to the parent directory of the +`.cargo` directory of the config file that the value resides within. + +```toml +# An array of paths to local repositories which are to be used as overrides for +# dependencies. For more information see the Specifying Dependencies guide. +paths = ["/path/to/override"] + +[cargo-new] +# This is your name/email to place in the `authors` section of a new Cargo.toml +# that is generated. If not present, then `git` will be probed, and if that is +# not present then `$USER` and `$EMAIL` will be used. +name = "..." +email = "..." + +# By default `cargo new` will initialize a new Git repository. This key can be +# set to `hg` to create a Mercurial repository, or `none` to disable this +# behavior. +vcs = "none" + +# For the following sections, $triple refers to any valid target triple, not the +# literal string "$triple", and it will apply whenever that target triple is +# being compiled to. 'cfg(...)' refers to the Rust-like `#[cfg]` syntax for +# conditional compilation. +[target.$triple] +# This is the linker which is passed to rustc (via `-C linker=`) when the `$triple` +# is being compiled for. By default this flag is not passed to the compiler. +linker = ".." +# Same but for the library archiver which is passed to rustc via `-C ar=`. +ar = ".." +# If a runner is provided, compiled targets for the `$triple` will be executed +# by invoking the specified runner executable with actual target as first argument. +# This applies to `cargo run`, `cargo test` and `cargo bench` commands. +# By default compiled targets are executed directly. +runner = ".." +# custom flags to pass to all compiler invocations that target $triple +# this value overrides build.rustflags when both are present +rustflags = ["..", ".."] + +[target.'cfg(...)'] +# Similar for the $triple configuration, but using the `cfg` syntax. +# If several `cfg` and $triple targets are candidates, then the rustflags +# are concatenated. The `cfg` syntax only applies to rustflags, and not to +# linker. +rustflags = ["..", ".."] + +# Configuration keys related to the registry +[registry] +index = "..." # URL of the registry index (defaults to the central repository) +token = "..." 
# Access token (found on the central repo’s website)

[http]
proxy = "host:port" # HTTP proxy to use for HTTP requests (defaults to none)
                    # in libcurl format, e.g. "socks5h://host:port"
timeout = 60000     # Timeout for each HTTP request, in milliseconds
cainfo = "cert.pem" # Path to Certificate Authority (CA) bundle (optional)
check-revoke = true # Indicates whether SSL certs are checked for revocation

[build]
jobs = 1                  # number of parallel jobs, defaults to # of CPUs
rustc = "rustc"           # the rust compiler tool
rustdoc = "rustdoc"       # the doc generator tool
target = "triple"         # build for the target triple
target-dir = "target"     # path of where to place all generated artifacts
rustflags = ["..", ".."]  # custom flags to pass to all compiler invocations
incremental = true        # whether or not to enable incremental compilation
dep-info-basedir = ".."   # full path for the base directory for targets in depfiles

[term]
verbose = false # whether cargo provides verbose output
color = 'auto'  # whether cargo colorizes output

# Network configuration
[net]
retry = 2 # number of times a network call will automatically be retried

# Alias cargo commands. The first 3 aliases are built in. If your
# command requires grouped whitespace, use the list format.
[alias]
b = "build"
t = "test"
r = "run"
rr = "run --release"
space_example = ["run", "--release", "--", "\"command list\""]
```

### Environment variables

Cargo can also be configured through environment variables in addition to the
TOML syntax above. For each configuration key above of the form `foo.bar` the
environment variable `CARGO_FOO_BAR` can also be used to define the value. For
example the `build.jobs` key can also be defined by `CARGO_BUILD_JOBS`.

Environment variables will take precedence over TOML configuration, and
currently only integer, boolean, and string keys can be defined via
environment variables.

In addition to the system above, Cargo recognizes a few other specific
[environment variables][env].

[env]: reference/environment-variables.html
diff --git a/src/doc/src/reference/environment-variables.md b/src/doc/src/reference/environment-variables.md
new file mode 100644
index 000000000..b5f81e2d2
--- /dev/null
+++ b/src/doc/src/reference/environment-variables.md
@@ -0,0 +1,136 @@
+## Environment Variables

Cargo sets and reads a number of environment variables which your code can detect
or override. Here is a list of the variables Cargo sets, organized by when it interacts
with them:

### Environment variables Cargo reads

You can override these environment variables to change Cargo's behavior on your
system:

* `CARGO_HOME` - Cargo maintains a local cache of the registry index and of git
  checkouts of crates. By default these are stored under `$HOME/.cargo`, but
  this variable overrides the location of this directory. Once a crate is cached
  it is not removed by the clean command.
* `CARGO_TARGET_DIR` - Location of where to place all generated artifacts,
  relative to the current working directory.
* `RUSTC` - Instead of running `rustc`, Cargo will execute this specified
  compiler.
* `RUSTC_WRAPPER` - Instead of simply running `rustc`, Cargo will execute this
  specified wrapper, passing as its commandline arguments the rustc
  invocation, with the first argument being rustc.
* `RUSTDOC` - Instead of running `rustdoc`, Cargo will execute this specified
  `rustdoc` instance.
+
* `RUSTDOCFLAGS` - A space-separated list of custom flags to pass to all `rustdoc`
  invocations that Cargo performs. In contrast with `cargo rustdoc`, this is
  useful for passing a flag to *all* `rustdoc` instances.
* `RUSTFLAGS` - A space-separated list of custom flags to pass to all compiler
  invocations that Cargo performs. In contrast with `cargo rustc`, this is
  useful for passing a flag to *all* compiler instances.
* `CARGO_INCREMENTAL` - If this is set to 1 then Cargo will force incremental
  compilation to be enabled for the current compilation, and when set to 0 it
  will force disabling it. If this env var isn't present, then Cargo's defaults
  will be used.

Note that Cargo will also read environment variables for `.cargo/config`
configuration values, as described in [that documentation][config-env].

[config-env]: reference/config.html#environment-variables

### Environment variables Cargo sets for crates

Cargo exposes these environment variables to your crate when it is compiled.
Note that this applies for test binaries as well.
To get the value of any of these variables in a Rust program, do this:

```rust
let version = env!("CARGO_PKG_VERSION");
```

`version` will now contain the value of `CARGO_PKG_VERSION`.

* `CARGO` - Path to the `cargo` binary performing the build.
* `CARGO_MANIFEST_DIR` - The directory containing the manifest of your package.
* `CARGO_PKG_VERSION` - The full version of your package.
* `CARGO_PKG_VERSION_MAJOR` - The major version of your package.
* `CARGO_PKG_VERSION_MINOR` - The minor version of your package.
* `CARGO_PKG_VERSION_PATCH` - The patch version of your package.
* `CARGO_PKG_VERSION_PRE` - The pre-release version of your package.
* `CARGO_PKG_AUTHORS` - Colon separated list of authors from the manifest of your package.
* `CARGO_PKG_NAME` - The name of your package.
* `CARGO_PKG_DESCRIPTION` - The description of your package.
* `CARGO_PKG_HOMEPAGE` - The home page of your package.
* `OUT_DIR` - If the package has a build script, this is set to the folder where the build
  script should place its output. See below for more information.

### Environment variables Cargo sets for build scripts

Cargo sets several environment variables when build scripts are run. Because these variables
are not yet set when the build script is compiled, the above example using `env!` won't work
and instead you'll need to retrieve the values when the build script is run:

```rust
use std::env;
let out_dir = env::var("OUT_DIR").unwrap();
```

`out_dir` will now contain the value of `OUT_DIR`.

* `CARGO` - Path to the `cargo` binary performing the build.
* `CARGO_MANIFEST_DIR` - The directory containing the manifest for the package
  being built (the package containing the build script). Also note that this is
  the value of the current working directory of the build script when it starts.
* `CARGO_MANIFEST_LINKS` - the manifest `links` value.
* `CARGO_FEATURE_<name>` - For each activated feature of the package being
  built, this environment variable will be present where `<name>` is the name
  of the feature uppercased and having `-` translated to `_`.
* `CARGO_CFG_<cfg>` - For each [configuration option][configuration] of the
  package being built, this environment variable will contain the value of the
  configuration, where `<cfg>` is the name of the configuration uppercased and
  having `-` translated to `_`.
  Boolean configurations are present if they are set, and
  not present otherwise.
+  Configurations with multiple values are joined to a
  single variable with the values delimited by `,`.
* `OUT_DIR` - the folder in which all output should be placed. This folder is
  inside the build directory for the package being built, and it is
  unique for the package in question.
* `TARGET` - the target triple that is being compiled for. Native code should be
  compiled for this triple. Some more information about target
  triples can be found in [clang’s own documentation][clang].
* `HOST` - the host triple of the rust compiler.
* `NUM_JOBS` - the parallelism specified as the top-level parallelism. This can
  be useful to pass a `-j` parameter to a system like `make`. Note
  that care should be taken when interpreting this environment
  variable. For historical purposes this is still provided, but
  recent versions of Cargo, for example, do not need to run `make
  -j` as it'll automatically happen. Cargo implements its own
  [jobserver] and will allow build scripts to inherit this
  information, so programs compatible with GNU make jobservers will
  already have appropriately configured parallelism.
* `OPT_LEVEL`, `DEBUG` - values of the corresponding variables for the
  profile currently being built.
* `PROFILE` - `release` for release builds, `debug` for other builds.
* `DEP_<name>_<key>` - For more information about this set of environment
  variables, see build script documentation about [`links`][links].
* `RUSTC`, `RUSTDOC` - the compiler and documentation generator that Cargo has
  resolved to use, passed to the build script so it might
  use it as well.

[links]: reference/build-scripts.html#the-links-manifest-key
[profile]: reference/manifest.html#the-profile-sections
[configuration]: https://doc.rust-lang.org/reference/attributes.html#conditional-compilation
[clang]: http://clang.llvm.org/docs/CrossCompilation.html#target-triple
[jobserver]: https://www.gnu.org/software/make/manual/html_node/Job-Slots.html

### Environment variables Cargo sets for 3rd party subcommands

Cargo exposes this environment variable to 3rd party subcommands
(i.e. programs named `cargo-foobar` placed in `$PATH`):

* `CARGO` - Path to the `cargo` binary performing the build.
diff --git a/src/doc/src/reference/external-tools.md b/src/doc/src/reference/external-tools.md
new file mode 100644
index 000000000..0ba2c5186
--- /dev/null
+++ b/src/doc/src/reference/external-tools.md
@@ -0,0 +1,103 @@
+## External tools

One of the goals of Cargo is simple integration with third-party tools, like
IDEs and other build systems. To make integration easier, Cargo has several
facilities:

* a `cargo metadata` command, which outputs project structure and dependencies
  information in JSON,

* a `--message-format` flag, which outputs information about a particular build,
  and

* support for custom subcommands.


### Information about project structure

You can use the `cargo metadata` command to get information about project
structure and dependencies. The output of the command looks like this:

```text
{
    // Integer version number of the format.
    "version": integer,

    // List of packages for this workspace, including dependencies.
    "packages": [
        {
            // Opaque package identifier.
            "id": PackageId,

            "name": string,

            "version": string,

            "source": SourceId,

            // A list of declared dependencies, see `resolve` field for actual dependencies.
+           "dependencies": [ Dependency ],

            "targets": [ Target ],

            // Path to Cargo.toml
            "manifest_path": string,
        }
    ],

    "workspace_members": [ PackageId ],

    // Dependencies graph.
    "resolve": {
        "nodes": [
            {
                "id": PackageId,
                "dependencies": [ PackageId ]
            }
        ]
    }
}
```

The format is stable and versioned. When calling `cargo metadata`, you should
pass the `--format-version` flag explicitly to avoid forward-incompatibility
hazards.

If you are using Rust, there is the [cargo_metadata] crate.

[cargo_metadata]: https://crates.io/crates/cargo_metadata


### Information about build

When passing `--message-format=json`, Cargo will output the following
information during the build:

* compiler errors and warnings,

* produced artifacts,

* results of the build scripts (for example, native dependencies).

The output goes to stdout, one JSON object per line. The `reason` field
distinguishes different kinds of messages.

Information about dependencies in the Makefile-compatible format is stored in
the `.d` files alongside the artifacts.


### Custom subcommands

Cargo is designed to be extensible with new subcommands without having to modify
Cargo itself. This is achieved by translating a cargo invocation of the form
cargo `(?<command>[^ ]+)` into an invocation of an external tool
`cargo-${command}` that then needs to be present in one of the user's `$PATH`
directories.

A custom subcommand may use the `CARGO` environment variable to call back to
Cargo. Alternatively, it can link to the `cargo` crate as a library, but this
approach has drawbacks:

* Cargo as a library is unstable; the API changes without deprecation,

* the versions of the Cargo library and the Cargo binary may differ.
diff --git a/src/doc/src/reference/index.md b/src/doc/src/reference/index.md
new file mode 100644
index 000000000..634dd34f8
--- /dev/null
+++ b/src/doc/src/reference/index.md
@@ -0,0 +1,13 @@
+## Cargo Reference

The reference covers the details of various areas of Cargo.

* [Specifying Dependencies](reference/specifying-dependencies.html)
* [The Manifest Format](reference/manifest.html)
* [Configuration](reference/config.html)
* [Environment Variables](reference/environment-variables.html)
* [Build Scripts](reference/build-scripts.html)
* [Publishing on crates.io](reference/publishing.html)
* [Package ID Specifications](reference/pkgid-spec.html)
* [Source Replacement](reference/source-replacement.html)
* [External Tools](reference/external-tools.html)
diff --git a/src/doc/src/reference/manifest.md b/src/doc/src/reference/manifest.md
new file mode 100644
index 000000000..ebc539dc3
--- /dev/null
+++ b/src/doc/src/reference/manifest.md
@@ -0,0 +1,824 @@
+## The Manifest Format

The `Cargo.toml` file for each package is called its *manifest*. Every manifest
file consists of one or more sections.

### The `[package]` section

The first section in a `Cargo.toml` is `[package]`.

```toml
[package]
name = "hello_world" # the name of the package
version = "0.1.0"    # the current version, obeying semver
authors = ["you@example.com"]
```

All three of these fields are mandatory.

#### The `version` field

Cargo bakes in the concept of [Semantic
Versioning](http://semver.org/), so make sure you follow some basic rules:

* Before you reach 1.0.0, anything goes, but if you make breaking changes,
  increment the minor version. In Rust, breaking changes include adding fields to
  structs or variants to enums.
+
* After 1.0.0, only make breaking changes when you increment the major version.
  Don’t break the build.
* After 1.0.0, don’t add any new public API (no new `pub` anything) in tiny
  versions. Always increment the minor version if you add any new `pub` structs,
  traits, fields, types, functions, methods or anything else.
* Use version numbers with three numeric parts such as 1.0.0 rather than 1.0.

#### The `build` field (optional)

This field specifies a file in the project root which is a [build script][1] for
building native code. More information can be found in the build script
[guide][1].

[1]: reference/build-scripts.html

```toml
[package]
# ...
build = "build.rs"
```

#### The `documentation` field (optional)

This field specifies a URL to a website hosting the crate's documentation.
If no URL is specified in the manifest file, [crates.io][cratesio] will
automatically link your crate to the corresponding [docs.rs][docsrs] page.

Documentation links from specific hosts are blacklisted. Hosts are added
to the blacklist if they are known to not be hosting documentation and are
possibly of malicious intent, e.g. ad tracking networks. URLs from the
following hosts are blacklisted:

* rust-ci.org

Documentation URLs from blacklisted hosts will not appear on crates.io, and
may be replaced by docs.rs links.

[docsrs]: https://docs.rs/
[cratesio]: https://crates.io/

#### The `exclude` and `include` fields (optional)

You can explicitly specify to Cargo that a set of [globs][globs] should be
ignored or included for the purposes of packaging and rebuilding a package. The
globs specified in the `exclude` field identify a set of files that are not
included when a package is published as well as ignored for the purposes of
detecting when to rebuild a package, and the globs in `include` specify files
that are explicitly included.

If a VCS is being used for a package, the `exclude` field will be seeded with
the VCS’ ignore settings (`.gitignore` for git for example).

```toml
[package]
# ...
exclude = ["build/**/*.o", "doc/**/*.html"]
```

```toml
[package]
# ...
include = ["src/**/*", "Cargo.toml"]
```

The options are mutually exclusive: setting `include` will override an
`exclude`. Note that `include` must be an exhaustive list of files, as otherwise
necessary source files may not be included.

[globs]: http://doc.rust-lang.org/glob/glob/struct.Pattern.html

#### Migrating to `gitignore`-like pattern matching

The current interpretation of these configs is based on UNIX Globs, as
implemented in the [`glob` crate](https://crates.io/crates/glob). We want
Cargo's `include` and `exclude` configs to work as similarly to `gitignore` as
possible. [The `gitignore` specification](https://git-scm.com/docs/gitignore) is
also based on Globs, but has a bunch of additional features that enable easier
pattern writing and more control. Therefore, we are migrating the interpretation
of the rules of these configs to use the [`ignore`
crate](https://crates.io/crates/ignore), treating each rule as a single
line in a `gitignore` file. See [the tracking
issue](https://github.com/rust-lang/cargo/issues/4268) for more details on the
migration.

#### The `publish` field (optional)

The `publish` field can be used to prevent a package from being published to a
package registry (like *crates.io*) by mistake.

```toml
[package]
# ...
+publish = false +``` + +#### The `workspace` field (optional) + +The `workspace` field can be used to configure the workspace that this package +will be a member of. If not specified this will be inferred as the first +Cargo.toml with `[workspace]` upwards in the filesystem. + +```toml +[package] +# ... +workspace = "path/to/workspace/root" +``` + +For more information, see the documentation for the workspace table below. + +#### Package metadata + +There are a number of optional metadata fields also accepted under the +`[package]` section: + +```toml +[package] +# ... + +# A short blurb about the package. This is not rendered in any format when +# uploaded to crates.io (aka this is not markdown). +description = "..." + +# These URLs point to more information about the package. These are +# intended to be webviews of the relevant data, not necessarily compatible +# with VCS tools and the like. +documentation = "..." +homepage = "..." +repository = "..." + +# This points to a file under the package root (relative to this `Cargo.toml`). +# The contents of this file are stored and indexed in the registry. +# crates.io will render this file and place the result on the crate's page. +readme = "..." + +# This is a list of up to five keywords that describe this crate. Keywords +# are searchable on crates.io, and you may choose any words that would +# help someone find this crate. +keywords = ["...", "..."] + +# This is a list of up to five categories where this crate would fit. +# Categories are a fixed list available at crates.io/category_slugs, and +# they must match exactly. +categories = ["...", "..."] + +# This is an SPDX 2.1 license expression for this package. Currently +# crates.io will validate the license provided against a whitelist of +# known license and exception identifiers from the SPDX license list +# 2.4. Parentheses are not currently supported. +# +# Multiple licenses can be separated with a `/`, although that usage +# is deprecated. Instead, use a license expression with AND and OR +# operators to get more explicit semantics. +license = "..." + +# If a project is using a nonstandard license, then this key may be specified in +# lieu of the above key and must point to a file relative to this manifest +# (similar to the readme key). +license-file = "..." + +# Optional specification of badges to be displayed on crates.io. +# +# - The badges pertaining to build status that are currently available are +# Appveyor, CircleCI, GitLab, and TravisCI. +# - Available badges pertaining to code test coverage are Codecov and +# Coveralls. +# - There are also maintenance-related badges based on isitmaintained.com +# which state the issue resolution time, percent of open issues, and future +# maintenance intentions. +# +# If a `repository` key is required, this refers to a repository in +# `user/repo` format. +[badges] + +# Appveyor: `repository` is required. `branch` is optional; default is `master` +# `service` is optional; valid values are `github` (default), `bitbucket`, and +# `gitlab`; `id` is optional; you can specify the appveyor project id if you +# want to use that instead. `project_name` is optional; use when the repository +# name differs from the appveyor project name. +appveyor = { repository = "...", branch = "master", service = "github" } + +# Circle CI: `repository` is required. `branch` is optional; default is `master` +circle-ci = { repository = "...", branch = "master" } + +# GitLab: `repository` is required. 
# `branch` is optional; default is `master`
gitlab = { repository = "...", branch = "master" }

# Travis CI: `repository` in format "<user>/<project>" is required.
# `branch` is optional; default is `master`
travis-ci = { repository = "...", branch = "master" }

# Codecov: `repository` is required. `branch` is optional; default is `master`
# `service` is optional; valid values are `github` (default), `bitbucket`, and
# `gitlab`.
codecov = { repository = "...", branch = "master", service = "github" }

# Coveralls: `repository` is required. `branch` is optional; default is `master`
# `service` is optional; valid values are `github` (default) and `bitbucket`.
coveralls = { repository = "...", branch = "master", service = "github" }

# Is it maintained resolution time: `repository` is required.
is-it-maintained-issue-resolution = { repository = "..." }

# Is it maintained percentage of open issues: `repository` is required.
is-it-maintained-open-issues = { repository = "..." }

# Maintenance: `status` is required. Available options are `actively-developed`,
# `passively-maintained`, `as-is`, `none`, `experimental`, `looking-for-maintainer`
# and `deprecated`.
maintenance = { status = "..." }
```

The [crates.io](https://crates.io) registry will render the description, display
the license, link to the three URLs and categorize by the keywords. These keys
provide useful information to users of the registry and also influence the
search ranking of a crate. It is highly discouraged to omit all of them in a
published crate.

SPDX 2.1 license expressions are documented
[here][spdx-2.1-license-expressions]. The current version of the
license list is available [here][spdx-license-list], and version 2.4
is available [here][spdx-license-list-2.4].

#### The `metadata` table (optional)

Cargo by default will warn about unused keys in `Cargo.toml` to assist in
detecting typos and such. The `package.metadata` table, however, is completely
ignored by Cargo and will not be warned about. This section can be used for
tools which would like to store project configuration in `Cargo.toml`. For
example:

```toml
[package]
name = "..."
# ...

# Metadata used when generating an Android APK, for example.
[package.metadata.android]
package-name = "my-awesome-android-app"
assets = "path/to/static"
```

### Dependency sections

See the [specifying dependencies page](reference/specifying-dependencies.html) for
information on the `[dependencies]`, `[dev-dependencies]`,
`[build-dependencies]`, and target-specific `[target.*.dependencies]` sections.

### The `[profile.*]` sections

Cargo supports custom configuration of how rustc is invoked through profiles at
the top level. Any manifest may declare a profile, but only the top level
project’s profiles are actually read. All dependencies’ profiles will be
overridden. This is done so the top-level project has control over how its
dependencies are compiled.

There are five currently supported profile names, all of which have the same
configuration available to them. Listed below is the configuration available,
along with the defaults for each profile.

```toml
# The development profile, used for `cargo build`.
[profile.dev]
opt-level = 0      # controls the `--opt-level` the compiler builds with.
                   # 0-1 is good for debugging. 2 is well-optimized. Max is 3.
debug = true       # include debug information (debug symbols). Equivalent to
                   # `-C debuginfo=2` compiler flag.
+rpath = false # controls whether compiler should set loader paths. + # If true, passes `-C rpath` flag to the compiler. +lto = false # Link Time Optimization usually reduces size of binaries + # and static libraries. Increases compilation time. + # If true, passes `-C lto` flag to the compiler, and if a + # string is specified like 'thin' then `-C lto=thin` will + # be passed. +debug-assertions = true # controls whether debug assertions are enabled + # (e.g. debug_assert!() and arithmetic overflow checks) +codegen-units = 16 # if > 1 enables parallel code generation which improves + # compile times, but prevents some optimizations. + # Passes `-C codegen-units`. +panic = 'unwind' # panic strategy (`-C panic=...`), can also be 'abort' +incremental = true # whether or not incremental compilation is enabled +overflow-checks = true # use overflow checks for integer arithmetic. + # Passes the `-C overflow-checks=...` flag to the compiler. + +# The release profile, used for `cargo build --release`. +[profile.release] +opt-level = 3 +debug = false +rpath = false +lto = false +debug-assertions = false +codegen-units = 16 +panic = 'unwind' +incremental = false +overflow-checks = false + +# The testing profile, used for `cargo test`. +[profile.test] +opt-level = 0 +debug = 2 +rpath = false +lto = false +debug-assertions = true +codegen-units = 16 +panic = 'unwind' +incremental = true +overflow-checks = true + +# The benchmarking profile, used for `cargo bench` and `cargo test --release`. +[profile.bench] +opt-level = 3 +debug = false +rpath = false +lto = false +debug-assertions = false +codegen-units = 16 +panic = 'unwind' +incremental = false +overflow-checks = false + +# The documentation profile, used for `cargo doc`. +[profile.doc] +opt-level = 0 +debug = 2 +rpath = false +lto = false +debug-assertions = true +codegen-units = 16 +panic = 'unwind' +incremental = true +overflow-checks = true +``` + +### The `[features]` section + +Cargo supports features to allow expression of: + +* conditional compilation options (usable through `cfg` attributes); +* optional dependencies, which enhance a package, but are not required; and +* clusters of optional dependencies, such as `postgres`, that would include the + `postgres` package, the `postgres-macros` package, and possibly other packages + (such as development-time mocking libraries, debugging tools, etc.). + +A feature of a package is either an optional dependency, or a set of other +features. The format for specifying features is: + +```toml +[package] +name = "awesome" + +[features] +# The default set of optional packages. Most people will want to use these +# packages, but they are strictly optional. Note that `session` is not a package +# but rather another feature listed in this manifest. +default = ["jquery", "uglifier", "session"] + +# A feature with no dependencies is used mainly for conditional compilation, +# like `#[cfg(feature = "go-faster")]`. +go-faster = [] + +# The `secure-password` feature depends on the bcrypt package. This aliasing +# will allow people to talk about the feature in a higher-level way and allow +# this package to add more requirements to the feature in the future. +secure-password = ["bcrypt"] + +# Features can be used to reexport features of other packages. The `session` +# feature of package `awesome` will ensure that the `session` feature of the +# package `cookie` is also enabled. +session = ["cookie/session"] + +[dependencies] +# These packages are mandatory and form the core of this package’s distribution. 
+cookie = "1.2.0"
+oauth = "1.1.0"
+route-recognizer = "=2.1.0"
+
+# A list of all of the optional dependencies, some of which are included in the
+# above `features`. They can be opted into by apps.
+jquery = { version = "1.0.2", optional = true }
+uglifier = { version = "1.5.3", optional = true }
+bcrypt = { version = "*", optional = true }
+civet = { version = "*", optional = true }
+```
+
+To use the package `awesome`:
+
+```toml
+[dependencies.awesome]
+version = "1.3.5"
+default-features = false # do not include the default features, and optionally
+                         # cherry-pick individual features
+features = ["secure-password", "civet"]
+```
+
+#### Rules
+
+The usage of features is subject to a few rules:
+
+* Feature names must not conflict with other package names in the manifest. This
+  is because they are opted into via `features = [...]`, which only has a single
+  namespace.
+* With the exception of the `default` feature, all features are opt-in. To opt
+  out of the default feature, use `default-features = false` and cherry-pick
+  individual features.
+* Feature groups are not allowed to cyclically depend on one another.
+* Dev-dependencies cannot be optional.
+* Feature groups can only reference optional dependencies.
+* When a feature is selected, Cargo will call `rustc` with `--cfg
+  feature="${feature_name}"`. If a feature group is included, it and all of its
+  individual features will be included. This can be tested in code via
+  `#[cfg(feature = "foo")]` (see the sketch at the end of this section).
+
+Note that it is explicitly allowed for features to not actually activate any
+optional dependencies. This allows packages to internally enable/disable
+features without requiring a new dependency.
+
+#### Usage in end products
+
+One major use case for this feature is specifying optional features in
+end products. For example, the Servo project may want to include optional
+features that people can enable or disable when they build it.
+
+In that case, Servo will describe features in its `Cargo.toml` and they can be
+enabled using command-line flags:
+
+```console
+$ cargo build --release --features "shumway pdf"
+```
+
+Default features can be excluded by passing `--no-default-features`.
+
+#### Usage in packages
+
+In most cases, the concept of *optional dependency* in a library is best
+expressed as a separate package that the top-level application depends on.
+
+However, high-level packages, like Iron or Piston, may want the ability to
+curate a number of packages for easy installation. The current Cargo system
+allows them to curate a number of mandatory dependencies into a single package.
+
+In some cases, packages may want to provide additional curation for optional
+dependencies:
+
+* grouping a number of low-level optional dependencies together into a single
+  high-level feature;
+* specifying packages that are recommended (or suggested) to be included by
+  users of the package; and
+* including a feature (like `secure-password` in the motivating example) that
+  will only work if an optional dependency is available, and would be difficult
+  to implement as a separate package (for example, it may be overly difficult to
+  design an IO package to be completely decoupled from OpenSSL, with opt-in via
+  the inclusion of a separate package).
+
+In almost all cases, it is an antipattern to use these features outside of
+high-level packages that are designed for curation. If a feature is optional, it
+can almost certainly be expressed as a separate package.
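+
+Tying the rules together, here is a minimal, hypothetical sketch of how code
+inside the `awesome` crate above might gate items on its features; the function
+names and bodies are invented for illustration:
+
+```rust
+// Compiled only when the `go-faster` feature is selected, i.e. when Cargo
+// passes `--cfg feature="go-faster"` to rustc.
+#[cfg(feature = "go-faster")]
+pub fn speed_multiplier() -> u32 {
+    10
+}
+
+// Fallback definition used when the feature is not selected.
+#[cfg(not(feature = "go-faster"))]
+pub fn speed_multiplier() -> u32 {
+    1
+}
+
+// The `secure-password` feature group enables the optional `bcrypt`
+// dependency, so items gated on the feature may use that crate.
+#[cfg(feature = "secure-password")]
+pub fn hash_password(input: &str) -> String {
+    // A real implementation would call into the `bcrypt` crate here.
+    format!("hashed:{}", input)
+}
+```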
+ +### The `[workspace]` section + +Projects can define a workspace which is a set of crates that will all share the +same `Cargo.lock` and output directory. The `[workspace]` table can be defined +as: + +```toml +[workspace] + +# Optional key, inferred from path dependencies if not present. +# Additional non-path dependencies that should be included must be given here. +# In particular, for a virtual manifest, all members have to be listed. +members = ["path/to/member1", "path/to/member2", "path/to/member3/*"] + +# Optional key, empty if not present. +exclude = ["path1", "path/to/dir2"] +``` + +Workspaces were added to Cargo as part of [RFC 1525] and have a number of +properties: + +* A workspace can contain multiple crates where one of them is the *root crate*. +* The *root crate*'s `Cargo.toml` contains the `[workspace]` table, but is not + required to have other configuration. +* Whenever any crate in the workspace is compiled, output is placed in the + *workspace root*. i.e. next to the *root crate*'s `Cargo.toml`. +* The lock file for all crates in the workspace resides in the *workspace root*. +* The `[patch]`, `[replace]` and `[profile.*]` sections in `Cargo.toml` + are only recognized + in the *root crate*'s manifest, and ignored in member crates' manifests. + +[RFC 1525]: https://github.com/rust-lang/rfcs/blob/master/text/1525-cargo-workspace.md + +The *root crate* of a workspace, indicated by the presence of `[workspace]` in +its manifest, is responsible for defining the entire workspace. All `path` +dependencies residing in the workspace directory become members. You can add +additional packages to the workspace by listing them in the `members` key. Note +that members of the workspaces listed explicitly will also have their path +dependencies included in the workspace. Sometimes a project may have a lot of +workspace members and it can be onerous to keep up to date. The path dependency +can also use [globs][globs] to match multiple paths. Finally, the `exclude` +key can be used to blacklist paths from being included in a workspace. This can +be useful if some path dependencies aren't desired to be in the workspace at +all. + +The `package.workspace` manifest key (described above) is used in member crates +to point at a workspace's root crate. If this key is omitted then it is inferred +to be the first crate whose manifest contains `[workspace]` upwards in the +filesystem. + +A crate may either specify `package.workspace` or specify `[workspace]`. That +is, a crate cannot both be a root crate in a workspace (contain `[workspace]`) +and also be a member crate of another workspace (contain `package.workspace`). + +Most of the time workspaces will not need to be dealt with as `cargo new` and +`cargo init` will handle workspace configuration automatically. + +#### Virtual Manifest + +In workspace manifests, if the `package` table is present, the workspace root +crate will be treated as a normal package, as well as a workspace. If the +`package` table is not present in a workspace manifest, it is called a *virtual +manifest*. + +#### Package selection + +In a workspace, package-related cargo commands like `cargo build` apply to +packages selected by `-p` / `--package` or `--all` command-line parameters. 
+When neither is specified, the optional `default-members` configuration is used:
+
+```toml
+[workspace]
+members = ["path/to/member1", "path/to/member2", "path/to/member3/*"]
+default-members = ["path/to/member2", "path/to/member3/foo"]
+```
+
+When specified, `default-members` must expand to a subset of `members`.
+
+When `default-members` is not specified, the default is the root manifest
+if it is a package, or every member manifest (as if `--all` were specified
+on the command-line) for virtual workspaces.
+
+#TODO: move this to a more appropriate place
+### The project layout
+
+If your project is an executable, name the main source file `src/main.rs`. If it
+is a library, name the main source file `src/lib.rs`.
+
+Cargo will also treat any files located in `src/bin/*.rs` as executables. If your
+executable consists of more than just one source file, you might also use a directory
+inside `src/bin` containing a `main.rs` file, which will be treated as an executable
+with the name of the parent directory.
+Do note, however, that once you add a `[[bin]]` section ([see
+below](#configuring-a-target)), Cargo will no longer automatically build files
+located in `src/bin/*.rs`. Instead, you must create a `[[bin]]` section for
+each file you want to build.
+
+Your project can optionally contain folders named `examples`, `tests`, and
+`benches`, which Cargo will treat as containing examples,
+integration tests, and benchmarks respectively. Analogous to `bin` targets, they
+may be composed of single files or directories with a `main.rs` file.
+
+```
+▾ src/           # directory containing source files
+  lib.rs         # the main entry point for libraries and packages
+  main.rs        # the main entry point for projects producing executables
+  ▾ bin/         # (optional) directory containing additional executables
+    *.rs
+  ▾ */           # (optional) directories containing multi-file executables
+    main.rs
+▾ examples/      # (optional) examples
+  *.rs
+  ▾ */           # (optional) directories containing multi-file examples
+    main.rs
+▾ tests/         # (optional) integration tests
+  *.rs
+  ▾ */           # (optional) directories containing multi-file tests
+    main.rs
+▾ benches/       # (optional) benchmarks
+  *.rs
+  ▾ */           # (optional) directories containing multi-file benchmarks
+    main.rs
+```
+
+To structure your code after you've created the files and folders for your
+project, you should remember to use Rust's module system, which you can read
+about in [the book](https://doc.rust-lang.org/book/crates-and-modules.html).
+
+### Examples
+
+Files located under `examples` are example uses of the functionality provided by
+the library. When compiled, they are placed in the `target/examples` directory.
+
+They can compile either as executables (with a `main()` function) or libraries
+and pull in the library by using `extern crate <library-name>`. They are
+compiled when you run your tests to protect them from bitrotting.
+
+You can run individual executable examples with the command `cargo run --example
+<example-name>`.
+
+Specify `crate-type` to make an example be compiled as a library (additional
+information about crate types is available in
+[the Cargo reference](https://doc.rust-lang.org/reference/linkage.html)):
+
+```toml
+[[example]]
+name = "foo"
+crate-type = ["staticlib"]
+```
+
+You can build individual library examples with the command `cargo build
+--example <example-name>`.
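+
+For reference, a complete executable example can be as small as the following
+(a hypothetical `examples/foo.rs` for a library crate named `awesome`; the
+crate name is assumed for illustration):
+
+```rust
+// examples/foo.rs: built by `cargo build --example foo` and run with
+// `cargo run --example foo`.
+
+// Link against the library target of the enclosing package.
+extern crate awesome;
+
+fn main() {
+    // Exercise some public API of the library here; because examples are
+    // compiled when you run your tests, this code is protected from
+    // bitrotting as the library evolves.
+    println!("demonstrating the awesome crate");
+}
+```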
+ +### Tests + +When you run `cargo test`, Cargo will: + +* compile and run your library’s unit tests, which are in the files reachable + from `lib.rs` (naturally, any sections marked with `#[cfg(test)]` will be + considered at this stage); +* compile and run your library’s documentation tests, which are embedded inside + of documentation blocks; +* compile and run your library’s [integration tests](#integration-tests); and +* compile your library’s examples. + +#### Integration tests + +Each file in `tests/*.rs` is an integration test. When you run `cargo test`, +Cargo will compile each of these files as a separate crate. The crate can link +to your library by using `extern crate `, like any other code that +depends on it. + +Cargo will not automatically compile files inside subdirectories of `tests`, but +an integration test can import modules from these directories as usual. For +example, if you want several integration tests to share some code, you can put +the shared code in `tests/common/mod.rs` and then put `mod common;` in each of +the test files. + +### Configuring a target + +All of the `[[bin]]`, `[lib]`, `[[bench]]`, `[[test]]`, and `[[example]]` +sections support similar configuration for specifying how a target should be +built. The double-bracket sections like `[[bin]]` are array-of-table of +[TOML](https://github.com/toml-lang/toml#array-of-tables), which means you can +write more than one `[[bin]]` section to make several executables in your crate. + +The example below uses `[lib]`, but it also applies to all other sections +as well. All values listed are the defaults for that option unless otherwise +specified. + +```toml +[package] +# ... + +[lib] +# The name of a target is the name of the library that will be generated. This +# is defaulted to the name of the package or project, with any dashes replaced +# with underscores. (Rust `extern crate` declarations reference this name; +# therefore the value must be a valid Rust identifier to be usable.) +name = "foo" + +# This field points at where the crate is located, relative to the `Cargo.toml`. +path = "src/lib.rs" + +# A flag for enabling unit tests for this target. This is used by `cargo test`. +test = true + +# A flag for enabling documentation tests for this target. This is only relevant +# for libraries, it has no effect on other sections. This is used by +# `cargo test`. +doctest = true + +# A flag for enabling benchmarks for this target. This is used by `cargo bench`. +bench = true + +# A flag for enabling documentation of this target. This is used by `cargo doc`. +doc = true + +# If the target is meant to be a compiler plugin, this field must be set to true +# for Cargo to correctly compile it and make it available for all dependencies. +plugin = false + +# If the target is meant to be a "macros 1.1" procedural macro, this field must +# be set to true. +proc-macro = false + +# If set to false, `cargo test` will omit the `--test` flag to rustc, which +# stops it from generating a test harness. This is useful when the binary being +# built manages the test runner itself. +harness = true +``` + +#### The `required-features` field (optional) + +The `required-features` field specifies which features the target needs in order +to be built. If any of the required features are not selected, the target will +be skipped. This is only relevant for the `[[bin]]`, `[[bench]]`, `[[test]]`, +and `[[example]]` sections, it has no effect on `[lib]`. + +```toml +[features] +# ... 
+postgres = [] +sqlite = [] +tools = [] + +[[bin]] +# ... +required-features = ["postgres", "tools"] +``` + +#### Building dynamic or static libraries + +If your project produces a library, you can specify which kind of library to +build by explicitly listing the library in your `Cargo.toml`: + +```toml +# ... + +[lib] +name = "..." +crate-type = ["dylib"] # could be `staticlib` as well +``` + +The available options are `dylib`, `rlib`, `staticlib`, `cdylib`, and +`proc-macro`. You should only use this option in a project. Cargo will always +compile packages (dependencies) based on the requirements of the project that +includes them. + +You can read more about the different crate types in the +[Rust Reference Manual](https://doc.rust-lang.org/reference/linkage.html) + +### The `[patch]` Section + +This section of Cargo.toml can be used to [override dependencies][replace] with +other copies. The syntax is similar to the `[dependencies]` section: + +```toml +[patch.crates-io] +foo = { git = 'https://github.com/example/foo' } +bar = { path = 'my/local/bar' } + +[dependencies.baz] +git = 'https://github.com/example/baz' + +[patch.'https://github.com/example/baz'] +baz = { git = 'https://github.com/example/patched-baz', branch = 'my-branch' } +``` + +The `[patch]` table is made of dependency-like sub-tables. Each key after +`[patch]` is a URL of the source that's being patched, or `crates-io` if +you're modifying the https://crates.io registry. In the example above +`crates-io` could be replaced with a git URL such as +`https://github.com/rust-lang-nursery/log`; the second `[patch]` +section in the example uses this to specify a source called `baz`. + +Each entry in these tables is a normal dependency specification, the same as +found in the `[dependencies]` section of the manifest. The dependencies listed +in the `[patch]` section are resolved and used to patch the source at the +URL specified. The above manifest snippet patches the `crates-io` source (e.g. +crates.io itself) with the `foo` crate and `bar` crate. It also +patches the `https://github.com/example/baz` source with a `my-branch` that +comes from elsewhere. + +Sources can be patched with versions of crates that do not exist, and they can +also be patched with versions of crates that already exist. If a source is +patched with a crate version that already exists in the source, then the +source's original crate is replaced. + +More information about overriding dependencies can be found in the [overriding +dependencies][replace] section of the documentation and [RFC 1969] for the +technical specification of this feature. + +[RFC 1969]: https://github.com/rust-lang/rfcs/pull/1969 +[replace]: reference/specifying-dependencies.html#overriding-dependencies + +### The `[replace]` Section + +This section of Cargo.toml can be used to [override dependencies][replace] with +other copies. The syntax is similar to the `[dependencies]` section: + +```toml +[replace] +"foo:0.1.0" = { git = 'https://github.com/example/foo' } +"bar:1.0.2" = { path = 'my/local/bar' } +``` + +Each key in the `[replace]` table is a [package id +specification](reference/pkgid-spec.html) which allows arbitrarily choosing a node in the +dependency graph to override. The value of each key is the same as the +`[dependencies]` syntax for specifying dependencies, except that you can't +specify features. Note that when a crate is overridden the copy it's overridden +with must have both the same name and version, but it can come from a different +source (e.g. 
git or a local path).
+
+More information about overriding dependencies can be found in the [overriding
+dependencies][replace] section of the documentation.
+
+[spdx-2.1-license-expressions]: https://spdx.org/spdx-specification-21-web-version#h.jxpfx0ykyb60
+[spdx-license-list]: https://spdx.org/licenses/
+[spdx-license-list-2.4]: https://github.com/spdx/license-list-data/tree/v2.4
diff --git a/src/doc/src/reference/pkgid-spec.md b/src/doc/src/reference/pkgid-spec.md
new file mode 100644
index 000000000..bd7ac2d92
--- /dev/null
+++ b/src/doc/src/reference/pkgid-spec.md
@@ -0,0 +1,44 @@
+## Package ID Specifications
+
+Subcommands of Cargo frequently need to refer to a particular package within a
+dependency graph for various operations like updating, cleaning, building, etc.
+To solve this problem, Cargo supports Package ID Specifications. A specification
+is a string which is used to uniquely refer to one package within a graph of
+packages.
+
+#### Specification grammar
+
+The formal grammar for a Package ID Specification is:
+
+```notrust
+pkgid := pkgname
+       | [ proto "://" ] hostname-and-path [ "#" ( pkgname | semver ) ]
+pkgname := name [ ":" semver ]
+
+proto := "http" | "git" | ...
+```
+
+Here, brackets indicate that the contents are optional.
+
+#### Example specifications
+
+These could all be references to a package `foo` version `1.2.3` from the
+registry at `crates.io`:
+
+| pkgid                        | name  | version | url                    |
+|:-----------------------------|:-----:|:-------:|:----------------------:|
+| `foo`                        | `foo` | `*`     | `*`                    |
+| `foo:1.2.3`                  | `foo` | `1.2.3` | `*`                    |
+| `crates.io/foo`              | `foo` | `*`     | `*://crates.io/foo`    |
+| `crates.io/foo#1.2.3`        | `foo` | `1.2.3` | `*://crates.io/foo`    |
+| `crates.io/bar#foo:1.2.3`    | `foo` | `1.2.3` | `*://crates.io/bar`    |
+| `http://crates.io/foo#1.2.3` | `foo` | `1.2.3` | `http://crates.io/foo` |
+
+#### Brevity of specifications
+
+The goal of this is to enable both succinct and exhaustive syntaxes for
+referring to packages in a dependency graph. Ambiguous references may refer to
+one or more packages. Most commands generate an error if more than one package
+could be referred to with the same specification.
diff --git a/src/doc/src/reference/publishing.md b/src/doc/src/reference/publishing.md
new file mode 100644
index 000000000..05439354c
--- /dev/null
+++ b/src/doc/src/reference/publishing.md
@@ -0,0 +1,222 @@
+## Publishing on crates.io
+
+Once you've got a library that you'd like to share with the world, it's time to
+publish it on [crates.io]! Publishing a crate means uploading a specific
+version to be hosted on [crates.io].
+
+Take care when publishing a crate, because a publish is **permanent**. The
+version can never be overwritten, and the code cannot be deleted. There is no
+limit to the number of versions which can be published, however.
+
+### Before your first publish
+
+First things first, you’ll need an account on [crates.io] to acquire
+an API token. To do so, [visit the home page][crates.io] and log in via a GitHub
+account (required for now). After this, visit your [Account
+Settings](https://crates.io/me) page and run the `cargo login` command
+specified.
+
+```console
+$ cargo login abcdefghijklmnopqrstuvwxyz012345
+```
+
+This command will inform Cargo of your API token and store it locally in your
+`~/.cargo/credentials` (previously it was `~/.cargo/config`). Note that this
+token is a **secret** and should not be shared with anyone else. If it leaks for
+any reason, you should regenerate it immediately.
+
+### Before publishing a new crate
+
+Keep in mind that crate names on [crates.io] are allocated on a
+first-come, first-served basis. Once a crate name is taken, it cannot be used
+for another crate.
+
+#### Packaging a crate
+
+The next step is to package up your crate into a format that can be uploaded to
+[crates.io]. For this we’ll use the `cargo package` subcommand. This will take
+our entire crate and package it all up into a `*.crate` file in the
+`target/package` directory.
+
+```console
+$ cargo package
+```
+
+As an added bonus, the `*.crate` will be verified independently of the current
+source tree. After the `*.crate` is created, it’s unpacked into
+`target/package` and then built from scratch to ensure that all necessary files
+are there for the build to succeed. This behavior can be disabled with the
+`--no-verify` flag.
+
+Now’s a good time to take a look at the `*.crate` file to make sure you didn’t
+accidentally package up that 2GB video asset, or large data files used for code
+generation, integration tests, or benchmarking. There is currently a 10MB
+upload size limit on `*.crate` files. So if the `tests` and `benches`
+directories and their dependencies only add up to a couple of megabytes, you
+can keep them in your package; otherwise, it is better to exclude them.
+
+Cargo will automatically ignore files ignored by your version control system
+when packaging, but if you want to specify an extra set of files to ignore you
+can use the `exclude` key in the manifest:
+
+```toml
+[package]
+# ...
+exclude = [
+    "public/assets/*",
+    "videos/*",
+]
+```
+
+The syntax of each element in this array is what
+[rust-lang/glob](https://github.com/rust-lang/glob) accepts. If you’d rather
+roll with a whitelist instead of a blacklist, Cargo also supports an `include`
+key, which, if set, overrides the `exclude` key:
+
+```toml
+[package]
+# ...
+include = [
+    "**/*.rs",
+    "Cargo.toml",
+]
+```
+
+### Uploading the crate
+
+Now that we’ve got a `*.crate` file ready to go, it can be uploaded to
+[crates.io] with the `cargo publish` command. And that’s it, you’ve now published
+your first crate!
+
+```console
+$ cargo publish
+```
+
+If you’d like to skip the `cargo package` step, the `cargo publish` subcommand
+will automatically package up the local crate if a copy isn’t found already.
+
+Be sure to check out the [metadata you can
+specify](reference/manifest.html#package-metadata) to ensure your crate can be
+discovered more easily!
+
+### Publishing a new version of an existing crate
+
+In order to release a new version, change the `version` value specified in your
+`Cargo.toml` manifest. Keep in mind [the semver
+rules](reference/manifest.html#the-version-field). Then optionally run `cargo package` if
+you want to inspect the `*.crate` file for the new version before publishing,
+and run `cargo publish` to upload the new version.
+
+### Managing a crates.io-based crate
+
+Management of crates is primarily done through the command-line `cargo` tool
+rather than the [crates.io] web interface. For this, there are a few subcommands
+to manage a crate.
+
+#### `cargo yank`
+
+Occasions may arise where you publish a version of a crate that actually ends up
+being broken for one reason or another (syntax error, forgot to include a file,
+etc.). For situations such as this, Cargo supports a “yank” of a version of a
+crate.
+ +```console +$ cargo yank --vers 1.0.1 +$ cargo yank --vers 1.0.1 --undo +``` + +A yank **does not** delete any code. This feature is not intended for deleting +accidentally uploaded secrets, for example. If that happens, you must reset +those secrets immediately. + +The semantics of a yanked version are that no new dependencies can be created +against that version, but all existing dependencies continue to work. One of the +major goals of [crates.io] is to act as a permanent archive of crates that does +not change over time, and allowing deletion of a version would go against this +goal. Essentially a yank means that all projects with a `Cargo.lock` will not +break, while any future `Cargo.lock` files generated will not list the yanked +version. + +#### `cargo owner` + +A crate is often developed by more than one person, or the primary maintainer +may change over time! The owner of a crate is the only person allowed to publish +new versions of the crate, but an owner may designate additional owners. + +```console +$ cargo owner --add my-buddy +$ cargo owner --remove my-buddy +$ cargo owner --add github:rust-lang:owners +$ cargo owner --remove github:rust-lang:owners +``` + +The owner IDs given to these commands must be GitHub user names or GitHub teams. + +If a user name is given to `--add`, that user becomes a “named” owner, with +full rights to the crate. In addition to being able to publish or yank versions +of the crate, they have the ability to add or remove owners, *including* the +owner that made *them* an owner. Needless to say, you shouldn’t make people you +don’t fully trust into a named owner. In order to become a named owner, a user +must have logged into [crates.io] previously. + +If a team name is given to `--add`, that team becomes a “team” owner, with +restricted right to the crate. While they have permission to publish or yank +versions of the crate, they *do not* have the ability to add or remove owners. +In addition to being more convenient for managing groups of owners, teams are +just a bit more secure against owners becoming malicious. + +The syntax for teams is currently `github:org:team` (see examples above). +In order to add a team as an owner one must be a member of that team. No +such restriction applies to removing a team as an owner. + +### GitHub permissions + +Team membership is not something GitHub provides simple public access to, and it +is likely for you to encounter the following message when working with them: + +> It looks like you don’t have permission to query a necessary property from +GitHub to complete this request. You may need to re-authenticate on [crates.io] +to grant permission to read GitHub org memberships. Just go to +https://crates.io/login + +This is basically a catch-all for “you tried to query a team, and one of the +five levels of membership access control denied this”. That is not an +exaggeration. GitHub’s support for team access control is Enterprise Grade. + +The most likely cause of this is simply that you last logged in before this +feature was added. We originally requested *no* permissions from GitHub when +authenticating users, because we didn’t actually ever use the user’s token for +anything other than logging them in. However to query team membership on your +behalf, we now require +[the `read:org` scope](https://developer.github.com/v3/oauth/#scopes). + +You are free to deny us this scope, and everything that worked before teams +were introduced will keep working. 
However you will never be able to add a team +as an owner, or publish a crate as a team owner. If you ever attempt to do this, +you will get the error above. You may also see this error if you ever try to +publish a crate that you don’t own at all, but otherwise happens to have a team. + +If you ever change your mind, or just aren’t sure if [crates.io] has sufficient +permission, you can always go to https://crates.io/login, which will prompt you +for permission if [crates.io] doesn’t have all the scopes it would like to. + +An additional barrier to querying GitHub is that the organization may be +actively denying third party access. To check this, you can go to: + + https://github.com/organizations/:org/settings/oauth_application_policy + +where `:org` is the name of the organization (e.g. rust-lang). You may see +something like: + +![Organization Access Control](images/org-level-acl.png) + +Where you may choose to explicitly remove [crates.io] from your organization’s +blacklist, or simply press the “Remove Restrictions” button to allow all third +party applications to access this data. + +Alternatively, when [crates.io] requested the `read:org` scope, you could have +explicitly whitelisted [crates.io] querying the org in question by pressing +the “Grant Access” button next to its name: + +![Authentication Access Control](images/auth-level-acl.png) + +[crates.io]: https://crates.io/ diff --git a/src/doc/src/reference/source-replacement.md b/src/doc/src/reference/source-replacement.md new file mode 100644 index 000000000..20099b29c --- /dev/null +++ b/src/doc/src/reference/source-replacement.md @@ -0,0 +1,134 @@ +## Source Replacement + +Cargo supports the ability to **replace one source with another** to express +strategies along the lines of mirrors or vendoring dependencies. Configuration +is currently done through the [`.cargo/config` configuration][config] mechanism, +like so: + +[config]: reference/config.html + +```toml +# The `source` table is where all keys related to source-replacement +# are stored. +[source] + +# Under the `source` table are a number of other tables whose keys are a +# name for the relevant source. For example this section defines a new +# source, called `my-awesome-source`, which comes from a directory +# located at `vendor` relative to the directory containing this `.cargo/config` +# file +[source.my-awesome-source] +directory = "vendor" + +# Git sources can optionally specify a branch/tag/rev as well +git = "https://example.com/path/to/repo" +# branch = "master" +# tag = "v1.0.1" +# rev = "313f44e8" + +# The crates.io default source for crates is available under the name +# "crates-io", and here we use the `replace-with` key to indicate that it's +# replaced with our source above. +[source.crates-io] +replace-with = "my-awesome-source" +``` + +With this configuration Cargo attempts to look up all crates in the directory +"vendor" rather than querying the online registry at crates.io. Using source +replacement Cargo can express: + +* Vendoring - custom sources can be defined which represent crates on the local + filesystem. These sources are subsets of the source that they're replacing and + can be checked into projects if necessary. + +* Mirroring - sources can be replaced with an equivalent version which acts as a + cache for crates.io itself. + +Cargo has a core assumption about source replacement that the source code is +exactly the same from both sources. 
In our above example Cargo assumes that all
+of the crates coming from `my-awesome-source` are the exact same as the copies
+from `crates-io`. Note that this also means that `my-awesome-source` is not
+allowed to have crates which are not present in the `crates-io` source.
+
+As a consequence, source replacement is not appropriate for situations such as
+patching a dependency or a private registry. Cargo supports patching
+dependencies through the usage of [the `[replace]` key][replace-section], and
+private registry support is planned for a future version of Cargo.
+
+[replace-section]: reference/manifest.html#the-replace-section
+
+### Configuration
+
+Configuration of replacement sources is done through [`.cargo/config`][config],
+and the full set of available keys is:
+
+```toml
+# Each source has its own table where the key is the name of the source
+[source.the-source-name]
+
+# Indicate that `the-source-name` will be replaced with `another-source`,
+# defined elsewhere
+replace-with = "another-source"
+
+# Available kinds of sources that can be specified (described below)
+registry = "https://example.com/path/to/index"
+local-registry = "path/to/registry"
+directory = "path/to/vendor"
+```
+
+The `crates-io` source represents the crates.io online registry (the default
+source of crates) and can be replaced with:
+
+```toml
+[source.crates-io]
+replace-with = 'another-source'
+```
+
+### Registry Sources
+
+A "registry source" is one that is the same as crates.io itself. That is, it has
+an index served in a git repository which matches the format of the
+[crates.io index](https://github.com/rust-lang/crates.io-index). That repository
+then has configuration indicating where to download crates from.
+
+Currently there is not an already-available project for setting up a mirror of
+crates.io. Stay tuned though!
+
+### Local Registry Sources
+
+A "local registry source" is intended to be a subset of another registry
+source, but available on the local filesystem (aka vendoring). Local registries
+are downloaded ahead of time, typically sync'd with a `Cargo.lock`, and are
+made up of a set of `*.crate` files and an index like the normal registry is.
+
+The primary way to manage and create local registry sources is through the
+[`cargo-local-registry`][cargo-local-registry] subcommand, which is available on
+crates.io and can be installed with `cargo install cargo-local-registry`.
+
+[cargo-local-registry]: https://crates.io/crates/cargo-local-registry
+
+Local registries are contained within one directory and contain a number of
+`*.crate` files downloaded from crates.io as well as an `index` directory with
+the same format as the crates.io-index project (populated with just entries for
+the crates that are present).
+
+### Directory Sources
+
+A "directory source" is similar to a local registry source in that it contains a
+number of crates available on the local filesystem, suitable for vendoring
+dependencies. Also like local registries, directory sources can primarily be
+managed by an external subcommand, [`cargo-vendor`][cargo-vendor], which can be
+installed with `cargo install cargo-vendor`.
+
+[cargo-vendor]: https://crates.io/crates/cargo-vendor
+
+Directory sources are distinct from local registries, though, in that they
+contain the unpacked version of `*.crate` files, making them more suitable in
+some situations for checking everything into source control.
A directory source is just a +directory containing a number of other directories which contain the source code +for crates (the unpacked version of `*.crate` files). Currently no restriction +is placed on the name of each directory. + +Each crate in a directory source also has an associated metadata file indicating +the checksum of each file in the crate to protect against accidental +modifications. diff --git a/src/doc/src/reference/specifying-dependencies.md b/src/doc/src/reference/specifying-dependencies.md new file mode 100644 index 000000000..b7c780a1b --- /dev/null +++ b/src/doc/src/reference/specifying-dependencies.md @@ -0,0 +1,536 @@ +## Specifying Dependencies + +Your crates can depend on other libraries from [crates.io], `git` repositories, or +subdirectories on your local file system. You can also temporarily override the +location of a dependency— for example, to be able to test out a bug fix in the +dependency that you are working on locally. You can have different +dependencies for different platforms, and dependencies that are only used during +development. Let's take a look at how to do each of these. + +### Specifying dependencies from crates.io + +Cargo is configured to look for dependencies on [crates.io] by default. Only +the name and a version string are required in this case. In [the cargo +guide](guide/index.html), we specified a dependency on the `time` crate: + +```toml +[dependencies] +time = "0.1.12" +``` + +The string `"0.1.12"` is a [semver] version requirement. Since this +string does not have any operators in it, it is interpreted the same way as +if we had specified `"^0.1.12"`, which is called a caret requirement. + +[semver]: https://github.com/steveklabnik/semver#requirements + +### Caret requirements + +**Caret requirements** allow SemVer compatible updates to a specified version. +An update is allowed if the new version number does not modify the left-most +non-zero digit in the major, minor, patch grouping. In this case, if we ran +`cargo update -p time`, cargo should update us to version `0.1.13` if it is the +latest `0.1.z` release, but would not update us to `0.2.0`. If instead we had +specified the version string as `^1.0`, cargo should update to `1.1` if it is +the latest `1.y` release, but not `2.0`. The version `0.0.x` is not considered +compatible with any other version. + +Here are some more examples of caret requirements and the versions that would +be allowed with them: + +```notrust +^1.2.3 := >=1.2.3 <2.0.0 +^1.2 := >=1.2.0 <2.0.0 +^1 := >=1.0.0 <2.0.0 +^0.2.3 := >=0.2.3 <0.3.0 +^0.2 := >= 0.2.0 < 0.3.0 +^0.0.3 := >=0.0.3 <0.0.4 +^0.0 := >=0.0.0 <0.1.0 +^0 := >=0.0.0 <1.0.0 +``` + +This compatibility convention is different from SemVer in the way it treats +versions before 1.0.0. While SemVer says there is no compatibility before +1.0.0, Cargo considers `0.x.y` to be compatible with `0.x.z`, where `y ≥ z` +and `x > 0`. + +### Tilde requirements + +**Tilde requirements** specify a minimal version with some ability to update. +If you specify a major, minor, and patch version or only a major and minor +version, only patch-level changes are allowed. If you only specify a major +version, then minor- and patch-level changes are allowed. + +`~1.2.3` is an example of a tilde requirement. + +```notrust +~1.2.3 := >=1.2.3 <1.3.0 +~1.2 := >=1.2.0 <1.3.0 +~1 := >=1.0.0 <2.0.0 +``` + +### Wildcard requirements + +**Wildcard requirements** allow for any version where the wildcard is +positioned. 
+ +`*`, `1.*` and `1.2.*` are examples of wildcard requirements. + +```notrust +* := >=0.0.0 +1.* := >=1.0.0 <2.0.0 +1.2.* := >=1.2.0 <1.3.0 +``` + +### Inequality requirements + +**Inequality requirements** allow manually specifying a version range or an +exact version to depend on. + +Here are some examples of inequality requirements: + +```notrust +>= 1.2.0 +> 1 +< 2 += 1.2.3 +``` + +### Multiple requirements + +Multiple version requirements can also be separated with a comma, e.g. `>= 1.2, +< 1.5`. + +### Specifying dependencies from `git` repositories + +To depend on a library located in a `git` repository, the minimum information +you need to specify is the location of the repository with the `git` key: + +```toml +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand" } +``` + +Cargo will fetch the `git` repository at this location then look for a +`Cargo.toml` for the requested crate anywhere inside the `git` repository +(not necessarily at the root). + +Since we haven’t specified any other information, Cargo assumes that +we intend to use the latest commit on the `master` branch to build our project. +You can combine the `git` key with the `rev`, `tag`, or `branch` keys to +specify something else. Here's an example of specifying that you want to use +the latest commit on a branch named `next`: + +```toml +[dependencies] +rand = { git = "https://github.com/rust-lang-nursery/rand", branch = "next" } +``` + +### Specifying path dependencies + +Over time, our `hello_world` project from [the guide](guide/index.html) has +grown significantly in size! It’s gotten to the point that we probably want to +split out a separate crate for others to use. To do this Cargo supports **path +dependencies** which are typically sub-crates that live within one repository. +Let’s start off by making a new crate inside of our `hello_world` project: + +```console +# inside of hello_world/ +$ cargo new hello_utils +``` + +This will create a new folder `hello_utils` inside of which a `Cargo.toml` and +`src` folder are ready to be configured. In order to tell Cargo about this, open +up `hello_world/Cargo.toml` and add `hello_utils` to your dependencies: + +```toml +[dependencies] +hello_utils = { path = "hello_utils" } +``` + +This tells Cargo that we depend on a crate called `hello_utils` which is found +in the `hello_utils` folder (relative to the `Cargo.toml` it’s written in). + +And that’s it! The next `cargo build` will automatically build `hello_utils` and +all of its own dependencies, and others can also start using the crate as well. +However, crates that use dependencies specified with only a path are not +permitted on [crates.io]. If we wanted to publish our `hello_world` crate, we +would need to publish a version of `hello_utils` to [crates.io](https://crates.io) +and specify its version in the dependencies line as well: + +```toml +[dependencies] +hello_utils = { path = "hello_utils", version = "0.1.0" } +``` + +### Overriding dependencies + +There are a number of methods in Cargo to support overriding dependencies and +otherwise controlling the dependency graph. These options are typically, though, +only available at the workspace level and aren't propagated through +dependencies. In other words, "applications" have the ability to override +dependencies but "libraries" do not. + +The desire to override a dependency or otherwise alter some dependencies can +arise through a number of scenarios. 
Most of them, however, boil down to the
+ability to work with a crate before it's been published to crates.io. For
+example:
+
+* A crate you're working on is also used in a much larger application you're
+  working on, and you'd like to test a bug fix to the library inside of the
+  larger application.
+* An upstream crate you don't work on has a new feature or a bug fix on the
+  master branch of its git repository which you'd like to test out.
+* You're about to publish a new major version of your crate, but you'd like to
+  do integration testing across an entire project to ensure the new major
+  version works.
+* You've submitted a fix to an upstream crate for a bug you found, but you'd
+  like to immediately have your application start depending on the fixed version
+  of the crate to avoid blocking on the bug fix getting merged.
+
+These scenarios are currently all solved with the [`[patch]` manifest
+section][patch-section]. Historically, some of these scenarios have been solved
+with [the `[replace]` section][replace-section], but we'll document the `[patch]`
+section here.
+
+[patch-section]: reference/manifest.html#the-patch-section
+[replace-section]: reference/manifest.html#the-replace-section
+
+### Testing a bugfix
+
+Let's say you're working with the [`uuid`] crate but while you're working on it
+you discover a bug. You are, however, quite enterprising so you decide to also
+try to fix the bug! Originally your manifest will look like:
+
+[`uuid`]: https://crates.io/crates/uuid
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+uuid = "1.0"
+```
+
+The first thing we'll do is clone the [`uuid` repository][uuid-repository]
+locally via:
+
+```console
+$ git clone https://github.com/rust-lang-nursery/uuid
+```
+
+Next we'll edit the manifest of `my-library` to contain:
+
+```toml
+[patch.crates-io]
+uuid = { path = "../path/to/uuid" }
+```
+
+Here we declare that we're *patching* the source `crates-io` with a new
+dependency. This will effectively add the locally checked-out version of `uuid`
+to the crates.io registry for our local project.
+
+Next up we need to ensure that our lock file is updated to use this new version
+of `uuid` so our project uses the locally checked out copy instead of one from
+crates.io. The way `[patch]` works is that it'll load the dependency at
+`../path/to/uuid` and then whenever crates.io is queried for versions of `uuid`
+it'll *also* return the local version.
+
+This means that the version number of the local checkout is significant and will
+affect whether the patch is used. Our manifest declared `uuid = "1.0"` which
+means we'll only resolve to `>= 1.0.0, < 2.0.0`, and Cargo's greedy resolution
+algorithm also means that we'll resolve to the maximum version within that
+range. Typically this doesn't matter as the version of the git repository will
+already be greater than or match the maximum version published on crates.io, but
+it's important to keep this in mind!
+
+In any case, typically all you need to do now is:
+
+```console
+$ cargo build
+   Compiling uuid v1.0.0 (file://.../uuid)
+   Compiling my-library v0.1.0 (file://.../my-library)
+    Finished dev [unoptimized + debuginfo] target(s) in 0.32 secs
+```
+
+And that's it! You're now building with the local version of `uuid` (note the
+`file://` in the build output).
If you don't see the `file://` version getting
+built then you may need to run `cargo update -p uuid --precise $version` where
+`$version` is the version of the locally checked out copy of `uuid`.
+
+Once you've fixed the bug you originally found, the next thing you'll likely
+want to do is submit it as a pull request to the `uuid` crate itself. Once
+you've done this, you can also update the `[patch]` section. The listing
+inside of `[patch]` is just like the `[dependencies]` section, so once your pull
+request is merged you could change your `path` dependency to:
+
+```toml
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+[uuid-repository]: https://github.com/rust-lang-nursery/uuid
+
+### Working with an unpublished minor version
+
+Let's now shift gears a bit from bug fixes to adding features. While working on
+`my-library` you discover that a whole new feature is needed in the `uuid`
+crate. You've implemented this feature, tested it locally above with `[patch]`,
+and submitted a pull request. Let's go over how you continue to use and test it
+before it's actually published.
+
+Let's also say that the current version of `uuid` on crates.io is `1.0.0`, but
+since then the master branch of the git repository has updated to `1.0.1`. This
+branch includes the new feature you submitted previously. To use this
+repository we'll edit our `Cargo.toml` to look like:
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+uuid = "1.0.1"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+Note that our local dependency on `uuid` has been updated to `1.0.1` as it's
+what we'll actually require once the crate is published. This version doesn't
+exist on crates.io, though, so we provide it with the `[patch]` section of the
+manifest.
+
+Now when our library is built it'll fetch `uuid` from the git repository and
+resolve to 1.0.1 inside the repository instead of trying to download a version
+from crates.io. Once 1.0.1 is published on crates.io the `[patch]` section can
+be deleted.
+
+It's also worth noting that `[patch]` applies *transitively*. Let's say you use
+`my-library` in a larger project, such as:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+Remember that `[patch]` is applicable *transitively* but can only be defined at
+the *top level*, so consumers of `my-library` have to repeat the `[patch]` section
+if necessary. Here, though, the new `uuid` crate applies to *both* our dependency on
+`uuid` and the `my-library -> uuid` dependency. The `uuid` crate will be resolved to
+one version for this entire crate graph, 1.0.1, and it'll be pulled from the git
+repository.
+
+#### Overriding repository URL
+
+In case the dependency you want to override isn't loaded from `crates.io`,
+you'll have to adjust how you use `[patch]` slightly:
+
+```toml
+[patch."https://github.com/your/repository"]
+my-library = { path = "../my-library/path" }
+```
+
+And that's it!
+
+### Prepublishing a breaking change
+
+As a final scenario, let's take a look at working with a new major version of a
+crate, typically accompanied by breaking changes. Sticking with our previous
+crates, this means that we're going to be creating version 2.0.0 of the `uuid`
+crate.
After we've submitted all changes upstream, we can update our manifest for
+`my-library` to look like:
+
+```toml
+[dependencies]
+uuid = "2.0"
+
+[patch.crates-io]
+uuid = { git = "https://github.com/rust-lang-nursery/uuid", branch = "2.0.0" }
+```
+
+And that's it! As in the previous example, the 2.0.0 version doesn't actually
+exist on crates.io, but we can still supply it with a git dependency through
+the `[patch]` section. As a thought exercise, let's take another
+look at the `my-binary` manifest from above:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid', version = '2.0.0' }
+```
+
+Note that this will actually resolve to two versions of the `uuid` crate. The
+`my-binary` crate will continue to use the 1.x.y series of the `uuid` crate but
+the `my-library` crate will use the 2.0.0 version of `uuid`. This will allow you
+to gradually roll out breaking changes to a crate through a dependency graph
+without being forced to update everything all at once.
+
+### Overriding with local dependencies
+
+Sometimes you're only temporarily working on a crate and you don't want to have
+to modify `Cargo.toml` like with the `[patch]` section above. For this use
+case Cargo offers a much more limited version of overrides called **path
+overrides**.
+
+Path overrides are specified through `.cargo/config` instead of `Cargo.toml`,
+and you can find [more documentation about this configuration][config-docs].
+Inside of `.cargo/config` you'll specify a key called `paths`:
+
+[config-docs]: reference/config.html
+
+```toml
+paths = ["/path/to/uuid"]
+```
+
+This array should be filled with directories that contain a `Cargo.toml`. In
+this instance, we’re just adding `uuid`, so it will be the only one that’s
+overridden. This path can be either absolute or relative to the directory that
+contains the `.cargo` folder.
+
+Path overrides are more restricted than the `[patch]` section, however, in
+that they cannot change the structure of the dependency graph. When a
+path replacement is used, the previous set of dependencies
+must all match exactly to the new `Cargo.toml` specification. For example, this
+means that path overrides cannot be used to test out adding a dependency to a
+crate; instead, `[patch]` must be used in that situation. As a result, usage of a
+path override is typically isolated to quick bug fixes rather than larger
+changes.
+
+Note: using a local configuration to override paths will only work for crates
+that have been published to [crates.io]. You cannot use this feature to tell
+Cargo how to find local unpublished crates.
+
+### Platform-specific dependencies
+
+Platform-specific dependencies take the same format, but are listed under a
+`target` section. Normally Rust-like `#[cfg]` syntax will be used to define
+these sections:
+
+```toml
+[target.'cfg(windows)'.dependencies]
+winhttp = "0.4.0"
+
+[target.'cfg(unix)'.dependencies]
+openssl = "1.0.1"
+
+[target.'cfg(target_arch = "x86")'.dependencies]
+native = { path = "native/i686" }
+
+[target.'cfg(target_arch = "x86_64")'.dependencies]
+native = { path = "native/x86_64" }
+```
+
+Like with Rust, the syntax here supports the `not`, `any`, and `all` operators
+to combine various cfg name/value pairs. Note that the `cfg` syntax has only
+been available since Cargo 0.9.0 (Rust 1.8.0).
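+
+Because a platform-specific dependency simply isn't present on non-matching
+targets, code that uses it typically needs the same gate. A minimal sketch,
+assuming the `winhttp` and `openssl` dependencies from the manifest above:
+
+```rust
+// Each `extern crate` is gated on the same predicate as the corresponding
+// `[target.'cfg(...)'.dependencies]` entry, so the crate is only referenced
+// on targets where Cargo actually provides it.
+#[cfg(windows)]
+extern crate winhttp;
+
+#[cfg(unix)]
+extern crate openssl;
+
+fn main() {
+    #[cfg(windows)]
+    println!("built against winhttp");
+
+    #[cfg(unix)]
+    println!("built against openssl");
+}
+```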
+ +In addition to `#[cfg]` syntax, Cargo also supports listing out the full target +the dependencies would apply to: + +```toml +[target.x86_64-pc-windows-gnu.dependencies] +winhttp = "0.4.0" + +[target.i686-unknown-linux-gnu.dependencies] +openssl = "1.0.1" +``` + +If you’re using a custom target specification, quote the full path and file +name: + +```toml +[target."x86_64/windows.json".dependencies] +winhttp = "0.4.0" + +[target."i686/linux.json".dependencies] +openssl = "1.0.1" +native = { path = "native/i686" } + +[target."x86_64/linux.json".dependencies] +openssl = "1.0.1" +native = { path = "native/x86_64" } +``` + +### Development dependencies + +You can add a `[dev-dependencies]` section to your `Cargo.toml` whose format +is equivalent to `[dependencies]`: + +```toml +[dev-dependencies] +tempdir = "0.3" +``` + +Dev-dependencies are not used when compiling +a package for building, but are used for compiling tests, examples, and +benchmarks. + +These dependencies are *not* propagated to other packages which depend on this +package. + +You can also have target-specific development dependencies by using +`dev-dependencies` in the target section header instead of `dependencies`. For +example: + +```toml +[target.'cfg(unix)'.dev-dependencies] +mio = "0.0.1" +``` + +[crates.io]: https://crates.io/ + +### Build dependencies + +You can depend on other Cargo-based crates for use in your build scripts. +Dependencies are declared through the `build-dependencies` section of the +manifest: + +```toml +[build-dependencies] +gcc = "0.3" +``` + +The build script **does not** have access to the dependencies listed +in the `dependencies` or `dev-dependencies` section. Build +dependencies will likewise not be available to the package itself +unless listed under the `dependencies` section as well. A package +itself and its build script are built separately, so their +dependencies need not coincide. Cargo is kept simpler and cleaner by +using independent dependencies for independent purposes. + +### Choosing features + +If a package you depend on offers conditional features, you can +specify which to use: + +```toml +[dependencies.awesome] +version = "1.3.5" +default-features = false # do not include the default features, and optionally + # cherry-pick individual features +features = ["secure-password", "civet"] +``` + +More information about features can be found in the +[manifest documentation](reference/manifest.html#the-features-section). 
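+
+As a closing illustration of the build-dependencies section above, here is a
+minimal, hypothetical `build.rs` using the `gcc` crate from that snippet; the
+C source path is invented for the example:
+
+```rust
+// build.rs is compiled against [build-dependencies], not [dependencies],
+// and runs before the package itself is built.
+extern crate gcc;
+
+fn main() {
+    // Compile a C source file into a static library that Cargo will link
+    // into the package.
+    gcc::Config::new()
+        .file("src/hello.c")
+        .compile("libhello.a");
+}
+```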
diff --git a/src/doc/theme/favicon.png b/src/doc/theme/favicon.png new file mode 100644 index 000000000..a91ad692c Binary files /dev/null and b/src/doc/theme/favicon.png differ diff --git a/src/etc/_cargo b/src/etc/_cargo new file mode 100644 index 000000000..cb0cf8f5f --- /dev/null +++ b/src/etc/_cargo @@ -0,0 +1,544 @@ +#compdef cargo + +autoload -U regexp-replace + +zstyle -T ':completion:*:*:cargo:*' tag-order && \ + zstyle ':completion:*:*:cargo:*' tag-order 'common-commands' + +_cargo() { +local context state state_descr line +typeset -A opt_args + +# leading items in parentheses are an exclusion list for the arguments following that arg +# See: http://zsh.sourceforge.net/Doc/Release/Completion-System.html#Completion-Functions +# - => exclude all other options +# 1 => exclude positional arg 1 +# * => exclude all other args +# +blah => exclude +blah +_arguments \ + '(- 1 *)'{-h,--help}'[show help message]' \ + '(- 1 *)--list[list installed commands]' \ + '(- 1 *)'{-V,--version}'[show version information]' \ + {-v,--verbose}'[use verbose output]' \ + --color'[colorization option]' \ + '(+beta +nightly)+stable[use the stable toolchain]' \ + '(+stable +nightly)+beta[use the beta toolchain]' \ + '(+stable +beta)+nightly[use the nightly toolchain]' \ + '1: :->command' \ + '*:: :->args' + +case $state in + command) + _alternative 'common-commands:common:_cargo_cmds' 'all-commands:all:_cargo_all_cmds' + ;; + + args) + case $words[1] in + bench) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + "${command_scope_spec[@]}" \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-default-features[do not build the default features]' \ + '--no-run[compile but do not run]' \ + '(-p,--package)'{-p=,--package=}'[package to run benchmarks for]:packages:_get_package_names' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + build) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + "${command_scope_spec[@]}" \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-default-features[do not build the default features]' \ + '(-p,--package)'{-p=,--package=}'[package to build]:packages:_get_package_names' \ + '--release=[build in release mode]' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + check) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + "${command_scope_spec[@]}" \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-default-features[do not check the default features]' \ + '(-p,--package)'{-p=,--package=}'[package to check]:packages:_get_package_names' \ + '--release=[check in release mode]' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use 
verbose output]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + clean) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-p,--package)'{-p=,--package=}'[package to clean]:packages:_get_package_names' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[whether or not to clean release artifacts]' \ + '--target=[target triple(default:all)]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + doc) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-deps[do not build docs for dependencies]' \ + '--no-default-features[do not build the default features]' \ + '--open[open docs in browser after the build]' \ + '(-p, --package)'{-p,--package}'=[package to document]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[build artifacts in release mode, with optimizations]' \ + '--target=[build for the target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + fetch) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + generate-lockfile) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + git-checkout) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--reference=[REF]' \ + '--url=[URL]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + help) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '*: :_cargo_cmds' \ + ;; + + init) + _arguments \ + '--bin[use binary template]' \ + '--vcs:initialize a new repo with a given VCS:(git hg none)' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--name=[set the resulting package name]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + install) + _arguments \ + '--bin=[only install the specified binary]' \ + '--branch=[branch to use when installing from git]' \ + '--color=:colorization option:(auto always never)' \ + '--debug[build in debug mode instead of release mode]' \ + '--example[install the specified example instead of binaries]' \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '--git=[URL from which to install the crate]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--no-default-features[do not build the 
default features]' \ + '--path=[local filesystem path to crate to install]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--rev=[specific commit to use when installing from git]' \ + '--root=[directory to install packages into]: :_files -/' \ + '--tag=[tag to use when installing from git]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--vers=[version to install from crates.io]' \ + ;; + + locate-project) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + ;; + + login) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--host=[Host to set the token for]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + metadata) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + "--no-deps[output information only about the root package and don't fetch dependencies]" \ + '--no-default-features[do not include the default feature]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '--format-version=[format version(default: 1)]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + new) + _arguments \ + '--bin[use binary template]' \ + '--vcs:initialize a new repo with a given VCS:(git hg none)' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--name=[set the resulting package name]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + owner) + _arguments \ + '(-a, --add)'{-a,--add}'[add owner LOGIN]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--index[registry index]' \ + '(-l, --list)'{-l,--list}'[list owners of a crate]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-r, --remove)'{-r,--remove}'[remove owner LOGIN]' \ + '--token[API token to use when authenticating]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + package) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-l, --list)'{-l,--list}'[print files included in a package without making one]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-metadata[ignore warnings about a lack of human-usable metadata]' \ + '--no-verify[do not build to verify contents]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + pkgid) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + publish) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--host=[Host to set the token for]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-verify[Do not verify tarball until before publish]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--token[token 
to use when uploading]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + read-manifest) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + run) + _arguments \ + '--example=[name of the bin target]' \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--bin=[name of the bin target]' \ + '--no-default-features[do not build the default features]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release=[build in release mode]' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + '*: :_normal' \ + ;; + + rustc) + _arguments \ + '--color=:colorization option:(auto always never)' \ + '--features=[features to compile for the package]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'=[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to the manifest to fetch dependencies for]: :_files -/' \ + '--no-default-features[do not compile default features for the package]' \ + '(-p, --package)'{-p,--package}'=[profile to compile for]' \ + '--profile=[profile to build the selected target for]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[build artifacts in release mode, with optimizations]' \ + '--target=[target triple which compiles will be for]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + "${command_scope_spec[@]}" \ + ;; + + rustdoc) + _arguments \ + '--color=:colorization option:(auto always never)' \ + '--features=[space-separated list of features to also build]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'=[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to the manifest to document]: :_files -/' \ + '--no-default-features[do not build the `default` feature]' \ + '--open[open the docs in a browser after the operation]' \ + '(-p, --package)'{-p,--package}'=[package to document]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[build artifacts in release mode, with optimizations]' \ + '--target=[build for the target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + "${command_scope_spec[@]}" \ + ;; + + search) + _arguments \ + '--color=:colorization option:(auto always never)' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--host=[host of a registry to search in]' \ + '--limit=[limit the number of results]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + ;; + + test) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--test=[test name]: 
:_test_names' \
+            '--no-default-features[do not build the default features]' \
+            '--no-fail-fast[run all tests regardless of failure]' \
+            '--no-run[compile but do not run]' \
+            '(-p,--package)'{-p=,--package=}'[package to run tests for]:packages:_get_package_names' \
+            '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+            '--release[build artifacts in release mode, with optimizations]' \
+            '--target=[target triple]' \
+            '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+            '--color=:colorization option:(auto always never)' \
+            '1: :_test_names' \
+            '(--doc --bin --example --test --bench)--lib[only test library]' \
+            '(--lib --bin --example --test --bench)--doc[only test documentation]' \
+            '(--lib --doc --example --test --bench)--bin=[binary name]' \
+            '(--lib --doc --bin --test --bench)--example=[example name]' \
+            '(--lib --doc --bin --example --bench)--test=[test name]' \
+            '(--lib --doc --bin --example --test)--bench=[benchmark name]' \
+            '--message-format:error format:(human json)' \
+            '--frozen[require lock and cache up to date]' \
+            '--locked[require lock up to date]'
+            ;;
+
+        uninstall)
+            _arguments \
+                '--bin=[only uninstall the binary NAME]' \
+                '--color=:colorization option:(auto always never)' \
+                '(-h, --help)'{-h,--help}'[show help message]' \
+                '(-q, --quiet)'{-q,--quiet}'[less output printed to stdout]' \
+                '--root=[directory to uninstall packages from]: :_files -/' \
+                '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+            ;;
+
+        update)
+            _arguments \
+                '--aggressive=[force dependency update]' \
+                '(-h, --help)'{-h,--help}'[show help message]' \
+                '--manifest-path=[path to manifest]: :_files -/' \
+                '(-p,--package)'{-p=,--package=}'[package to update]:packages:_get_package_names' \
+                '--precise=[update single dependency to PRECISE]: :' \
+                '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                '--color=:colorization option:(auto always never)' \
+            ;;
+
+        verify-project)
+            _arguments \
+                '(-h, --help)'{-h,--help}'[show help message]' \
+                '--manifest-path=[path to manifest]: :_files -/' \
+                '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                '--color=:colorization option:(auto always never)' \
+            ;;
+
+        version)
+            _arguments \
+                '(-h, --help)'{-h,--help}'[show help message]' \
+                '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                '--color=:colorization option:(auto always never)' \
+            ;;
+
+        yank)
+            _arguments \
+                '(-h, --help)'{-h,--help}'[show help message]' \
+                '--index[registry index]' \
+                '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                '--token[API token to use when authenticating]' \
+                '--undo[undo a yank, putting a version back into the index]' \
+                '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                '--color=:colorization option:(auto always never)' \
+                '--vers[yank version]' \
+            ;;
+    esac
+    ;;
+esac
+}
+
+_cargo_cmds(){
+local -a commands;commands=(
+'bench:execute all benchmarks of a local package'
+'build:compile the current project'
+'check:check the current project without compiling'
+'clean:remove generated artifacts'
+'doc:build package documentation'
+'fetch:fetch package dependencies'
+'generate-lockfile:create lockfile'
+'git-checkout:git checkout'
+'help:get help for commands'
+'init:create new project in current directory'
+'install:install a Rust binary'
+'locate-project:print "Cargo.toml" location'
+'login:login to remote server'
+'metadata:the metadata for a project in json'
+'new:create a new project'
+'owner:manage the owners of a crate on the registry'
+'package:assemble local package into a distributable tarball'
+'pkgid:print a fully qualified package specification'
+'publish:upload package to the registry'
+'read-manifest:print manifest in JSON format'
+'run:run the main binary of the local package'
+'rustc:compile a package and all of its dependencies'
+'rustdoc:build documentation for a package'
+'search:search packages on crates.io'
+'test:execute all unit and integration tests of a local package'
+'uninstall:remove a Rust binary'
+'update:update dependencies'
+'verify-project:check Cargo.toml'
+'version:show version information'
+'yank:remove a pushed crate from the index'
+)
+_describe -t common-commands 'common commands' commands
+}
+
+_cargo_all_cmds(){
+local -a commands;commands=($(cargo --list))
+_describe -t all-commands 'all commands' commands
+}
+
+
+#FIXME: Disabled until fixed
+#gets package names from the manifest file
+_get_package_names()
+{
+}
+
+#TODO:see if it makes sense to have 'locate-project' to have non-json output.
+#strips package name from json stuff
+_locate_manifest(){
+local manifest=`cargo locate-project 2>/dev/null`
+regexp-replace manifest '\{"root":"|"\}' ''
+echo $manifest
+}
+
+# Extracts the values of "name" from the array given in $1 and shows them as
+# command line options for completion
+_get_names_from_array()
+{
+    local -a filelist;
+    local manifest=$(_locate_manifest)
+    if [[ -z $manifest ]]; then
+        return 0
+    fi
+
+    local last_line
+    local -a names;
+    local in_block=false
+    local block_name=$1
+    names=()
+    while read line
+    do
+        if [[ $last_line == "[[$block_name]]" ]]; then
+            in_block=true
+        else
+            if [[ $last_line =~ '.*\[\[.*' ]]; then
+                in_block=false
+            fi
+        fi
+
+        if [[ $in_block == true ]]; then
+            if [[ $line =~ '.*name.*=' ]]; then
+                regexp-replace line '^.*name *= *|"' ""
+                names+=$line
+            fi
+        fi
+
+        last_line=$line
+    done < $manifest
+    _describe $block_name names
+
+}
+
+#Gets the test names from the manifest file
+_test_names()
+{
+    _get_names_from_array "test"
+}
+
+#Gets the bench names from the manifest file
+_benchmark_names()
+{
+    _get_names_from_array "bench"
+}
+
+# These flags are mutually exclusive specifiers for the scope of a command; as
+# they are used in multiple places without change, they are expanded into the
+# appropriate command's `_arguments` where appropriate.
+set command_scope_spec
+command_scope_spec=(
+    '(--bin --example --test --lib)--bench=[benchmark name]: :_benchmark_names'
+    '(--bench --bin --test --lib)--example=[example name]'
+    '(--bench --example --test --lib)--bin=[binary name]'
+    '(--bench --bin --example --test)--lib=[library name]'
+    '(--bench --bin --example --lib)--test=[test name]'
+)
+
+_cargo
diff --git a/src/etc/cargo.bashcomp.sh b/src/etc/cargo.bashcomp.sh
new file mode 100644
index 000000000..bed72cc80
--- /dev/null
+++ b/src/etc/cargo.bashcomp.sh
@@ -0,0 +1,246 @@
+command -v cargo >/dev/null 2>&1 &&
+_cargo()
+{
+    local cur prev words cword
+    _get_comp_words_by_ref cur prev words cword
+
+    COMPREPLY=()
+
+    # Skip past - and + options to find the command.
+    local nwords=${#words[@]}
+    local cmd_i cmd
+    for (( cmd_i=1; cmd_i<$nwords; cmd_i++ ));
+    do
+        if [[ !
"${words[$cmd_i]}" =~ ^[+-] ]]; then + cmd="${words[$cmd_i]}" + break + fi + done + + local vcs='git hg none' + local color='auto always never' + local msg_format='human json' + + local opt_help='-h --help' + local opt_verbose='-v --verbose' + local opt_quiet='-q --quiet' + local opt_color='--color' + local opt_common="$opt_help $opt_verbose $opt_quiet $opt_color" + local opt_pkg='-p --package' + local opt_feat='--features --all-features --no-default-features' + local opt_mani='--manifest-path' + local opt_jobs='-j --jobs' + local opt_force='-f --force' + local opt_test='--test --bench' + local opt_lock='--frozen --locked' + + local opt___nocmd="$opt_common -V --version --list" + local opt__bench="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --no-run" + local opt__build="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --release" + local opt__check="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --release" + local opt__clean="$opt_common $opt_pkg $opt_mani $opt_lock --target --release" + local opt__doc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs --message-format --bin --lib --target --open --no-deps --release" + local opt__fetch="$opt_common $opt_mani $opt_lock" + local opt__generate_lockfile="${opt__fetch}" + local opt__git_checkout="$opt_common $opt_lock --reference --url" + local opt__help="$opt_help" + local opt__init="$opt_common $opt_lock --bin --lib --name --vcs" + local opt__install="$opt_common $opt_feat $opt_jobs $opt_lock $opt_force --bin --branch --debug --example --git --list --path --rev --root --tag --vers" + local opt__locate_project="$opt_mani -h --help" + local opt__login="$opt_common $opt_lock --host" + local opt__metadata="$opt_common $opt_feat $opt_mani $opt_lock --format-version --no-deps" + local opt__new="$opt_common $opt_lock --vcs --bin --lib --name" + local opt__owner="$opt_common $opt_lock -a --add -r --remove -l --list --index --token" + local opt__package="$opt_common $opt_mani $opt_lock $opt_jobs --allow-dirty -l --list --no-verify --no-metadata" + local opt__pkgid="${opt__fetch} $opt_pkg" + local opt__publish="$opt_common $opt_mani $opt_lock $opt_jobs --allow-dirty --dry-run --host --token --no-verify" + local opt__read_manifest="$opt_help $opt_verbose $opt_mani $opt_color --no-deps" + local opt__run="$opt_common $opt_feat $opt_mani $opt_lock $opt_jobs --message-format --target --bin --example --release" + local opt__rustc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --profile --target --lib --bin --example --release" + local opt__rustdoc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --release --open" + local opt__search="$opt_common $opt_lock --host --limit" + local opt__test="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --all --doc --target --lib --bin --example --no-run --release --no-fail-fast" + local opt__uninstall="$opt_common $opt_lock --bin --root" + local opt__update="$opt_common $opt_pkg $opt_mani $opt_lock --aggressive --precise" + local opt__verify_project="${opt__fetch}" + local opt__version="$opt_help $opt_verbose $opt_color" + local opt__yank="$opt_common $opt_lock --vers --undo --index --token" + + if [[ $cmd_i -ge $nwords-1 ]]; then + # Completion 
before or at the command. + if [[ "$cur" == -* ]]; then + COMPREPLY=( $( compgen -W "${opt___nocmd}" -- "$cur" ) ) + elif [[ "$cur" == +* ]]; then + COMPREPLY=( $( compgen -W "$(_toolchains)" -- "$cur" ) ) + else + COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) ) + fi + else + case "${prev}" in + --vcs) + COMPREPLY=( $( compgen -W "$vcs" -- "$cur" ) ) + ;; + --color) + COMPREPLY=( $( compgen -W "$color" -- "$cur" ) ) + ;; + --message-format) + COMPREPLY=( $( compgen -W "$msg_format" -- "$cur" ) ) + ;; + --manifest-path) + _filedir toml + ;; + --bin) + COMPREPLY=( $( compgen -W "$(_bin_names)" -- "$cur" ) ) + ;; + --test) + COMPREPLY=( $( compgen -W "$(_test_names)" -- "$cur" ) ) + ;; + --bench) + COMPREPLY=( $( compgen -W "$(_benchmark_names)" -- "$cur" ) ) + ;; + --example) + COMPREPLY=( $( compgen -W "$(_get_examples)" -- "$cur" ) ) + ;; + --target) + COMPREPLY=( $( compgen -W "$(_get_targets)" -- "$cur" ) ) + ;; + help) + COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) ) + ;; + *) + local opt_var=opt__${cmd//-/_} + COMPREPLY=( $( compgen -W "${!opt_var}" -- "$cur" ) ) + ;; + esac + fi + + # compopt does not work in bash version 3 + + return 0 +} && +complete -F _cargo cargo + +__cargo_commands=$(cargo --list 2>/dev/null | tail -n +2) + +_locate_manifest(){ + local manifest=`cargo locate-project 2>/dev/null` + # regexp-replace manifest '\{"root":"|"\}' '' + echo ${manifest:9:-2} +} + +# Extracts the values of "name" from the array given in $1 and shows them as +# command line options for completion +_get_names_from_array() +{ + local manifest=$(_locate_manifest) + if [[ -z $manifest ]]; then + return 0 + fi + + local last_line + local -a names + local in_block=false + local block_name=$1 + while read line + do + if [[ $last_line == "[[$block_name]]" ]]; then + in_block=true + else + if [[ $last_line =~ .*\[\[.* ]]; then + in_block=false + fi + fi + + if [[ $in_block == true ]]; then + if [[ $line =~ .*name.*\= ]]; then + line=${line##*=} + line=${line%%\"} + line=${line##*\"} + names+=($line) + fi + fi + + last_line=$line + done < $manifest + echo "${names[@]}" +} + +#Gets the bin names from the manifest file +_bin_names() +{ + _get_names_from_array "bin" +} + +#Gets the test names from the manifest file +_test_names() +{ + _get_names_from_array "test" +} + +#Gets the bench names from the manifest file +_benchmark_names() +{ + _get_names_from_array "bench" +} + +_get_examples(){ + local files=($(dirname $(_locate_manifest))/examples/*.rs) + local names=("${files[@]##*/}") + local names=("${names[@]%.*}") + # "*" means no examples found + if [[ "${names[@]}" != "*" ]]; then + echo "${names[@]}" + fi +} + +_get_targets(){ + local CURRENT_PATH + if [ `uname -o` == "Cygwin" -a -f "$PWD"/Cargo.toml ]; then + CURRENT_PATH=$PWD + else + CURRENT_PATH=$(_locate_manifest) + fi + if [[ -z "$CURRENT_PATH" ]]; then + return 1 + fi + local TARGETS=() + local FIND_PATHS=( "/" ) + local FIND_PATH LINES LINE + while [[ "$CURRENT_PATH" != "/" ]]; do + FIND_PATHS+=( "$CURRENT_PATH" ) + CURRENT_PATH=$(dirname $CURRENT_PATH) + done + for FIND_PATH in ${FIND_PATHS[@]}; do + if [[ -f "$FIND_PATH"/.cargo/config ]]; then + LINES=( `grep "$FIND_PATH"/.cargo/config -e "^\[target\."` ) + for LINE in ${LINES[@]}; do + TARGETS+=(`sed 's/^\[target\.\(.*\)\]$/\1/' <<< $LINE`) + done + fi + done + echo "${TARGETS[@]}" +} + +_toolchains(){ + local result=() + local toolchains=$(rustup toolchain list) + local channels="nightly|beta|stable|[0-9]\.[0-9]{1,2}\.[0-9]" + local 
date="[0-9]{4}-[0-9]{2}-[0-9]{2}"
+    while read line
+    do
+        # Strip " (default)"
+        line=${line%% *}
+        if [[ "$line" =~ ^($channels)(-($date))?(-.*) ]]; then
+            if [[ -z ${BASH_REMATCH[3]} ]]; then
+                result+=("+${BASH_REMATCH[1]}")
+            else
+                # channel-date
+                result+=("+${BASH_REMATCH[1]}-${BASH_REMATCH[3]}")
+            fi
+            result+=("+$line")
+        fi
+    done <<< "$toolchains"
+    echo "${result[@]}"
+}
+
+# vim:ft=sh
diff --git a/src/etc/man/cargo-bench.1 b/src/etc/man/cargo-bench.1
new file mode 100644
index 000000000..dfb9ee4ec
--- /dev/null
+++ b/src/etc/man/cargo-bench.1
@@ -0,0 +1,143 @@
+.TH "CARGO\-BENCH" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-bench \- Execute benchmarks of a package
+.SH SYNOPSIS
+.PP
+\f[I]cargo bench\f[] [OPTIONS] [\-\-] [...]
+.SH DESCRIPTION
+.PP
+Execute all benchmarks of a local package.
+.PP
+All of the trailing arguments are passed to the benchmark binaries
+generated for filtering benchmarks and generally providing options
+configuring how they run.
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a
+package id specification which indicates which package should be built.
+If it is not given, then the current package is built.
+For more information on \f[I]SPEC\f[] and its format, see the "cargo
+help pkgid" command.
+.PP
+The \f[B]\-\-jobs\f[] argument affects the building of the benchmark
+executable but does not affect how many jobs are used when running the
+benchmarks.
+.PP
+Compilation can be customized with the \[aq]bench\[aq] profile in the
+manifest.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-lib
+Benchmark only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Benchmark only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Benchmark only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Benchmark only the specified test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Benchmark only the specified bench target.
+.RS
+.RE
+.TP
+.B \-\-no\-run
+Compile, but don\[aq]t run benchmarks.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to run benchmarks for.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Execute all the benchmarks of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ bench
+\f[]
+.fi
+.PP
+Execute the BENCH benchmark
+.IP
+.nf
+\f[C]
+$\ cargo\ bench\ \-\-bench\ BENCH
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-build(1), cargo\-test(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-build.1 b/src/etc/man/cargo-build.1
new file mode 100644
index 000000000..18c16c63d
--- /dev/null
+++ b/src/etc/man/cargo-build.1
@@ -0,0 +1,132 @@
+.TH "CARGO\-BUILD" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-build \- Compile the current project
+.SH SYNOPSIS
+.PP
+\f[I]cargo build\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Compile a local package and all of its dependencies.
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a
+package id specification which indicates which package should be built.
+If it is not given, then the current package is built.
+For more information on \f[I]SPEC\f[] and its format, see the "cargo
+help pkgid" command.
+.PP
+Compilation can be configured via the use of profiles which are
+configured in the manifest.
+The default profile for this command is \f[I]dev\f[], but passing the
+\f[B]\-\-release\f[] flag will use the \f[I]release\f[] profile instead.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to build.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-lib
+Build only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Build only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Build only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Build only the specified test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Build only the specified benchmark target.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Build a local package and all of its dependencies
+.IP
+.nf
+\f[C]
+$\ cargo\ build
+\f[]
+.fi
+.PP
+Build a package with optimizations
+.IP
+.nf
+\f[C]
+$\ cargo\ build\ \-\-release
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-check.1 b/src/etc/man/cargo-check.1
new file mode 100644
index 000000000..0931bf0e9
--- /dev/null
+++ b/src/etc/man/cargo-check.1
@@ -0,0 +1,132 @@
+.TH "CARGO\-CHECK" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-check \- Check the current project
+.SH SYNOPSIS
+.PP
+\f[I]cargo check\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Check a local package and all of its dependencies.
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a
+package id specification which indicates which package should be checked.
+If it is not given, then the current package is checked.
+For more information on \f[I]SPEC\f[] and its format, see the "cargo
+help pkgid" command.
+.PP
+Compilation can be configured via the use of profiles which are
+configured in the manifest.
+The default profile for this command is \f[I]dev\f[], but passing the
+\f[B]\-\-release\f[] flag will use the \f[I]release\f[] profile instead.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to check.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-lib
+Check only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Check only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Check only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Check only the specified test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Check only the specified benchmark target.
+.RS
+.RE
+.TP
+.B \-\-release
+Check artifacts in release mode.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Check with all available features.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also check.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not check the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Check for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Check a local package and all of its dependencies
+.IP
+.nf
+\f[C]
+$\ cargo\ check
+\f[]
+.fi
+.PP
+Check a package with optimizations
+.IP
+.nf
+\f[C]
+$\ cargo\ check\ \-\-release
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-clean.1 b/src/etc/man/cargo-clean.1
new file mode 100644
index 000000000..6777c984f
--- /dev/null
+++ b/src/etc/man/cargo-clean.1
@@ -0,0 +1,82 @@
+.TH "CARGO\-CLEAN" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-clean \- Remove generated artifacts
+.SH SYNOPSIS
+.PP
+\f[I]cargo clean\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Remove artifacts that cargo has generated in the past.
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a
+package id specification which indicates which package should be cleaned.
+If it is not given, then the current package is cleaned.
+For more information on \f[I]SPEC\f[] and its format, see the "cargo
+help pkgid" command.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to clean artifacts for.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path PATH
+Path to the manifest of the package to clean.
+.RS
+.RE
+.TP
+.B \-\-target TRIPLE
+Target triple to clean output for (default all).
+.RS
+.RE
+.TP
+.B \-\-release
+Whether or not to clean release artifacts.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Remove local package generated artifacts
+.IP
+.nf
+\f[C]
+$\ cargo\ clean
+\f[]
+.fi
+.PP
+Clean release artifacts
+.IP
+.nf
+\f[C]
+$\ cargo\ clean\ \-\-release
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-doc.1 b/src/etc/man/cargo-doc.1
new file mode 100644
index 000000000..f910957c6
--- /dev/null
+++ b/src/etc/man/cargo-doc.1
@@ -0,0 +1,109 @@
+.TH "CARGO\-DOC" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-doc \- Build a package\[aq]s documentation
+.SH SYNOPSIS
+.PP
+\f[I]cargo doc\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Build a package\[aq]s documentation.
+.PP
+By default the documentation for the local package and all dependencies
+is built.
+The output is all placed in \[aq]target/doc\[aq] in rustdoc\[aq]s usual
+format.
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a
+package id specification which indicates which package should be documented.
+If it is not given, then the current package is documented.
+For more information on \f[I]SPEC\f[] and its format, see the "cargo
+help pkgid" command.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to document.
+.RS
+.RE
+.TP
+.B \-\-open
+Opens the docs in a browser after the operation.
+.RS
+.RE
+.TP
+.B \-\-no\-deps
+Don\[aq]t build documentation for dependencies.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Build a local package\[aq]s documentation in \[aq]target/doc\[aq]
+.IP
+.nf
+\f[C]
+$\ cargo\ doc
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-fetch.1 b/src/etc/man/cargo-fetch.1
new file mode 100644
index 000000000..96c49ab88
--- /dev/null
+++ b/src/etc/man/cargo-fetch.1
@@ -0,0 +1,52 @@
+.TH "CARGO\-FETCH" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-fetch \- Fetch dependencies of a package from the network
+.SH SYNOPSIS
+.PP
+\f[I]cargo fetch\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+If a lockfile is available, this command will ensure that all of the git
+dependencies and/or registry dependencies are downloaded and locally
+available. The network is never touched after a `cargo fetch` unless
+the lockfile changes.
+
+If the lockfile is not available, then this is the equivalent of
+`cargo generate-lockfile`. A lockfile is generated and all dependencies
+are also updated.
+.PP +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-manifest-path \f[I]PATH\f[] +Path to the manifest to fetch dependencies for. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1), cargo\-update(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/src/etc/man/cargo-generate-lockfile.1 b/src/etc/man/cargo-generate-lockfile.1 new file mode 100644 index 000000000..313471a04 --- /dev/null +++ b/src/etc/man/cargo-generate-lockfile.1 @@ -0,0 +1,41 @@ +.TH "CARGO\-GENERATE LOCKFILE" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-generate-lockfile \- Generate the lockfile for a project +.SH SYNOPSIS +.PP +\f[I]cargo generate-lockfile\f[] [OPTIONS] +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-manifest-path \f[I]PATH\f[] +Path to the manifest to generate a lockfile for. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/src/etc/man/cargo-init.1 b/src/etc/man/cargo-init.1 new file mode 100644 index 000000000..a2b392ad1 --- /dev/null +++ b/src/etc/man/cargo-init.1 @@ -0,0 +1,68 @@ +.TH "CARGO\-INIT" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-init \- Create a new cargo package in the current directory +.SH SYNOPSIS +.PP +\f[I]cargo init\f[] [OPTIONS] +.SH DESCRIPTION +.PP +Create a new cargo package in the current directory. +.PP +Use the \f[B]\-\-vcs\f[] option to control the version control system to +use. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-vcs \f[I]VCS\f[] +Initialize a new repository for the given version control system (git or +hg) or do not initialize any version control at all (none) overriding a +global configuration. +.RS +.RE +.TP +.B \-\-bin +Use a binary instead of a library template. +.RS +.RE +.TP +.B \-\-name \f[I]NAME\f[] +Set the resulting package name. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH EXAMPLES +.PP +Initialize a binary cargo package in the current directory +.IP +.nf +\f[C] +$\ cargo\ init\ \-\-bin +\f[] +.fi +.SH SEE ALSO +.PP +cargo(1), cargo\-new(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. 
diff --git a/src/etc/man/cargo-install.1 b/src/etc/man/cargo-install.1
new file mode 100644
index 000000000..f90ad08aa
--- /dev/null
+++ b/src/etc/man/cargo-install.1
@@ -0,0 +1,161 @@
+.TH "CARGO\-INSTALL" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-install \- Install a Rust binary
+.SH SYNOPSIS
+.PP
+\f[I]cargo install\f[] [OPTIONS]
+.PP
+\f[I]cargo install\f[] [OPTIONS] \-\-list
+.SH DESCRIPTION
+.PP
+Install a Rust binary.
+.PP
+This command manages Cargo\[aq]s local set of installed binary crates.
+Only packages which have [[bin]] targets can be installed, and all
+binaries are installed into the installation root\[aq]s \f[I]bin\f[]
+folder.
+The installation root is determined, in order of precedence, by
+\f[B]\-\-root\f[], \f[I]$CARGO_INSTALL_ROOT\f[], the
+\f[I]install.root\f[] configuration key, and finally the home directory
+(which is either \f[I]$CARGO_HOME\f[] if set or \f[I]$HOME/.cargo\f[] by
+default).
+.PP
+There are multiple sources from which a crate can be installed.
+The default location is crates.io but the \f[B]\-\-git\f[] and
+\f[B]\-\-path\f[] flags can change this source.
+If the source contains more than one package (such as \f[I]crates.io\f[]
+or a git repository with multiple crates) the crate name argument is
+required to indicate which crate should be installed.
+.PP
+Crates from crates.io can optionally specify the version they wish to
+install via the \f[B]\-\-vers\f[] flag, and similarly packages from git
+repositories can optionally specify the branch, tag, or revision that
+should be installed.
+If a crate has multiple binaries, the \f[B]\-\-bin\f[] argument can
+selectively install only one of them, and if you\[aq]d rather install
+examples the \f[B]\-\-example\f[] argument can be used as well.
+.PP
+As a special convenience, omitting the crate specification entirely
+will install the crate in the current directory.
+That is, \f[I]install\f[] is equivalent to the more explicit "install
+\-\-path .".
+.PP
+If the source is crates.io or \f[B]\-\-git\f[] then by default the crate
+will be built in a temporary target directory.
+To avoid this, the target directory can be specified by setting the
+\f[B]CARGO_TARGET_DIR\f[] environment variable to a relative path.
+In particular, this can be useful for caching build artifacts on
+continuous integration systems.
+.PP
+The \f[B]\-\-list\f[] option will list all installed packages (and their
+versions).
+.SH OPTIONS
+.SS Query options
+.TP
+.B \-\-list
+List all installed packages (and their versions).
+.RS
+.RE
+.SS Specifying what crate to install
+.TP
+.B \-\-vers \f[I]VERS\f[]
+Specify a version to install from crates.io.
+.RS
+.RE
+.TP
+.B \-\-git \f[I]URL\f[]
+Git URL to install the specified crate from.
+.RS
+.RE
+.TP
+.B \-\-branch \f[I]BRANCH\f[]
+Branch to use when installing from git.
+.RS
+.RE
+.TP
+.B \-\-tag \f[I]TAG\f[]
+Tag to use when installing from git.
+.RS
+.RE
+.TP
+.B \-\-rev \f[I]SHA\f[]
+Specific commit to use when installing from git.
+.RS
+.RE
+.TP
+.B \-\-path \f[I]PATH\f[]
+Filesystem path to local crate to install.
+.RS
+.RE
+.SS Build and install options
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to activate.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS +.RE +.TP +.B \-f, \-\-force +Force overwriting existing crates or binaries +.RS +.RE +.TP +.B \-\-no\-default\-features +Do not build the \f[C]default\f[] feature. +.RS +.RE +.TP +.B \-\-debug +Build in debug mode instead of release mode. +.RS +.RE +.TP +.B \-\-bin \f[I]NAME\f[] +Only install the binary NAME. +.RS +.RE +.TP +.B \-\-example \f[I]EXAMPLE\f[] +Install the example EXAMPLE instead of binaries. +.RS +.RE +.TP +.B \-\-root \f[I]DIR\f[] +Directory to install packages into. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1), cargo\-search(1), cargo\-publish(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/src/etc/man/cargo-login.1 b/src/etc/man/cargo-login.1 new file mode 100644 index 000000000..a82c8284b --- /dev/null +++ b/src/etc/man/cargo-login.1 @@ -0,0 +1,41 @@ +.TH "CARGO\-LOGIN" "1" "July 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-login \- Save an API token from the registry locally +.SH SYNOPSIS +.PP +\f[I]cargo login\f[] [OPTIONS] [] +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-host \f[I]HOST\f[] +Host to set the token for +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1), cargo\-publish(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. diff --git a/src/etc/man/cargo-metadata.1 b/src/etc/man/cargo-metadata.1 new file mode 100644 index 000000000..69d72535c --- /dev/null +++ b/src/etc/man/cargo-metadata.1 @@ -0,0 +1,71 @@ +.TH "CARGO\-METADATA" "1" "May 2016" "The Rust package manager" "Cargo Manual" +.hy +.SH NAME +.PP +cargo\-metadata \- Machine-readable metadata about the current project +.SH SYNOPSIS +.PP +\f[I]cargo metadata\f[] [OPTIONS] +.SH DESCRIPTION +.PP +Output the resolved dependencies of a project, the concrete used versions +including overrides, in machine-readable format. +.SH OPTIONS +.TP +.B \-h, \-\-help +Print this message. +.RS +.RE +.TP +.B \-\-features \f[I]FEATURES\f[] +Space-separated list of features. +.RS +.RE +.TP +.B \-\-all\-features +Build all available features. +.RS +.RE +.TP +.B \-\-no\-default\-features +Do not include the \f[C]default\f[] feature. +.RS +.RE +.TP +.B \-\-no\-deps +Output information only about the root package and don\[aq]t fetch +dependencies. +.RS +.RE +.TP +.B \-\-manifest\-path \f[I]PATH\f[] +Path to the manifest. +.RS +.RE +.TP +.B \-\-format\-version \f[I]VERSION\f[] +Format version [default: 1]. Valid values: 1. +.RS +.RE +.TP +.B \-v, \-\-verbose +Use verbose output. +.RS +.RE +.TP +.B \-q, \-\-quiet +No output printed to stdout. +.RS +.RE +.TP +.B \-\-color \f[I]WHEN\f[] +Coloring: auto, always, never. +.RS +.RE +.SH SEE ALSO +.PP +cargo(1) +.SH COPYRIGHT +.PP +This work is dual\-licensed under Apache 2.0 and MIT terms. +See \f[I]COPYRIGHT\f[] file in the cargo source distribution. 
diff --git a/src/etc/man/cargo-new.1 b/src/etc/man/cargo-new.1
new file mode 100644
index 000000000..7325c5b2d
--- /dev/null
+++ b/src/etc/man/cargo-new.1
@@ -0,0 +1,68 @@
+.TH "CARGO\-NEW" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-new \- Create a new cargo package
+.SH SYNOPSIS
+.PP
+\f[I]cargo new\f[] [OPTIONS] \f[I]PATH\f[]
+.SH DESCRIPTION
+.PP
+Create a new cargo package at the given \f[I]PATH\f[].
+.PP
+Use the \f[B]\-\-vcs\f[] option to control the version control system to
+use.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-vcs \f[I]VCS\f[]
+Initialize a new repository for the given version control system (git or
+hg) or do not initialize any version control at all (none), overriding a
+global configuration.
+.RS
+.RE
+.TP
+.B \-\-bin
+Use a binary instead of a library template.
+.RS
+.RE
+.TP
+.B \-\-name \f[I]NAME\f[]
+Set the resulting package name.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Create a binary cargo package in the current directory
+.IP
+.nf
+\f[C]
+$\ cargo\ new\ \-\-bin\ ./
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-init(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-owner.1 b/src/etc/man/cargo-owner.1
new file mode 100644
index 000000000..c690dc048
--- /dev/null
+++ b/src/etc/man/cargo-owner.1
@@ -0,0 +1,88 @@
+.TH "CARGO\-OWNER" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-owner \- Manage the owners of a crate on the registry
+.SH SYNOPSIS
+.PP
+\f[I]cargo owner\f[] [OPTIONS] [\f[I]CRATE\f[]]
+.SH DESCRIPTION
+.PP
+This command will modify the owners for a package on the specified
+registry (or default). Note that owners of a package can upload new
+versions, and yank old versions. Explicitly named owners can also modify
+the set of owners, so take caution!
+.PP
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-a, \-\-add \f[I]LOGIN\f[]
+Name of a user or team to add as an owner.
+.RS
+.RE
+.TP
+.B \-r, \-\-remove \f[I]LOGIN\f[]
+Name of a user or team to remove as an owner.
+.RS
+.RE
+.TP
+.B \-l, \-\-list
+List owners of a crate.
+.RS
+.RE
+.TP
+.B \-\-index \f[I]INDEX\f[]
+Registry index to modify owners for.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Add user as an owner of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ owner\ \-\-add\ user
+\f[]
+.fi
+.PP
+Remove user as an owner of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ owner\ \-\-remove\ user
+\f[]
+.fi
+.PP
+Use a certain API token to authenticate with
+.IP
+.nf
+\f[C]
+$\ cargo\ owner\ \-\-token\ U6WHXacP3Qqwd5kze1fohr4JEOmGCuRK2
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-publish(1), cargo\-login(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-package.1 b/src/etc/man/cargo-package.1
new file mode 100644
index 000000000..fc703b2fa
--- /dev/null
+++ b/src/etc/man/cargo-package.1
@@ -0,0 +1,59 @@
+.TH "CARGO\-PACKAGE" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-package \- Create a distributable tarball
+.SH SYNOPSIS
+.PP
+\f[I]cargo package\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Assemble the local package into a distributable tarball.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-l, \-\-list
+Print files included in a package without making one.
+.RS
+.RE
+.TP
+.B \-\-no\-verify
+Don\[aq]t verify the contents by building them.
+.RS
+.RE
+.TP
+.B \-\-no\-metadata
+Ignore warnings about a lack of human\-usable metadata.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-pkgid.1 b/src/etc/man/cargo-pkgid.1
new file mode 100644
index 000000000..d06da2dde
--- /dev/null
+++ b/src/etc/man/cargo-pkgid.1
@@ -0,0 +1,75 @@
+.TH "CARGO\-PKGID" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-pkgid \- Print a fully qualified package specification
+.SH SYNOPSIS
+.PP
+\f[I]cargo pkgid\f[] [OPTIONS] [\f[I]SPEC\f[]]
+.SH DESCRIPTION
+.PP
+Given a \f[I]SPEC\f[] argument, print out the fully qualified package id
+specifier. This command will generate an error if \f[I]SPEC\f[] is
+ambiguous as to which package it refers to in the dependency graph. If no
+\f[I]SPEC\f[] is given, then the pkgid for the local package is printed.
+.PP
+This command requires that a lockfile is available and dependencies have
+been fetched.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest of the package.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Retrieve package specification for foo package
+.IP
+.nf
+\f[C]
+$\ cargo\ pkgid\ foo
+\f[]
+.fi
+.PP
+Retrieve package specification for version 1.0.0 of foo
+.IP
+.nf
+\f[C]
+$\ cargo\ pkgid\ foo:1.0.0
+\f[]
+.fi
+.PP
+Retrieve package specification for foo from crates.io
+.IP
+.nf
+\f[C]
+$\ cargo\ pkgid\ crates.io/foo
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-generate\-lockfile(1), cargo\-search(1), cargo\-metadata(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-publish.1 b/src/etc/man/cargo-publish.1
new file mode 100644
index 000000000..2f5063139
--- /dev/null
+++ b/src/etc/man/cargo-publish.1
@@ -0,0 +1,59 @@
+.TH "CARGO\-PUBLISH" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-publish \- Upload a package to the registry
+.SH SYNOPSIS
+.PP
+\f[I]cargo publish\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Upload a package to the registry.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-host \f[I]HOST\f[]
+Host to upload the package to.
+.RS
+.RE
+.TP
+.B \-\-token \f[I]TOKEN\f[]
+Token to use when uploading.
+.RS
+.RE
+.TP
+.B \-\-no\-verify
+Don\[aq]t verify package tarball before publish.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest of the package to publish.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-install(1), cargo\-search(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-run.1 b/src/etc/man/cargo-run.1
new file mode 100644
index 000000000..80473d2be
--- /dev/null
+++ b/src/etc/man/cargo-run.1
@@ -0,0 +1,103 @@
+.TH "CARGO\-RUN" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-run \- Run the current project
+.SH SYNOPSIS
+.PP
+\f[I]cargo run\f[] [OPTIONS] [\-\-] [...]
+.SH DESCRIPTION
+.PP
+Run the main binary of the local package (src/main.rs).
+.PP
+If neither \f[B]\-\-bin\f[] nor \f[B]\-\-example\f[] are given, then if
+the project only has one bin target it will be run.
+Otherwise \f[B]\-\-bin\f[] specifies the bin target to run, and
+\f[B]\-\-example\f[] specifies the example target to run.
+At most one of \f[B]\-\-bin\f[] or \f[B]\-\-example\f[] can be provided.
+.PP
+All of the trailing arguments are passed to the binary to run.
+If you\[aq]re passing arguments to both Cargo and the binary, the ones
+after \f[B]\-\-\f[] go to the binary, the ones before go to Cargo.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Name of the bin target to run.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Name of the example target to run.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Run the main binary of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ run
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-new(1), cargo\-init(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-rustc.1 b/src/etc/man/cargo-rustc.1
new file mode 100644
index 000000000..f5d9a3521
--- /dev/null
+++ b/src/etc/man/cargo-rustc.1
@@ -0,0 +1,126 @@
+.TH "CARGO\-RUSTC" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-rustc \- Compile a package and all of its dependencies
+.SH SYNOPSIS
+.PP
+\f[I]cargo rustc\f[] [OPTIONS] [\-\-] [...]
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-generate\-lockfile(1), cargo\-search(1), cargo\-metadata(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-publish.1 b/src/etc/man/cargo-publish.1
new file mode 100644
index 000000000..2f5063139
--- /dev/null
+++ b/src/etc/man/cargo-publish.1
@@ -0,0 +1,59 @@
+.TH "CARGO\-PUBLISH" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-publish \- Upload a package to the registry
+.SH SYNOPSIS
+.PP
+\f[I]cargo publish\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Upload a package to the registry.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-host \f[I]HOST\f[]
+Host to upload the package to.
+.RS
+.RE
+.TP
+.B \-\-token \f[I]TOKEN\f[]
+Token to use when uploading.
+.RS
+.RE
+.TP
+.B \-\-no\-verify
+Don\[aq]t verify the package tarball before publishing.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest of the package to publish.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
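+.SH EXAMPLES
+.PP
+Upload the package in the current directory, assuming a token was
+previously saved with \f[I]cargo login\f[]
+.IP
+.nf
+\f[C]
+$\ cargo\ publish
+\f[]
+.fi
+.PP
+Upload the package without verifying the tarball first
+.IP
+.nf
+\f[C]
+$\ cargo\ publish\ \-\-no\-verify
+\f[]
+.fi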
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-install(1), cargo\-search(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-run.1 b/src/etc/man/cargo-run.1
new file mode 100644
index 000000000..80473d2be
--- /dev/null
+++ b/src/etc/man/cargo-run.1
@@ -0,0 +1,103 @@
+.TH "CARGO\-RUN" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-run \- Run the current project
+.SH SYNOPSIS
+.PP
+\f[I]cargo run\f[] [OPTIONS] [\-\-] [<args>...]
+.SH DESCRIPTION
+.PP
+Run the main binary of the local package (src/main.rs).
+.PP
+If neither \f[B]\-\-bin\f[] nor \f[B]\-\-example\f[] is given, then if
+the project has only one bin target it will be run.
+Otherwise \f[B]\-\-bin\f[] specifies the bin target to run, and
+\f[B]\-\-example\f[] specifies the example target to run.
+At most one of \f[B]\-\-bin\f[] or \f[B]\-\-example\f[] can be provided.
+.PP
+All of the trailing arguments are passed to the binary to run.
+If you\[aq]re passing arguments to both Cargo and the binary, the ones
+after \f[B]\-\-\f[] go to the binary, the ones before go to Cargo.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Name of the bin target to run.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Name of the example target to run.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Run the main binary of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ run
+\f[]
+.fi
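+.PP
+Pass arguments to the binary rather than to Cargo by placing them after
+\f[B]\-\-\f[] (arg1 and arg2 here are placeholders)
+.IP
+.nf
+\f[C]
+$\ cargo\ run\ \-\-\ arg1\ arg2
+\f[]
+.fi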
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-new(1), cargo\-init(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-rustc.1 b/src/etc/man/cargo-rustc.1
new file mode 100644
index 000000000..f5d9a3521
--- /dev/null
+++ b/src/etc/man/cargo-rustc.1
@@ -0,0 +1,126 @@
+.TH "CARGO\-RUSTC" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-rustc \- Compile a package and all of its dependencies
+.SH SYNOPSIS
+.PP
+\f[I]cargo rustc\f[] [OPTIONS] [\-\-] [<opts>...]
+.SH DESCRIPTION
+.PP
+The specified target for the current package (or package specified by
+SPEC if provided) will be compiled along with all of its dependencies.
+The specified <opts>... will all be passed to the final compiler
+invocation, not to any of the dependencies.
+Note that the compiler will still unconditionally receive arguments such
+as \-L, \-\-extern, and \-\-crate\-type, and the specified <opts>...
+will simply be added to the compiler invocation.
+.PP
+This command requires that only one target is being compiled.
+If more than one target is available for the current package, the
+filters of \-\-lib, \-\-bin, etc., must be used to select which target
+is compiled.
+To pass flags to all compiler processes spawned by Cargo, use the
+$RUSTFLAGS environment variable or the \f[C]build.rustflags\f[]
+configuration option.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC\f[]
+Package to compile.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-lib
+Build only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Build only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Build only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Build only the specified test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Build only the specified benchmark target.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-profile \f[I]PROFILE\f[]
+Profile to build the selected target for.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not compile default features for the package.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
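+.SH EXAMPLES
+.PP
+Compile only the library target, passing an extra flag to the final
+\f[C]rustc\f[] invocation but not to any dependency (the flag shown is
+only an illustration)
+.IP
+.nf
+\f[C]
+$\ cargo\ rustc\ \-\-lib\ \-\-\ \-C\ opt\-level=3
+\f[]
+.fi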
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-run(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-rustdoc.1 b/src/etc/man/cargo-rustdoc.1
new file mode 100644
index 000000000..3a898a31a
--- /dev/null
+++ b/src/etc/man/cargo-rustdoc.1
@@ -0,0 +1,124 @@
+.TH "CARGO\-RUSTDOC" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-rustdoc \- Build a package\[aq]s documentation, using specified
+custom flags.
+
+.SH SYNOPSIS
+.PP
+\f[I]cargo rustdoc\f[] [OPTIONS] [\-\-] [<opts>...]
+.SH DESCRIPTION
+.PP
+The specified target for the current package (or package specified by
+SPEC if provided) will be documented with the specified <opts>...
+being passed to the final rustdoc invocation.
+Dependencies will not be documented as part of this command.
+Note that rustdoc will still unconditionally receive arguments such as
+\-L, \-\-extern, and \-\-crate\-type, and the specified <opts>...
+will simply be added to the rustdoc invocation.
+.PP
+If the \-\-package argument is given, then SPEC is a package id
+specification which indicates which package should be documented.
+If it is not given, then the current package is documented.
+For more information on SPEC and its format, see the
+\f[C]cargo\ help\ pkgid\f[] command.
+
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-open
+Open the docs in a browser after the operation.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC\f[]
+Package to document.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-lib
+Build only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Build only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Build only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Build only the specified test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Build only the specified benchmark target.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to document.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-doc(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-search.1 b/src/etc/man/cargo-search.1
new file mode 100644
index 000000000..e8b1da3ca
--- /dev/null
+++ b/src/etc/man/cargo-search.1
@@ -0,0 +1,49 @@
+.TH "CARGO\-SEARCH" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-search \- Search packages in crates.io
+.SH SYNOPSIS
+.PP
+\f[I]cargo search\f[] [OPTIONS] <query>...
+.SH DESCRIPTION
+.PP
+Search packages in \f[I]crates.io\f[].
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-host \f[I]HOST\f[]
+Host of a registry to search in.
+.RS
+.RE
+.TP
+.B \-\-limit \f[I]LIMIT\f[]
+Limit the number of results (default: 10, max: 100).
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
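+.SH EXAMPLES
+.PP
+Search crates.io for crates matching a keyword, limiting the output to
+five results (the keyword is only an illustration)
+.IP
+.nf
+\f[C]
+$\ cargo\ search\ serde\ \-\-limit\ 5
+\f[]
+.fi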
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-install(1), cargo\-publish(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-test.1 b/src/etc/man/cargo-test.1
new file mode 100644
index 000000000..2d9907f0b
--- /dev/null
+++ b/src/etc/man/cargo-test.1
@@ -0,0 +1,172 @@
+.TH "CARGO\-TEST" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-test \- Execute unit and integration tests of a package
+.SH SYNOPSIS
+.PP
+\f[I]cargo test\f[] [OPTIONS] [\-\-] [<args>...]
+.SH DESCRIPTION
+.PP
+Execute all unit and integration tests of a local package.
+.PP
+All of the trailing arguments are passed to the test binaries generated
+for filtering tests and generally providing options configuring how they
+run.
+For example, this will run all tests with \[aq]foo\[aq] in their name:
+.IP
+.nf
+\f[C]
+cargo\ test\ foo
+\f[]
+.fi
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \[aq]SPEC\[aq] is a
+package id specification which indicates which package should be tested.
+If it is not given, then the current package is tested.
+For more information on \[aq]SPEC\[aq] and its format, see the "cargo
+help pkgid" command.
+.PP
+The \f[B]\-\-jobs\f[] argument affects the building of the test
+executable but does not affect how many jobs are used when running the
+tests.
+.PP
+Compilation can be configured via the \[aq]test\[aq] profile in the
+manifest.
+.PP
+By default the Rust test harness hides output from test execution to
+keep results readable.
+Test output can be recovered (e.g.
+for debugging) by passing \f[B]\-\-nocapture\f[] to the test binaries:
+.IP
+.nf
+\f[C]
+cargo\ test\ \-\-\ \-\-nocapture
+\f[]
+.fi
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-lib
+Test only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-doc
+Test only this library\[aq]s documentation.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Test only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Test only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Test only the specified integration test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Test only the specified benchmark target.
+.RS
+.RE
+.TP
+.B \-\-no\-run
+Compile, but don\[aq]t run tests.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to run tests for.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to compile.
+.RS
+.RE
+.TP
+.B \-\-no\-fail\-fast
+Run all tests regardless of failure.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Execute all the unit and integration tests of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ test
+\f[]
+.fi
+.PP
+Execute the BENCH benchmark
+.IP
+.nf
+\f[C]
+$\ cargo\ test\ \-\-bench\ BENCH
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-uninstall.1 b/src/etc/man/cargo-uninstall.1
new file mode 100644
index 000000000..64e9aa7f0
--- /dev/null
+++ b/src/etc/man/cargo-uninstall.1
@@ -0,0 +1,56 @@
+.TH "CARGO\-UNINSTALL" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-uninstall \- Remove a Rust binary
+.SH SYNOPSIS
+.PP
+\f[I]cargo uninstall\f[] [OPTIONS] <spec>
+.PP
+\f[I]cargo uninstall\f[] (\-h | \-\-help)
+.SH DESCRIPTION
+.PP
+The argument SPEC is a package id specification (see
+\f[C]cargo\ help\ pkgid\f[]) to specify which crate should be
+uninstalled.
+By default all binaries are uninstalled for a crate but the
+\f[C]\-\-bin\f[] and \f[C]\-\-example\f[] flags can be used to only
+uninstall particular binaries.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-root \f[I]DIR\f[]
+Directory to uninstall packages from.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Only uninstall the binary NAME.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
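+.SH EXAMPLES
+.PP
+Remove every binary previously installed from the foo crate (assuming
+it was installed with \f[I]cargo install\f[])
+.IP
+.nf
+\f[C]
+$\ cargo\ uninstall\ foo
+\f[]
+.fi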
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-install(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-update.1 b/src/etc/man/cargo-update.1
new file mode 100644
index 000000000..14b64374d
--- /dev/null
+++ b/src/etc/man/cargo-update.1
@@ -0,0 +1,80 @@
+.TH "CARGO\-UPDATE" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-update \- Update the package dependencies
+.SH SYNOPSIS
+.PP
+\f[I]cargo update\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Update dependencies as recorded in the local lock file.
+.PP
+This command requires that a \f[I]Cargo.lock\f[] already exists as
+generated by \f[I]cargo build\f[] or related commands.
+.PP
+If \f[I]SPEC\f[] is given, then a conservative update of the
+\f[I]lockfile\f[] will be performed.
+This means that only the dependency specified by \f[I]SPEC\f[] will be
+updated.
+Its transitive dependencies will be updated only if \f[I]SPEC\f[] cannot
+be updated without updating dependencies.
+All other dependencies will remain locked at their currently recorded
+versions.
+.PP
+If \f[I]PRECISE\f[] is specified, then \f[B]\-\-aggressive\f[] must not
+also be specified.
+The argument \f[I]PRECISE\f[] is a string representing a precise
+revision that the package being updated should be updated to.
+For example, if the package comes from a git repository, then
+\f[I]PRECISE\f[] would be the exact revision that the repository should
+be updated to.
+.PP
+If \f[I]SPEC\f[] is not given, then all dependencies will be
+re\-resolved and updated.
+.PP
+For more information about package id specifications, see "cargo help
+pkgid".
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to update.
+.RS
+.RE
+.TP
+.B \-\-aggressive
+Force updating all dependencies of \f[I]SPEC\f[] as well.
+.RS
+.RE
+.TP
+.B \-\-precise \f[I]PRECISE\f[]
+Update a single dependency to exactly \f[I]PRECISE\f[].
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-version.1 b/src/etc/man/cargo-version.1
new file mode 100644
index 000000000..c78344d3f
--- /dev/null
+++ b/src/etc/man/cargo-version.1
@@ -0,0 +1,31 @@
+.TH "CARGO\-VERSION" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-version \- Show version information
+.SH SYNOPSIS
+.PP
+\f[I]cargo version\f[] [OPTIONS]
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-yank.1 b/src/etc/man/cargo-yank.1
new file mode 100644
index 000000000..f54b2bd6b
--- /dev/null
+++ b/src/etc/man/cargo-yank.1
@@ -0,0 +1,68 @@
+.TH "CARGO\-YANK" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-yank \- Remove a pushed crate from the index
+.SH SYNOPSIS
+.PP
+\f[I]cargo yank\f[] [OPTIONS] [<crate>]
+.SH DESCRIPTION
+.PP
+The yank command removes a previously pushed crate\[aq]s version from
+the server\[aq]s index.
+This command does not delete any data, and the crate will still be
+available for download via the registry\[aq]s download link.
+.PP
+Note that existing crates locked to a yanked version will still be able
+to download the yanked version to use it.
+Cargo will, however, not allow any new crates to be locked to any yanked
+version.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-vers \f[I]VERSION\f[]
+The version to yank or un\-yank.
+.RS
+.RE
+.TP
+.B \-\-undo
+Undo a yank, putting a version back into the index.
+.RS
+.RE
+.TP
+.B \-\-index \f[I]INDEX\f[]
+Registry index to yank from.
+.RS
+.RE
+.TP
+.B \-\-token \f[I]TOKEN\f[]
+API token to use when authenticating.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-owner(1), cargo\-version(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo.1 b/src/etc/man/cargo.1
new file mode 100644
index 000000000..8baedcec9
--- /dev/null
+++ b/src/etc/man/cargo.1
@@ -0,0 +1,206 @@
+.TH "CARGO" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo \- The Rust package manager
+.SH SYNOPSIS
+.PP
+\f[I]cargo\f[] <command> [<args>...]
+.SH DESCRIPTION
+.PP
+This program is a package manager for the Rust language, available at
+<https://crates.io>.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Display a help message.
+.RS
+.RE
+.TP
+.B \-V, \-\-version
+Print version information and exit.
+.RS
+.RE
+.TP
+.B \-\-list
+List all available cargo commands.
+.RS
+.RE
+.TP
+.B \-\-explain CODE
+Run \f[C]rustc\ \-\-explain\ CODE\f[]
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-\-color
+Configure coloring of output.
+.RS
+.RE
+.SH COMMANDS
+.PP
+To get extended information about commands, run \f[I]cargo help
+<command>\f[] or \f[I]man cargo\-command\f[]
+.TP
+.B cargo\-build(1)
+Compile the current project.
+.RS
+.RE
+.TP
+.B cargo\-clean(1)
+Remove the target directory with build output.
+.RS
+.RE
+.TP
+.B cargo\-doc(1)
+Build this project\[aq]s and its dependencies\[aq] documentation.
+.RS
+.RE
+.TP
+.B cargo\-init(1)
+Create a new cargo project in the current directory.
+.RS
+.RE
+.TP
+.B cargo\-install(1)
+Install a Rust binary.
+.RS
+.RE
+.TP
+.B cargo\-new(1)
+Create a new cargo project.
+.RS
+.RE
+.TP
+.B cargo\-run(1)
+Build and execute src/main.rs.
+.RS
+.RE
+.TP
+.B cargo\-test(1)
+Run the tests for the package.
+.RS
+.RE
+.TP
+.B cargo\-bench(1)
+Run the benchmarks for the package.
+.RS
+.RE
+.TP
+.B cargo\-update(1)
+Update dependencies in Cargo.lock.
+.RS
+.RE
+.TP
+.B cargo\-rustc(1)
+Compile the current project, and optionally pass additional rustc
+parameters.
+.RS
+.RE
+.TP
+.B cargo\-package(1)
+Generate a source tarball for the current package.
+.RS
+.RE
+.TP
+.B cargo\-publish(1)
+Package and upload this project to the registry.
+.RS
+.RE
+.TP
+.B cargo\-owner(1)
+Manage the owners of a crate on the registry.
+.RS
+.RE
+.TP
+.B cargo\-uninstall(1)
+Remove a Rust binary.
+.RS
+.RE
+.TP
+.B cargo\-search(1)
+Search registry for crates.
+.RS
+.RE
+.TP
+.B cargo\-help(1)
+Display help for a cargo command.
+.RS
+.RE
+.TP
+.B cargo\-version(1)
+Print cargo\[aq]s version and exit.
+.RS
+.RE
+.SH FILES
+.TP
+.B ~/.cargo
+Directory in which Cargo stores repository data.
+Cargo can be instructed to use a \f[I]\&.cargo\f[] subdirectory in a
+different location by setting the \f[B]CARGO_HOME\f[] environment
+variable.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Build a local package and all of its dependencies
+.IP
+.nf
+\f[C]
+$\ cargo\ build
+\f[]
+.fi
+.PP
+Build a package with optimizations
+.IP
+.nf
+\f[C]
+$\ cargo\ build\ \-\-release
+\f[]
+.fi
+.PP
+Run tests for a cross\-compiled target
+.IP
+.nf
+\f[C]
+$\ cargo\ test\ \-\-target\ i686\-unknown\-linux\-gnu
+\f[]
+.fi
+.PP
+Create a new project that builds an executable
+.IP
+.nf
+\f[C]
+$\ cargo\ new\ \-\-bin\ foobar
+\f[]
+.fi
+.PP
+Create a project in the current directory
+.IP
+.nf
+\f[C]
+$\ mkdir\ foo\ &&\ cd\ foo
+$\ cargo\ init\ .
+\f[]
+.fi
+.PP
+Learn about a command\[aq]s options and usage
+.IP
+.nf
+\f[C]
+$\ cargo\ help\ clean
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+rustc(1), rustdoc(1)
+.SH BUGS
+.PP
+See <https://github.com/rust-lang/cargo/issues> for issues.
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/tests/testsuite/alt_registry.rs b/tests/testsuite/alt_registry.rs
new file mode 100644
index 000000000..089dac9ce
--- /dev/null
+++ b/tests/testsuite/alt_registry.rs
@@ -0,0 +1,590 @@
+use cargotest::ChannelChanger;
+use cargotest::support::registry::{self, alt_api_path, Package};
+use cargotest::support::{execs, paths, project};
+use hamcrest::assert_that;
+use std::fs::File;
+use std::io::Write;
+
+#[test]
+fn is_feature_gated() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            version = "0.0.1"
+            registry = "alternative"
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").alternative(true).publish();
+
+    assert_that(
+        p.cargo("build").masquerade_as_nightly_cargo(),
+        execs()
+            .with_status(101)
+            .with_stderr_contains(" feature `alternative-registries` is required"),
+    );
+}
+
+#[test]
+fn depend_on_alt_registry() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            version = "0.0.1"
+            registry = "alternative"
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").alternative(true).publish();
+
+    assert_that(
+        p.cargo("build").masquerade_as_nightly_cargo(),
+        execs().with_status(0).with_stderr(&format!(
+            "\
+[UPDATING] registry `{reg}`
+[DOWNLOADING] bar v0.0.1 (registry `file://[..]`)
+[COMPILING] bar v0.0.1 (registry `file://[..]`)
+[COMPILING] foo v0.0.1 ({dir})
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
secs +", + dir = p.url(), + reg = registry::alt_registry() + )), + ); + + assert_that( + p.cargo("clean").masquerade_as_nightly_cargo(), + execs().with_status(0), + ); + + // Don't download a second time + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 (registry `file://[..]`) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url() + )), + ); +} + +#[test] +fn depend_on_alt_registry_depends_on_same_registry_no_index() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").alternative(true).publish(); + Package::new("bar", "0.0.1") + .dep("baz", "0.0.1") + .alternative(true) + .publish(); + + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `{reg}` +[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`) +[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`) +[COMPILING] baz v0.0.1 (registry `file://[..]`) +[COMPILING] bar v0.0.1 (registry `file://[..]`) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url(), + reg = registry::alt_registry() + )), + ); +} + +#[test] +fn depend_on_alt_registry_depends_on_same_registry() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").alternative(true).publish(); + Package::new("bar", "0.0.1") + .registry_dep("baz", "0.0.1", registry::alt_registry().as_str()) + .alternative(true) + .publish(); + + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `{reg}` +[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`) +[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`) +[COMPILING] baz v0.0.1 (registry `file://[..]`) +[COMPILING] bar v0.0.1 (registry `file://[..]`) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url(), + reg = registry::alt_registry() + )), + ); +} + +#[test] +fn depend_on_alt_registry_depends_on_crates_io() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("bar", "0.0.1") + .registry_dep("baz", "0.0.1", registry::registry().as_str()) + .alternative(true) + .publish(); + + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `{alt_reg}` +[UPDATING] registry `{reg}` +[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`) +[DOWNLOADING] [..] 
v0.0.1 (registry `file://[..]`) +[COMPILING] baz v0.0.1 (registry `file://[..]`) +[COMPILING] bar v0.0.1 (registry `file://[..]`) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url(), + alt_reg = registry::alt_registry(), + reg = registry::registry() + )), + ); +} + +#[test] +fn registry_and_path_dep_works() { + registry::init(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url() + )), + ); +} + +#[test] +fn registry_incompatible_with_git() { + registry::init(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(101) + .with_stderr_contains(" dependency (bar) specification is ambiguous. Only one of `git` or `registry` is allowed.")); +} + +#[test] +fn cannot_publish_to_crates_io_with_registry_dependency() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + [project] + name = "foo" + version = "0.0.1" + authors = [] + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").alternative(true).publish(); + + assert_that( + p.cargo("publish") + .masquerade_as_nightly_cargo() + .arg("--index") + .arg(registry::registry().to_string()), + execs().with_status(101), + ); +} + +#[test] +fn publish_with_registry_dependency() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").alternative(true).publish(); + + // Login so that we have the token available + assert_that( + p.cargo("login") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("TOKEN") + .arg("-Zunstable-options"), + execs().with_status(0), + ); + + assert_that( + p.cargo("publish") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("-Zunstable-options"), + execs().with_status(0), + ); +} + +#[test] +fn alt_registry_and_crates_io_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + crates_io_dep = "0.0.1" + + [dependencies.alt_reg_dep] + version = "0.1.0" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("crates_io_dep", 
"0.0.1").publish(); + Package::new("alt_reg_dep", "0.1.0") + .alternative(true) + .publish(); + + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs() + .with_status(0) + .with_stderr_contains(format!( + "[UPDATING] registry `{}`", + registry::alt_registry() + )) + .with_stderr_contains(&format!("[UPDATING] registry `{}`", registry::registry())) + .with_stderr_contains("[DOWNLOADING] crates_io_dep v0.0.1 (registry `file://[..]`)") + .with_stderr_contains("[DOWNLOADING] alt_reg_dep v0.1.0 (registry `file://[..]`)") + .with_stderr_contains("[COMPILING] alt_reg_dep v0.1.0 (registry `file://[..]`)") + .with_stderr_contains("[COMPILING] crates_io_dep v0.0.1") + .with_stderr_contains(&format!("[COMPILING] foo v0.0.1 ({})", p.url())) + .with_stderr_contains( + "[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs", + ), + ) +} + +#[test] +fn block_publish_due_to_no_token() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + // Setup the registry by publishing a package + Package::new("bar", "0.0.1").alternative(true).publish(); + + // Now perform the actual publish + assert_that( + p.cargo("publish") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("-Zunstable-options"), + execs() + .with_status(101) + .with_stderr_contains("error: no upload token found, please run `cargo login`"), + ); +} + +#[test] +fn publish_to_alt_registry() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + // Setup the registry by publishing a package + Package::new("bar", "0.0.1").alternative(true).publish(); + + // Login so that we have the token available + assert_that( + p.cargo("login") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("TOKEN") + .arg("-Zunstable-options"), + execs().with_status(0), + ); + + // Now perform the actual publish + assert_that( + p.cargo("publish") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("-Zunstable-options"), + execs().with_status(0), + ); + + // Ensure that the crate is uploaded + assert!(alt_api_path().join("api/v1/crates/new").exists()); +} + +#[test] +fn publish_with_crates_io_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = ["me"] + license = "MIT" + description = "foo" + + [dependencies.bar] + version = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + // Login so that we have the token available + assert_that( + p.cargo("login") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("TOKEN") + .arg("-Zunstable-options"), + execs().with_status(0), + ); + + assert_that( + p.cargo("publish") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("-Zunstable-options"), + execs().with_status(0), + ); +} + +#[test] +fn credentials_in_url_forbidden() { + registry::init(); + + let config = paths::home().join(".cargo/config"); + + File::create(config) + .unwrap() + .write_all( + br#" + [registries.alternative] + index = "ssh://git:secret@foobar.com" + "#, + ) + .unwrap(); + + let p = project("foo") + 
.file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("-Zunstable-options"), + execs() + .with_status(101) + .with_stderr_contains("error: Registry URLs may not contain credentials"), + ); +} diff --git a/tests/testsuite/bad_config.rs b/tests/testsuite/bad_config.rs new file mode 100644 index 000000000..574d84bff --- /dev/null +++ b/tests/testsuite/bad_config.rs @@ -0,0 +1,1430 @@ +use cargotest::support::{execs, project}; +use cargotest::support::registry::Package; +use hamcrest::assert_that; + +#[test] +fn bad1() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [target] + nonexistent-target = "foo" + "#, + ) + .build(); + assert_that( + p.cargo("build") + .arg("-v") + .arg("--target=nonexistent-target"), + execs().with_status(101).with_stderr( + "\ +[ERROR] expected table for configuration key `target.nonexistent-target`, \ +but found string in [..]config +", + ), + ); +} + +#[test] +fn bad2() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [http] + proxy = 3.0 + "#, + ) + .build(); + assert_that( + p.cargo("publish").arg("-v"), + execs().with_status(101).with_stderr( + "\ +[ERROR] Couldn't load Cargo configuration + +Caused by: + failed to load TOML configuration from `[..]config` + +Caused by: + failed to parse key `http` + +Caused by: + failed to parse key `proxy` + +Caused by: + found TOML configuration value of unknown type `float` +", + ), + ); +} + +#[test] +fn bad3() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [http] + proxy = true + "#, + ) + .build(); + Package::new("foo", "1.0.0").publish(); + + assert_that( + p.cargo("publish").arg("-v"), + execs().with_status(101).with_stderr( + "\ +error: failed to update registry [..] 
+ +Caused by: + invalid configuration for key `http.proxy` +expected a string, but found a boolean for `http.proxy` in [..]config +", + ), + ); +} + +#[test] +fn bad4() { + let p = project("foo") + .file( + ".cargo/config", + r#" + [cargo-new] + name = false + "#, + ) + .build(); + assert_that( + p.cargo("new").arg("-v").arg("foo"), + execs().with_status(101).with_stderr( + "\ +[ERROR] Failed to create project `foo` at `[..]` + +Caused by: + invalid configuration for key `cargo-new.name` +expected a string, but found a boolean for `cargo-new.name` in [..]config +", + ), + ); +} + +#[test] +fn bad5() { + let p = project("foo") + .file( + ".cargo/config", + r#" + foo = "" + "#, + ) + .file( + "foo/.cargo/config", + r#" + foo = 2 + "#, + ) + .build(); + assert_that( + p.cargo("new") + .arg("-v") + .arg("foo") + .cwd(&p.root().join("foo")), + execs().with_status(101).with_stderr( + "\ +[ERROR] Failed to create project `foo` at `[..]` + +Caused by: + Couldn't load Cargo configuration + +Caused by: + failed to merge configuration at `[..]` + +Caused by: + failed to merge key `foo` between files: + file 1: [..]foo[..]foo[..]config + file 2: [..]foo[..]config + +Caused by: + expected integer, but found string +", + ), + ); +} + +#[test] +fn bad_cargo_config_jobs() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + jobs = -1 + "#, + ) + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs() + .with_status(101) + .with_stderr("[ERROR] build.jobs must be positive, but found -1 in [..]"), + ); +} + +#[test] +fn default_cargo_config_jobs() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + jobs = 1 + "#, + ) + .build(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn good_cargo_config_jobs() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + jobs = 4 + "#, + ) + .build(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn invalid_global_config() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file(".cargo/config", "4") + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr( + "\ +[ERROR] Couldn't load Cargo configuration + +Caused by: + could not parse TOML configuration in `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + expected an equals, found eof at line 1 +", + ), + ); +} + +#[test] +fn bad_cargo_lock() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#, + ) + .file("Cargo.lock", "[[package]]\nfoo = 92") + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse lock file at: [..]Cargo.lock + +Caused by: + missing field `name` for key `package` +", + ), + ); +} + +#[test] +fn duplicate_packages_in_cargo_lock() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + 
"Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file( + "Cargo.lock", + r#" + [[package]] + name = "bar" + version = "0.0.1" + dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "foo" + version = "0.1.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + + [[package]] + name = "foo" + version = "0.1.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("--verbose"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse lock file at: [..] + +Caused by: + package `foo` is specified twice in the lockfile +", + ), + ); +} + +#[test] +fn bad_source_in_cargo_lock() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file( + "Cargo.lock", + r#" + [[package]] + name = "bar" + version = "0.0.1" + dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "foo" + version = "0.1.0" + source = "You shall not parse" + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("--verbose"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse lock file at: [..] + +Caused by: + invalid source `You shall not parse` for key `package.source` +", + ), + ); +} + +#[test] +fn bad_dependency_in_lockfile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + "Cargo.lock", + r#" + [[package]] + name = "foo" + version = "0.0.1" + dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("--verbose"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse lock file at: [..] + +Caused by: + package `bar 0.1.0 ([..])` is specified as a dependency, but is missing from the package list +", + ), + ); +} + +#[test] +fn bad_git_dependency() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + foo = { git = "file:.." } + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] git repository `file:///` +[ERROR] failed to load source for a dependency on `foo` + +Caused by: + Unable to update file:/// + +Caused by: + failed to clone into: [..] + +Caused by: + [..]'file:///' is not a valid local file URI[..] 
+", + ), + ); +} + +#[test] +fn bad_crate_type() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [lib] + crate-type = ["bad_type", "rlib"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr_contains( + "error: failed to run `rustc` to learn about crate-type bad_type information", + ), + ); +} + +#[test] +fn malformed_override() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [target.x86_64-apple-darwin.freetype] + native = { + foo: "bar" + } + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + expected a table key, found a newline at line 8 +", + ), + ); +} + +#[test] +fn duplicate_binary_names() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "qqq" + version = "0.1.0" + authors = ["A "] + + [[bin]] + name = "e" + path = "a.rs" + + [[bin]] + name = "e" + path = "b.rs" + "#, + ) + .file("a.rs", r#"fn main() -> () {}"#) + .file("b.rs", r#"fn main() -> () {}"#) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + found duplicate binary name e, but all binary targets must have a unique name +", + ), + ); +} + +#[test] +fn duplicate_example_names() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "qqq" + version = "0.1.0" + authors = ["A "] + + [[example]] + name = "ex" + path = "examples/ex.rs" + + [[example]] + name = "ex" + path = "examples/ex2.rs" + "#, + ) + .file("examples/ex.rs", r#"fn main () -> () {}"#) + .file("examples/ex2.rs", r#"fn main () -> () {}"#) + .build(); + + assert_that( + p.cargo("build").arg("--example").arg("ex"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + found duplicate example name ex, but all example targets must have a unique name +", + ), + ); +} + +#[test] +fn duplicate_bench_names() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "qqq" + version = "0.1.0" + authors = ["A "] + + [[bench]] + name = "ex" + path = "benches/ex.rs" + + [[bench]] + name = "ex" + path = "benches/ex2.rs" + "#, + ) + .file("benches/ex.rs", r#"fn main () {}"#) + .file("benches/ex2.rs", r#"fn main () {}"#) + .build(); + + assert_that( + p.cargo("bench"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + found duplicate bench name ex, but all bench targets must have a unique name +", + ), + ); +} + +#[test] +fn duplicate_deps() { + let p = project("foo") + .file( + "shim-bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "shim-bar/src/lib.rs", + r#" + pub fn a() {} + "#, + ) + .file( + "linux-bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "linux-bar/src/lib.rs", + r#" + pub fn a() {} + "#, + ) + .file( + "Cargo.toml", + r#" + [package] + name = "qqq" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "shim-bar" } + + [target.x86_64-unknown-linux-gnu.dependencies] + bar = { path = "linux-bar" } + "#, + ) + .file("src/main.rs", r#"fn main () {}"#) 
+ .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Dependency 'bar' has different source paths depending on the build target. Each dependency must \ +have a single canonical source path irrespective of build target. +", + ), + ); +} + +#[test] +fn duplicate_deps_diff_sources() { + let p = project("foo") + .file( + "shim-bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "shim-bar/src/lib.rs", + r#" + pub fn a() {} + "#, + ) + .file( + "linux-bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "linux-bar/src/lib.rs", + r#" + pub fn a() {} + "#, + ) + .file( + "Cargo.toml", + r#" + [package] + name = "qqq" + version = "0.0.1" + authors = [] + + [target.i686-unknown-linux-gnu.dependencies] + bar = { path = "shim-bar" } + + [target.x86_64-unknown-linux-gnu.dependencies] + bar = { path = "linux-bar" } + "#, + ) + .file("src/main.rs", r#"fn main () {}"#) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Dependency 'bar' has different source paths depending on the build target. Each dependency must \ +have a single canonical source path irrespective of build target. +", + ), + ); +} + +#[test] +fn unused_keys() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [target.foo] + bar = "3" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +warning: unused manifest key: target.foo.bar +[COMPILING] foo v0.1.0 (file:///[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + bulid = "foo" + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .build(); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +warning: unused manifest key: project.bulid +[COMPILING] foo [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + build = "foo" + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .build(); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +warning: unused manifest key: lib.build +[COMPILING] foo [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn empty_dependencies() { + let p = project("empty_deps") + .file( + "Cargo.toml", + r#" + [package] + name = "empty_deps" + version = "0.0.0" + authors = [] + + [dependencies] + foo = {} + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.0.1").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr_contains( + "\ +warning: dependency (foo) specified without providing a local path, Git repository, or version \ +to use. 
This will be considered an error in future versions +", + ), + ); +} + +#[test] +fn invalid_toml_historically_allowed_is_warned() { + let p = project("empty_deps") + .file( + "Cargo.toml", + r#" + [package] + name = "empty_deps" + version = "0.0.0" + authors = [] + "#, + ) + .file( + ".cargo/config", + r#" + [foo] bar = 2 + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +warning: TOML file found which contains invalid syntax and will soon not parse +at `[..]config`. + +The TOML spec requires newlines after table definitions (e.g. `[a] b = 1` is +invalid), but this file has a table header which does not have a newline after +it. A newline needs to be added and this warning will soon become a hard error +in the future. +[COMPILING] empty_deps v0.0.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn ambiguous_git_reference() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = "https://127.0.0.1" + branch = "master" + tag = "some-tag" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_stderr_contains( + "\ +[WARNING] dependency (bar) specification is ambiguous. \ +Only one of `branch`, `tag` or `rev` is allowed. \ +This will be considered an error in future versions +", + ), + ); +} + +#[test] +fn bad_source_config1() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.foo] + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs() + .with_status(101) + .with_stderr("error: no source URL specified for `source.foo`, need [..]"), + ); +} + +#[test] +fn bad_source_config2() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry `https://[..]` + +Caused by: + could not find a configured source with the name `bar` \ + when attempting to lookup `crates-io` (configuration in [..]) +", + ), + ); +} + +#[test] +fn bad_source_config3() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'crates-io' + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry `https://[..]` + +Caused by: + detected a cycle of `replace-with` sources, [..] 
+", + ), + ); +} + +#[test] +fn bad_source_config4() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + + [source.bar] + registry = 'http://example.com' + replace-with = 'crates-io' + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry `https://[..]` + +Caused by: + detected a cycle of `replace-with` sources, the source `crates-io` is \ + eventually replaced with itself (configuration in [..]) +", + ), + ); +} + +#[test] +fn bad_source_config5() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + + [source.bar] + registry = 'not a url' + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: configuration key `source.bar.registry` specified an invalid URL (in [..]) + +Caused by: + invalid url `not a url`: [..] +", + ), + ); +} + +#[test] +fn both_git_and_path_specified() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = "https://127.0.0.1" + path = "bar" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + foo.cargo("build").arg("-v"), + execs().with_stderr_contains( + "\ +[WARNING] dependency (bar) specification is ambiguous. \ +Only one of `git` or `path` is allowed. \ +This will be considered an error in future versions +", + ), + ); +} + +#[test] +fn bad_source_config6() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = ['not', 'a', 'string'] + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "error: expected a string, but found a array for `source.crates-io.replace-with` in [..]", + ), + ); +} + +#[test] +fn ignored_git_revision() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + path = "bar" + branch = "spam" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + foo.cargo("build").arg("-v"), + execs().with_stderr_contains( + "\ + [WARNING] key `branch` is ignored for dependency (bar). 
\ + This will be considered an error in future versions", + ), + ); +} + +#[test] +fn bad_source_config7() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.foo] + registry = 'http://example.com' + local-registry = 'file:///another/file' + "#, + ) + .build(); + + Package::new("bar", "0.1.0").publish(); + + assert_that( + p.cargo("build"), + execs() + .with_status(101) + .with_stderr("error: more than one source URL specified for `source.foo`"), + ); +} + +#[test] +fn bad_dependency() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = 3 + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + invalid type: integer `3`, expected a version string like [..] +", + ), + ); +} + +#[test] +fn bad_debuginfo() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [profile.dev] + debug = 'a' + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + invalid type: string \"a\", expected a boolean or an integer for [..] +", + ), + ); +} + +#[test] +fn bad_opt_level() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 3 + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + invalid type: integer `3`, expected a boolean or a string for key [..] 
+", + ), + ); +} diff --git a/tests/testsuite/bad_manifest_path.rs b/tests/testsuite/bad_manifest_path.rs new file mode 100644 index 000000000..c3fb92f06 --- /dev/null +++ b/tests/testsuite/bad_manifest_path.rs @@ -0,0 +1,391 @@ +use cargotest::support::{basic_bin_manifest, execs, main_file, project}; +use hamcrest::assert_that; + +fn assert_not_a_cargo_toml(command: &str, manifest_path_argument: &str) { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo(command) + .arg("--manifest-path") + .arg(manifest_path_argument) + .cwd(p.root().parent().unwrap()), + execs().with_status(101).with_stderr( + "[ERROR] the manifest-path must be a path \ + to a Cargo.toml file", + ), + ); +} + +fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) { + let p = project("foo").build(); + let expected_path = manifest_path_argument + .split('/') + .collect::>() + .join("[..]"); + + assert_that( + p.cargo(command) + .arg("--manifest-path") + .arg(manifest_path_argument) + .cwd(p.root().parent().unwrap()), + execs().with_status(101).with_stderr(format!( + "[ERROR] manifest path `{}` does not exist", + expected_path + )), + ); +} + +#[test] +fn bench_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("bench", "foo"); +} + +#[test] +fn bench_dir_plus_file() { + assert_not_a_cargo_toml("bench", "foo/bar"); +} + +#[test] +fn bench_dir_plus_path() { + assert_not_a_cargo_toml("bench", "foo/bar/baz"); +} + +#[test] +fn bench_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("bench", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn build_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("build", "foo"); +} + +#[test] +fn build_dir_plus_file() { + assert_not_a_cargo_toml("bench", "foo/bar"); +} + +#[test] +fn build_dir_plus_path() { + assert_not_a_cargo_toml("bench", "foo/bar/baz"); +} + +#[test] +fn build_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("build", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn clean_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("clean", "foo"); +} + +#[test] +fn clean_dir_plus_file() { + assert_not_a_cargo_toml("clean", "foo/bar"); +} + +#[test] +fn clean_dir_plus_path() { + assert_not_a_cargo_toml("clean", "foo/bar/baz"); +} + +#[test] +fn clean_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("clean", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn doc_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("doc", "foo"); +} + +#[test] +fn doc_dir_plus_file() { + assert_not_a_cargo_toml("doc", "foo/bar"); +} + +#[test] +fn doc_dir_plus_path() { + assert_not_a_cargo_toml("doc", "foo/bar/baz"); +} + +#[test] +fn doc_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("doc", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn fetch_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("fetch", "foo"); +} + +#[test] +fn fetch_dir_plus_file() { + assert_not_a_cargo_toml("fetch", "foo/bar"); +} + +#[test] +fn fetch_dir_plus_path() { + assert_not_a_cargo_toml("fetch", "foo/bar/baz"); +} + +#[test] +fn fetch_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("fetch", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn generate_lockfile_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("generate-lockfile", "foo"); +} + +#[test] +fn generate_lockfile_dir_plus_file() { + assert_not_a_cargo_toml("generate-lockfile", "foo/bar"); +} + +#[test] +fn 
generate_lockfile_dir_plus_path() { + assert_not_a_cargo_toml("generate-lockfile", "foo/bar/baz"); +} + +#[test] +fn generate_lockfile_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("generate-lockfile", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn package_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("package", "foo"); +} + +#[test] +fn package_dir_plus_file() { + assert_not_a_cargo_toml("package", "foo/bar"); +} + +#[test] +fn package_dir_plus_path() { + assert_not_a_cargo_toml("package", "foo/bar/baz"); +} + +#[test] +fn package_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("package", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn pkgid_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("pkgid", "foo"); +} + +#[test] +fn pkgid_dir_plus_file() { + assert_not_a_cargo_toml("pkgid", "foo/bar"); +} + +#[test] +fn pkgid_dir_plus_path() { + assert_not_a_cargo_toml("pkgid", "foo/bar/baz"); +} + +#[test] +fn pkgid_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("pkgid", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn publish_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("publish", "foo"); +} + +#[test] +fn publish_dir_plus_file() { + assert_not_a_cargo_toml("publish", "foo/bar"); +} + +#[test] +fn publish_dir_plus_path() { + assert_not_a_cargo_toml("publish", "foo/bar/baz"); +} + +#[test] +fn publish_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("publish", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn read_manifest_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("read-manifest", "foo"); +} + +#[test] +fn read_manifest_dir_plus_file() { + assert_not_a_cargo_toml("read-manifest", "foo/bar"); +} + +#[test] +fn read_manifest_dir_plus_path() { + assert_not_a_cargo_toml("read-manifest", "foo/bar/baz"); +} + +#[test] +fn read_manifest_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("read-manifest", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn run_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("run", "foo"); +} + +#[test] +fn run_dir_plus_file() { + assert_not_a_cargo_toml("run", "foo/bar"); +} + +#[test] +fn run_dir_plus_path() { + assert_not_a_cargo_toml("run", "foo/bar/baz"); +} + +#[test] +fn run_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("run", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn rustc_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("rustc", "foo"); +} + +#[test] +fn rustc_dir_plus_file() { + assert_not_a_cargo_toml("rustc", "foo/bar"); +} + +#[test] +fn rustc_dir_plus_path() { + assert_not_a_cargo_toml("rustc", "foo/bar/baz"); +} + +#[test] +fn rustc_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("rustc", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn test_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("test", "foo"); +} + +#[test] +fn test_dir_plus_file() { + assert_not_a_cargo_toml("test", "foo/bar"); +} + +#[test] +fn test_dir_plus_path() { + assert_not_a_cargo_toml("test", "foo/bar/baz"); +} + +#[test] +fn test_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("test", "foo/bar/baz/Cargo.toml"); +} + +#[test] +fn update_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("update", "foo"); +} + +#[test] +fn update_dir_plus_file() { + assert_not_a_cargo_toml("update", "foo/bar"); +} + +#[test] +fn update_dir_plus_path() { + assert_not_a_cargo_toml("update", "foo/bar/baz"); +} + +#[test] +fn update_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("update", 
"foo/bar/baz/Cargo.toml"); +} + +#[test] +fn verify_project_dir_containing_cargo_toml() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("verify-project") + .arg("--manifest-path") + .arg("foo") + .cwd(p.root().parent().unwrap()), + execs().with_status(1).with_stdout( + "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ + ", + ), + ); +} + +#[test] +fn verify_project_dir_plus_file() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("verify-project") + .arg("--manifest-path") + .arg("foo/bar") + .cwd(p.root().parent().unwrap()), + execs().with_status(1).with_stdout( + "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ + ", + ), + ); +} + +#[test] +fn verify_project_dir_plus_path() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("verify-project") + .arg("--manifest-path") + .arg("foo/bar/baz") + .cwd(p.root().parent().unwrap()), + execs().with_status(1).with_stdout( + "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ + ", + ), + ); +} + +#[test] +fn verify_project_dir_to_nonexistent_cargo_toml() { + let p = project("foo").build(); + assert_that( + p.cargo("verify-project") + .arg("--manifest-path") + .arg("foo/bar/baz/Cargo.toml") + .cwd(p.root().parent().unwrap()), + execs().with_status(1).with_stdout( + "{\"invalid\":\"manifest path `foo[..]bar[..]baz[..]Cargo.toml` does not exist\"}\ + ", + ), + ); +} diff --git a/tests/testsuite/bench.rs b/tests/testsuite/bench.rs new file mode 100644 index 000000000..ce05e1dfe --- /dev/null +++ b/tests/testsuite/bench.rs @@ -0,0 +1,1839 @@ +use std::str; + +use cargo::util::process; +use cargotest::is_nightly; +use cargotest::support::paths::CargoPathExt; +use cargotest::support::{basic_bin_manifest, basic_lib_manifest, execs, project}; +use hamcrest::{assert_that, existing_file}; + +#[test] +fn cargo_bench_simple() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[bench] + fn bench_hello(_b: &mut test::Bencher) { + assert_eq!(hello(), "hello") + }"#, + ) + .build(); + + assert_that(p.cargo("build"), execs()); + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), execs().with_stdout("hello\n")); + + assert_that( + p.cargo("bench"), + execs() + .with_stderr(&format!( + "\ +[COMPILING] foo v0.5.0 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("test bench_hello ... 
bench: [..]"), + ); +} + +#[test] +fn bench_bench_implicit() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { } + fn main() { println!("Hello main!"); }"#, + ) + .file( + "tests/other.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run3(_ben: &mut test::Bencher) { }"#, + ) + .file( + "benches/mybench.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#, + ) + .build(); + + assert_that( + p.cargo("bench").arg("--benches"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]release[/]deps[/]mybench-[..][EXE] +", + dir = p.url() + )) + .with_stdout_contains("test run2 ... bench: [..]"), + ); +} + +#[test] +fn bench_bin_implicit() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { } + fn main() { println!("Hello main!"); }"#, + ) + .file( + "tests/other.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run3(_ben: &mut test::Bencher) { }"#, + ) + .file( + "benches/mybench.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#, + ) + .build(); + + assert_that( + p.cargo("bench").arg("--bins"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +", + dir = p.url() + )) + .with_stdout_contains("test run1 ... bench: [..]"), + ); +} + +#[test] +fn bench_tarname() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "benches/bin1.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#, + ) + .file( + "benches/bin2.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#, + ) + .build(); + + assert_that( + p.cargo("bench").arg("--bench").arg("bin2"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]bin2-[..][EXE] +", + dir = p.url() + )) + .with_stdout_contains("test run2 ... 
bench: [..]"), + ); +} + +#[test] +fn bench_multiple_targets() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "benches/bin1.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#, + ) + .file( + "benches/bin2.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#, + ) + .file( + "benches/bin3.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run3(_ben: &mut test::Bencher) { }"#, + ) + .build(); + + assert_that( + p.cargo("bench") + .arg("--bench") + .arg("bin1") + .arg("--bench") + .arg("bin2"), + execs() + .with_status(0) + .with_stdout_contains("test run1 ... bench: [..]") + .with_stdout_contains("test run2 ... bench: [..]") + .with_stdout_does_not_contain("run3"), + ); +} + +#[test] +fn cargo_bench_verbose() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + fn main() {} + #[bench] fn bench_hello(_b: &mut test::Bencher) {} + "#, + ) + .build(); + + assert_that( + p.cargo("bench").arg("-v").arg("hello"), + execs() + .with_stderr(&format!( + "\ +[COMPILING] foo v0.5.0 ({url}) +[RUNNING] `rustc [..] src[/]main.rs [..]` +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] hello --bench`", + url = p.url() + )) + .with_stdout_contains("test bench_hello ... bench: [..]"), + ); +} + +#[test] +fn many_similar_names() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + " + #![feature(test)] + #[cfg(test)] + extern crate test; + pub fn foo() {} + #[bench] fn lib_bench(_b: &mut test::Bencher) {} + ", + ) + .file( + "src/main.rs", + " + #![feature(test)] + #[cfg(test)] + extern crate foo; + #[cfg(test)] + extern crate test; + fn main() {} + #[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() } + ", + ) + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate foo; + extern crate test; + #[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() } + "#, + ) + .build(); + + let output = p.cargo("bench").exec_with_output().unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); + assert!( + output.contains("test bin_bench"), + "bin_bench missing\n{}", + output + ); + assert!( + output.contains("test lib_bench"), + "lib_bench missing\n{}", + output + ); + assert!( + output.contains("test bench_bench"), + "bench_bench missing\n{}", + output + ); +} + +#[test] +fn cargo_bench_failing_test() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[bench] + fn bench_hello(_b: &mut test::Bencher) { + assert_eq!(hello(), "nope") + }"#, + ) + .build(); + + assert_that(p.cargo("build"), execs()); + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), execs().with_stdout("hello\n")); + + // Force libtest into serial execution so that the test header will be printed. 
+ assert_that( + p.cargo("bench").arg("--").arg("--test-threads=1"), + execs() + .with_stdout_contains("test bench_hello ...[..]") + .with_stderr_contains(format!( + "\ +[COMPILING] foo v0.5.0 ({})[..] +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_either_contains( + "[..]thread '[..]' panicked at 'assertion failed: `(left == right)`[..]", + ) + .with_either_contains("[..]left: `\"hello\"`[..]") + .with_either_contains("[..]right: `\"nope\"`[..]") + .with_either_contains("[..]src[/]main.rs:15[..]") + .with_status(101), + ); +} + +#[test] +fn bench_with_lib_dep() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "baz" + path = "src/main.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + /// + /// ```rust + /// extern crate foo; + /// fn main() { + /// println!("{}", foo::foo()); + /// } + /// ``` + /// + pub fn foo(){} + #[bench] fn lib_bench(_b: &mut test::Bencher) {} + "#, + ) + .file( + "src/main.rs", + " + #![feature(test)] + #[allow(unused_extern_crates)] + extern crate foo; + #[cfg(test)] + extern crate test; + + fn main() {} + + #[bench] + fn bin_bench(_b: &mut test::Bencher) {} + ", + ) + .build(); + + assert_that( + p.cargo("bench"), + execs() + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]release[/]deps[/]baz-[..][EXE]", + p.url() + )) + .with_stdout_contains("test lib_bench ... bench: [..]") + .with_stdout_contains("test bin_bench ... bench: [..]"), + ); +} + +#[test] +fn bench_with_deep_lib_dep() { + if !is_nightly() { + return; + } + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = "../foo" + "#, + ) + .file( + "src/lib.rs", + " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate foo; + #[cfg(test)] + extern crate test; + #[bench] + fn bar_bench(_b: &mut test::Bencher) { + foo::foo(); + } + ", + ) + .build(); + let _p2 = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + + pub fn foo() {} + + #[bench] + fn foo_bench(_b: &mut test::Bencher) {} + ", + ) + .build(); + + assert_that( + p.cargo("bench"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ([..]) +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]", + dir = p.url() + )) + .with_stdout_contains("test bar_bench ... 
bench: [..]"), + ); +} + +#[test] +fn external_bench_explicit() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[bench]] + name = "bench" + path = "src/bench.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + pub fn get_hello() -> &'static str { "Hello" } + + #[bench] + fn internal_bench(_b: &mut test::Bencher) {} + "#, + ) + .file( + "src/bench.rs", + r#" + #![feature(test)] + #[allow(unused_extern_crates)] + extern crate foo; + extern crate test; + + #[bench] + fn external_bench(_b: &mut test::Bencher) {} + "#, + ) + .build(); + + assert_that( + p.cargo("bench"), + execs() + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]", + p.url() + )) + .with_stdout_contains("test internal_bench ... bench: [..]") + .with_stdout_contains("test external_bench ... bench: [..]"), + ); +} + +#[test] +fn external_bench_implicit() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + + pub fn get_hello() -> &'static str { "Hello" } + + #[bench] + fn internal_bench(_b: &mut test::Bencher) {} + "#, + ) + .file( + "benches/external.rs", + r#" + #![feature(test)] + #[allow(unused_extern_crates)] + extern crate foo; + extern crate test; + + #[bench] + fn external_bench(_b: &mut test::Bencher) {} + "#, + ) + .build(); + + assert_that( + p.cargo("bench"), + execs() + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]release[/]deps[/]external-[..][EXE]", + p.url() + )) + .with_stdout_contains("test internal_bench ... bench: [..]") + .with_stdout_contains("test external_bench ... bench: [..]"), + ); +} + +#[test] +fn dont_run_examples() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", r"") + .file( + "examples/dont-run-me-i-will-fail.rs", + r#" + fn main() { panic!("Examples should not be run by 'cargo test'"); } + "#, + ) + .build(); + assert_that(p.cargo("bench"), execs().with_status(0)); +} + +#[test] +fn pass_through_command_line() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + " + #![feature(test)] + #[cfg(test)] + extern crate test; + + #[bench] fn foo(_b: &mut test::Bencher) {} + #[bench] fn bar(_b: &mut test::Bencher) {} + ", + ) + .build(); + + assert_that( + p.cargo("bench").arg("bar"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + dir = p.url() + )) + .with_stdout_contains("test bar ... bench: [..]"), + ); + + assert_that( + p.cargo("bench").arg("foo"), + execs() + .with_status(0) + .with_stderr( + "[FINISHED] release [optimized] target(s) in [..] 
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + ) + .with_stdout_contains("test foo ... bench: [..]"), + ); +} + +// Regression test for running cargo-bench twice with +// tests in an rlib +#[test] +fn cargo_bench_twice() { + if !is_nightly() { + return; + } + + let p = project("test_twice") + .file("Cargo.toml", &basic_lib_manifest("test_twice")) + .file( + "src/test_twice.rs", + r#" + #![crate_type = "rlib"] + #![feature(test)] + #[cfg(test)] + extern crate test; + + #[bench] + fn dummy_bench(b: &mut test::Bencher) { } + "#, + ) + .build(); + + p.cargo("build"); + + for _ in 0..2 { + assert_that(p.cargo("bench"), execs().with_status(0)); + } +} + +#[test] +fn lib_bin_same_name() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + [[bin]] + name = "foo" + "#, + ) + .file( + "src/lib.rs", + " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + #[bench] fn lib_bench(_b: &mut test::Bencher) {} + ", + ) + .file( + "src/main.rs", + " + #![cfg_attr(test, feature(test))] + #[allow(unused_extern_crates)] + extern crate foo; + #[cfg(test)] + extern crate test; + + #[bench] + fn bin_bench(_b: &mut test::Bencher) {} + ", + ) + .build(); + + assert_that( + p.cargo("bench"), + execs() + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains_n("test [..] ... bench: [..]", 2), + ); +} + +#[test] +fn lib_with_standard_name() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + + /// ``` + /// syntax::foo(); + /// ``` + pub fn foo() {} + + #[bench] + fn foo_bench(_b: &mut test::Bencher) {} + ", + ) + .file( + "benches/bench.rs", + " + #![feature(test)] + extern crate syntax; + extern crate test; + + #[bench] + fn bench(_b: &mut test::Bencher) { syntax::foo() } + ", + ) + .build(); + + assert_that( + p.cargo("bench"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]syntax-[..][EXE] +[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]", + dir = p.url() + )) + .with_stdout_contains("test foo_bench ... bench: [..]") + .with_stdout_contains("test bench ... bench: [..]"), + ); +} + +#[test] +fn lib_with_standard_name2() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + name = "syntax" + bench = false + doctest = false + "#, + ) + .file( + "src/lib.rs", + " + pub fn foo() {} + ", + ) + .file( + "src/main.rs", + " + #![feature(test)] + #[cfg(test)] + extern crate syntax; + #[cfg(test)] + extern crate test; + + fn main() {} + + #[bench] + fn bench(_b: &mut test::Bencher) { syntax::foo() } + ", + ) + .build(); + + assert_that( + p.cargo("bench"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]syntax-[..][EXE]", + dir = p.url() + )) + .with_stdout_contains("test bench ... 
bench: [..]"), + ); +} + +#[test] +fn bench_dylib() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + #![cfg_attr(test, feature(test))] + extern crate bar as the_bar; + #[cfg(test)] + extern crate test; + + pub fn bar() { the_bar::baz(); } + + #[bench] + fn foo(_b: &mut test::Bencher) {} + "#, + ) + .file( + "benches/bench.rs", + r#" + #![feature(test)] + extern crate foo as the_foo; + extern crate test; + + #[bench] + fn foo(_b: &mut test::Bencher) { the_foo::bar(); } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate_type = ["dylib"] + "#, + ) + .file( + "bar/src/lib.rs", + " + pub fn baz() {} + ", + ) + .build(); + + assert_that( + p.cargo("bench").arg("-v"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[RUNNING] [..] -C opt-level=3 [..] +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] [..] -C opt-level=3 [..] +[RUNNING] [..] -C opt-level=3 [..] +[RUNNING] [..] -C opt-level=3 [..] +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] --bench` +[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`", + dir = p.url() + )) + .with_stdout_contains_n("test foo ... bench: [..]", 2), + ); + + p.root().move_into_the_past(); + assert_that( + p.cargo("bench").arg("-v"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[FRESH] bar v0.0.1 ({dir}/bar) +[FRESH] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] --bench` +[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`", + dir = p.url() + )) + .with_stdout_contains_n("test foo ... bench: [..]", 2), + ); +} + +#[test] +fn bench_twice_with_build_cmd() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("build.rs", "fn main() {}") + .file( + "src/lib.rs", + " + #![feature(test)] + #[cfg(test)] + extern crate test; + #[bench] + fn foo(_b: &mut test::Bencher) {} + ", + ) + .build(); + + assert_that( + p.cargo("bench"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + dir = p.url() + )) + .with_stdout_contains("test foo ... bench: [..]"), + ); + + assert_that( + p.cargo("bench"), + execs() + .with_status(0) + .with_stderr( + "[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + ) + .with_stdout_contains("test foo ... 
bench: [..]"), + ); +} + +#[test] +fn bench_with_examples() { + if !is_nightly() { + return; + } + + let p = project("testbench") + .file( + "Cargo.toml", + r#" + [package] + name = "testbench" + version = "6.6.6" + authors = [] + + [[example]] + name = "teste1" + + [[bench]] + name = "testb1" + "#, + ) + .file( + "src/lib.rs", + r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + #[cfg(test)] + use test::Bencher; + + pub fn f1() { + println!("f1"); + } + + pub fn f2() {} + + #[bench] + fn bench_bench1(_b: &mut Bencher) { + f2(); + } + "#, + ) + .file( + "benches/testb1.rs", + " + #![feature(test)] + extern crate testbench; + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bench2(_b: &mut Bencher) { + testbench::f2(); + } + ", + ) + .file( + "examples/teste1.rs", + r#" + extern crate testbench; + + fn main() { + println!("example1"); + testbench::f1(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("bench").arg("-v"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] testbench v6.6.6 ({url}) +[RUNNING] `rustc [..]` +[RUNNING] `rustc [..]` +[RUNNING] `rustc [..]` +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `{dir}[/]target[/]release[/]deps[/]testbench-[..][EXE] --bench` +[RUNNING] `{dir}[/]target[/]release[/]deps[/]testb1-[..][EXE] --bench`", + dir = p.root().display(), + url = p.url() + )) + .with_stdout_contains("test bench_bench1 ... bench: [..]") + .with_stdout_contains("test bench_bench2 ... bench: [..]"), + ); +} + +#[test] +fn test_a_bench() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + + [lib] + name = "foo" + test = false + doctest = false + + [[bench]] + name = "b" + test = true + "#, + ) + .file("src/lib.rs", "") + .file( + "benches/b.rs", + r#" + #[test] + fn foo() {} + "#, + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr( + "\ +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]b-[..][EXE]", + ) + .with_stdout_contains("test foo ... ok"), + ); +} + +#[test] +fn test_bench_no_run() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file( + "benches/bbaz.rs", + r#" + #![feature(test)] + + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_baz(_: &mut Bencher) {} + "#, + ) + .build(); + + assert_that( + p.cargo("bench").arg("--no-run"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn test_bench_no_fail_fast() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/foo.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[bench] + fn bench_hello(_b: &mut test::Bencher) { + assert_eq!(hello(), "hello") + } + + #[bench] + fn bench_nope(_b: &mut test::Bencher) { + assert_eq!("nope", hello()) + }"#, + ) + .build(); + + assert_that( + p.cargo("bench") + .arg("--no-fail-fast") + .arg("--") + .arg("--test-threads=1"), + execs() + .with_status(101) + .with_stderr_contains("[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("running 2 tests") + .with_stderr_contains("[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("test bench_hello [..]") + .with_stdout_contains("test bench_nope [..]"), + ); +} + +#[test] +fn test_bench_multiple_packages() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + + [dependencies.bar] + path = "../bar" + + [dependencies.baz] + path = "../baz" + "#, + ) + .file("src/lib.rs", "") + .build(); + + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + authors = [] + version = "0.1.0" + + [[bench]] + name = "bbar" + test = true + "#, + ) + .file("src/lib.rs", "") + .file( + "benches/bbar.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bar(_b: &mut Bencher) {} + "#, + ) + .build(); + + let _baz = project("baz") + .file( + "Cargo.toml", + r#" + [project] + name = "baz" + authors = [] + version = "0.1.0" + + [[bench]] + name = "bbaz" + test = true + "#, + ) + .file("src/lib.rs", "") + .file( + "benches/bbaz.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_baz(_b: &mut Bencher) {} + "#, + ) + .build(); + + assert_that( + p.cargo("bench").arg("-p").arg("bar").arg("-p").arg("baz"), + execs() + .with_status(0) + .with_stderr_contains("[RUNNING] target[/]release[/]deps[/]bbaz-[..][EXE]") + .with_stdout_contains("test bench_baz ... bench: [..]") + .with_stderr_contains("[RUNNING] target[/]release[/]deps[/]bbar-[..][EXE]") + .with_stdout_contains("test bench_bar ... bench: [..]"), + ); +} + +#[test] +fn bench_all_workspace() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_foo(_: &mut Bencher) -> () { () } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .file( + "bar/benches/bar.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bar(_: &mut Bencher) -> () { () } + "#, + ) + .build(); + + assert_that( + p.cargo("bench").arg("--all"), + execs() + .with_status(0) + .with_stderr_contains("[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]") + .with_stdout_contains("test bench_bar ... 
bench: [..]") + .with_stderr_contains("[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("test bench_foo ... bench: [..]"), + ); +} + +#[test] +fn bench_all_exclude() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [workspace] + members = ["bar", "baz"] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + + #[bench] + pub fn bar(b: &mut test::Bencher) { + b.iter(|| {}); + } + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.1.0" + "#, + ) + .file( + "baz/src/lib.rs", + r#" + #[test] + pub fn baz() { + break_the_build(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("bench").arg("--all").arg("--exclude").arg("baz"), + execs().with_status(0).with_stdout_contains( + "\ +running 1 test +test bar ... bench: [..] ns/iter (+/- [..])", + ), + ); +} + +#[test] +fn bench_all_virtual_manifest() { + if !is_nightly() { + return; + } + + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file( + "foo/src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .file( + "foo/benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_foo(_: &mut Bencher) -> () { () } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .file( + "bar/benches/bar.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bar(_: &mut Bencher) -> () { () } + "#, + ) + .build(); + + // The order in which foo and bar are built is not guaranteed + assert_that( + p.cargo("bench").arg("--all"), + execs() + .with_status(0) + .with_stderr_contains("[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]") + .with_stdout_contains("test bench_bar ... bench: [..]") + .with_stderr_contains("[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("test bench_foo ... 
bench: [..]"), + ); +} + +// https://github.com/rust-lang/cargo/issues/4287 +#[test] +fn legacy_bench_name() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [[bench]] + name = "bench" + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .file( + "src/bench.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_foo(_: &mut Bencher) -> () { () } + "#, + ) + .build(); + + assert_that( + p.cargo("bench"), + execs().with_status(0).with_stderr_contains( + "\ +[WARNING] path `[..]src[/]bench.rs` was erroneously implicitly accepted for benchmark `bench`, +please set bench.path in Cargo.toml", + ), + ); +} + +#[test] +fn bench_virtual_manifest_all_implied() { + if !is_nightly() { + return; + } + + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file( + "foo/src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .file( + "foo/benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + use test::Bencher; + #[bench] + fn bench_foo(_: &mut Bencher) -> () { () } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .file( + "bar/benches/bar.rs", + r#" + #![feature(test)] + extern crate test; + use test::Bencher; + #[bench] + fn bench_bar(_: &mut Bencher) -> () { () } + "#, + ) + .build(); + + // The order in which foo and bar are built is not guaranteed + + assert_that( + p.cargo("bench"), + execs() + .with_status(0) + .with_stderr_contains("[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]") + .with_stdout_contains("test bench_bar ... bench: [..]") + .with_stderr_contains("[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]") + .with_stdout_contains("test bench_foo ... 
bench: [..]"), + ); +} diff --git a/tests/testsuite/build.rs b/tests/testsuite/build.rs new file mode 100644 index 000000000..7cc6b6ffd --- /dev/null +++ b/tests/testsuite/build.rs @@ -0,0 +1,5560 @@ +use std::env; +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargo::util::paths::dylib_path_envvar; +use cargo::util::{process, ProcessBuilder}; +use cargotest::{is_nightly, rustc_host, sleep_ms}; +use cargotest::support::paths::{root, CargoPathExt}; +use cargotest::support::ProjectBuilder; +use cargotest::support::{basic_bin_manifest, execs, main_file, project}; +use cargotest::support::registry::Package; +use cargotest::ChannelChanger; +use hamcrest::{assert_that, existing_dir, existing_file, is_not}; +use tempfile; + +#[test] +fn cargo_compile_simple() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("i am foo\n"), + ); +} + +#[test] +fn cargo_fail_with_no_stderr() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &String::from("refusal")) + .build(); + assert_that( + p.cargo("build").arg("--message-format=json"), + execs() + .with_status(101) + .with_stderr_does_not_contain("--- stderr"), + ); +} + +/// Check that the `CARGO_INCREMENTAL` environment variable results in +/// `rustc` getting `-Zincremental` passed to it. +#[test] +fn cargo_compile_incremental() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"), + execs() + .with_stderr_contains( + "[RUNNING] `rustc [..] -C incremental=[..][/]target[/]debug[/]incremental[..]`\n", + ) + .with_status(0), + ); + + assert_that( + p.cargo("test").arg("-v").env("CARGO_INCREMENTAL", "1"), + execs() + .with_stderr_contains( + "[RUNNING] `rustc [..] 
-C incremental=[..][/]target[/]debug[/]incremental[..]`\n", + ) + .with_status(0), + ); +} + +#[test] +fn incremental_profile() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [profile.dev] + incremental = false + + [profile.release] + incremental = true + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("-v").env_remove("CARGO_INCREMENTAL"), + execs() + .with_stderr_does_not_contain("[..]C incremental=[..]") + .with_status(0), + ); + + assert_that( + p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"), + execs() + .with_stderr_contains("[..]C incremental=[..]") + .with_status(0), + ); + + assert_that( + p.cargo("build") + .arg("--release") + .arg("-v") + .env_remove("CARGO_INCREMENTAL"), + execs() + .with_stderr_contains("[..]C incremental=[..]") + .with_status(0), + ); + + assert_that( + p.cargo("build") + .arg("--release") + .arg("-v") + .env("CARGO_INCREMENTAL", "0"), + execs() + .with_stderr_does_not_contain("[..]C incremental=[..]") + .with_status(0), + ); +} + +#[test] +fn incremental_config() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [build] + incremental = false + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("-v").env_remove("CARGO_INCREMENTAL"), + execs() + .with_stderr_does_not_contain("[..]C incremental=[..]") + .with_status(0), + ); + + assert_that( + p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"), + execs() + .with_stderr_contains("[..]C incremental=[..]") + .with_status(0), + ); +} + +#[test] +fn cargo_compile_with_workspace_excluded() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("--all").arg("--exclude").arg("foo"), + execs() + .with_stderr_does_not_contain("[..]virtual[..]") + .with_stderr_contains("[..]no packages to compile") + .with_status(101), + ); +} + +#[test] +fn cargo_compile_manifest_path() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("build") + .arg("--manifest-path") + .arg("foo/Cargo.toml") + .cwd(p.root().parent().unwrap()), + execs().with_status(0), + ); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn cargo_compile_with_invalid_manifest() { + let p = project("foo").file("Cargo.toml", "").build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + virtual manifests must be configured with [workspace] +", + ), + ) +} + +#[test] +fn cargo_compile_with_invalid_manifest2() { + let p = project("foo") + .file( + "Cargo.toml", + r" + [project] + foo = bar + ", + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + invalid number at line 3 +", + ), + ) +} + +#[test] +fn cargo_compile_with_invalid_manifest3() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, 
+ ) + .file("src/Cargo.toml", "a = bar") + .build(); + + assert_that( + p.cargo("build") + .arg("--manifest-path") + .arg("src/Cargo.toml"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + invalid number at line 1 +", + ), + ) +} + +#[test] +fn cargo_compile_duplicate_build_targets() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "main" + path = "src/main.rs" + crate-type = ["dylib"] + + [dependencies] + "#, + ) + .file( + "src/main.rs", + r#" + #![allow(warnings)] + fn main() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +warning: file found to be present in multiple build targets: [..]main.rs +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] +", + ), + ); +} + +#[test] +fn cargo_compile_with_invalid_version() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + authors = [] + version = "1.0" + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Expected dot for key `project.version` +", + ), + ) +} + +#[test] +fn cargo_compile_with_invalid_package_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "" + authors = [] + version = "0.0.0" + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + package name cannot be an empty string +", + ), + ) +} + +#[test] +fn cargo_compile_with_invalid_bin_target_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "" + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + binary target names cannot be empty +", + ), + ) +} + +#[test] +fn cargo_compile_with_forbidden_bin_target_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "build" + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + the binary target name `build` is forbidden +", + ), + ) +} + +#[test] +fn cargo_compile_with_bin_and_crate_type() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "the_foo_bin" + path = "src/foo.rs" + crate-type = ["cdylib", "rlib"] + "#, + ) + .file("src/foo.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + the target `the_foo_bin` is a binary and can't have any crate-types set \ +(currently \"cdylib, rlib\")", + ), + ) +} + +#[test] +fn cargo_compile_with_bin_and_proc() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "the_foo_bin" + path = "src/foo.rs" + proc-macro = true + "#, + ) + .file("src/foo.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to 
parse manifest at `[..]` + +Caused by: + the target `the_foo_bin` is a binary and can't have `proc-macro` set `true`", + ), + ) +} + +#[test] +fn cargo_compile_with_invalid_lib_target_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [lib] + name = "" + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + library target names cannot be empty +", + ), + ) +} + +#[test] +fn cargo_compile_without_manifest() { + let tmpdir = tempfile::Builder::new().prefix("cargo").tempdir().unwrap(); + let p = ProjectBuilder::new("foo", tmpdir.path().to_path_buf()).build(); + + assert_that( + p.cargo("build"), + execs() + .with_status(101) + .with_stderr("[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory"), + ); +} + +#[test] +fn cargo_compile_with_invalid_code() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", "invalid rust code!") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains( + "\ +[ERROR] Could not compile `foo`. + +To learn more, run the command again with --verbose.\n", + ), + ); + assert_that(&p.root().join("Cargo.lock"), existing_file()); +} + +#[test] +fn cargo_compile_with_invalid_code_in_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + [dependencies.baz] + path = "../baz" + "#, + ) + .file("src/main.rs", "invalid rust code!") + .build(); + let _bar = project("bar") + .file("Cargo.toml", &basic_bin_manifest("bar")) + .file("src/lib.rs", "invalid rust code!") + .build(); + let _baz = project("baz") + .file("Cargo.toml", &basic_bin_manifest("baz")) + .file("src/lib.rs", "invalid rust code!") + .build(); + assert_that(p.cargo("build"), execs().with_status(101)); +} + +#[test] +fn cargo_compile_with_warnings_in_the_root_package() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", "fn main() {} fn dead() {}") + .build(); + + assert_that( + p.cargo("build"), + execs() + .with_status(0) + .with_stderr_contains("[..]function is never used: `dead`[..]"), + ); +} + +#[test] +fn cargo_compile_with_warnings_in_a_dep_package() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + + [[bin]] + + name = "foo" + "#, + ) + .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "bar" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + pub fn gimme() -> &'static str { + "test passed" + } + + fn dead() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs() + .with_status(0) + .with_stderr_contains("[..]function is never used: `dead`[..]"), + ); + + assert_that(&p.bin("foo"), existing_file()); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("test passed\n"), + ); +} + +#[test] +fn cargo_compile_with_nested_deps_inferred() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = 'bar' + + [[bin]] + name = "foo" + "#, + 
) + .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file( + "baz/src/lib.rs", + r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#, + ) + .build(); + + p.cargo("build").exec_with_output().unwrap(); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("libbar.rlib"), is_not(existing_file())); + assert_that(&p.bin("libbaz.rlib"), is_not(existing_file())); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("test passed\n"), + ); +} + +#[test] +fn cargo_compile_with_nested_deps_correct_bin() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + + [[bin]] + name = "foo" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file( + "baz/src/lib.rs", + r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#, + ) + .build(); + + p.cargo("build").exec_with_output().unwrap(); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("libbar.rlib"), is_not(existing_file())); + assert_that(&p.bin("libbaz.rlib"), is_not(existing_file())); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("test passed\n"), + ); +} + +#[test] +fn cargo_compile_with_nested_deps_shorthand() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + + [lib] + + name = "bar" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "baz" + "#, + ) + .file( + "baz/src/baz.rs", + r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#, + ) + .build(); + + p.cargo("build").exec_with_output().unwrap(); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("libbar.rlib"), is_not(existing_file())); + assert_that(&p.bin("libbaz.rlib"), is_not(existing_file())); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("test passed\n"), + ); +} + +#[test] +fn cargo_compile_with_nested_deps_longhand() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = 
["wycats@example.com"] + + [dependencies.bar] + path = "bar" + version = "0.5.0" + + [[bin]] + + name = "foo" + "#, + ) + .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + version = "0.5.0" + + [lib] + + name = "bar" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "baz" + "#, + ) + .file( + "baz/src/baz.rs", + r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#, + ) + .build(); + + assert_that(p.cargo("build"), execs()); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("libbar.rlib"), is_not(existing_file())); + assert_that(&p.bin("libbaz.rlib"), is_not(existing_file())); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("test passed\n"), + ); +} + +// Check that Cargo gives a sensible error if a dependency can't be found +// because of a name mismatch. +#[test] +fn cargo_compile_with_dep_name_mismatch() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.1" + authors = ["wycats@example.com"] + + [[bin]] + + name = "foo" + + [dependencies.notquitebar] + + path = "bar" + "#, + ) + .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &["bar"])) + .file("bar/Cargo.toml", &basic_bin_manifest("bar")) + .file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[])) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr(&format!( + r#"error: no matching package named `notquitebar` found +location searched: {proj_dir}/bar +required by package `foo v0.0.1 ({proj_dir})` +"#, + proj_dir = p.url() + )), + ); +} + +#[test] +fn cargo_compile_with_filename() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + "src/bin/a.rs", + r#" + extern crate foo; + fn main() { println!("hello a.rs"); } + "#, + ) + .file( + "examples/a.rs", + r#" + fn main() { println!("example"); } + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("--bin").arg("bin.rs"), + execs() + .with_status(101) + .with_stderr("[ERROR] no bin target named `bin.rs`"), + ); + + assert_that( + p.cargo("build").arg("--bin").arg("a.rs"), + execs().with_status(101).with_stderr( + "\ +[ERROR] no bin target named `a.rs` + +Did you mean `a`?", + ), + ); + + assert_that( + p.cargo("build").arg("--example").arg("example.rs"), + execs() + .with_status(101) + .with_stderr("[ERROR] no example target named `example.rs`"), + ); + + assert_that( + p.cargo("build").arg("--example").arg("a.rs"), + execs().with_status(101).with_stderr( + "\ +[ERROR] no example target named `a.rs` + +Did you mean `a`?", + ), + ); +} + +#[test] +fn cargo_compile_path_with_offline() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build") + .masquerade_as_nightly_cargo() + .arg("-Zoffline"), + 
execs().with_status(0), + ); +} + +#[test] +fn cargo_compile_with_downloaded_dependency_with_offline() { + Package::new("present_dep", "1.2.3") + .file( + "Cargo.toml", + r#" + [project] + name = "present_dep" + version = "1.2.3" + "#, + ) + .file("src/lib.rs", "") + .publish(); + + { + // make package downloaded + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + present_dep = "1.2.3" + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build"), execs().with_status(0)); + } + + let p2 = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + + [dependencies] + present_dep = "1.2.3" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p2.cargo("build") + .masquerade_as_nightly_cargo() + .arg("-Zoffline"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] present_dep v1.2.3 +[COMPILING] bar v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]" + )), + ); +} + +#[test] +fn cargo_compile_offline_not_try_update() { + let p = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + + [dependencies] + not_cached_dep = "1.2.5" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build") + .masquerade_as_nightly_cargo() + .arg("-Zoffline"), + execs().with_status(101).with_stderr( + "\ +error: no matching package named `not_cached_dep` found +location searched: registry `[..]` +required by package `bar v0.1.0 ([..])` +As a reminder, you're using offline mode (-Z offline) \ +which can sometimes cause surprising resolution failures, \ +if this error is too confusing you may with to retry \ +without the offline flag.", + ), + ); +} + +#[test] +fn compile_offline_without_maxvers_cached() { + Package::new("present_dep", "1.2.1").publish(); + Package::new("present_dep", "1.2.2").publish(); + + Package::new("present_dep", "1.2.3") + .file( + "Cargo.toml", + r#" + [project] + name = "present_dep" + version = "1.2.3" + "#, + ) + .file( + "src/lib.rs", + r#"pub fn get_version()->&'static str {"1.2.3"}"#, + ) + .publish(); + + Package::new("present_dep", "1.2.5") + .file( + "Cargo.toml", + r#" + [project] + name = "present_dep" + version = "1.2.5" + "#, + ) + .file("src/lib.rs", r#"pub fn get_version(){"1.2.5"}"#) + .publish(); + + { + // make package cached + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + present_dep = "=1.2.3" + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build"), execs().with_status(0)); + } + + let p2 = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + present_dep = "1.2" + "#, + ) + .file( + "src/main.rs", + "\ +extern crate present_dep; +fn main(){ + println!(\"{}\", present_dep::get_version()); +}", + ) + .build(); + + assert_that( + p2.cargo("run") + .masquerade_as_nightly_cargo() + .arg("-Zoffline"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] present_dep v1.2.3 +[COMPILING] foo v0.1.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+ Running `[..]`", + url = p2.url() + )) + .with_stdout("1.2.3"), + ); +} + +#[test] +fn incompatible_dependencies() { + Package::new("bad", "0.1.0").publish(); + Package::new("bad", "1.0.0").publish(); + Package::new("bad", "1.0.1").publish(); + Package::new("bad", "1.0.2").publish(); + Package::new("foo", "0.1.0").dep("bad", "0.1.0").publish(); + Package::new("bar", "0.1.1").dep("bad", "=1.0.0").publish(); + Package::new("bar", "0.1.0").dep("bad", "=1.0.0").publish(); + Package::new("baz", "0.1.2").dep("bad", ">=1.0.1").publish(); + Package::new("baz", "0.1.1").dep("bad", ">=1.0.1").publish(); + Package::new("baz", "0.1.0").dep("bad", ">=1.0.1").publish(); + + let p = project("transitive_load_test") + .file( + "Cargo.toml", + r#" + [project] + name = "incompatible_dependencies" + version = "0.0.1" + + [dependencies] + foo = "0.1.0" + bar = "0.1.0" + baz = "0.1.0" + "#, + ) + .file("src/main.rs", "fn main(){}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains( + "\ +error: failed to select a version for `bad`. + ... required by package `baz v0.1.0` + ... which is depended on by `incompatible_dependencies v0.0.1 ([..])` +versions that meet the requirements `>= 1.0.1` are: 1.0.2, 1.0.1 + +all possible versions conflict with previously selected packages. + + previously selected package `bad v1.0.0` + ... which is depended on by `bar v0.1.0` + ... which is depended on by `incompatible_dependencies v0.0.1 ([..])` + +failed to select a version for `bad` which could resolve this conflict", + ), + ); +} + +#[test] +fn incompatible_dependencies_with_multi_semver() { + Package::new("bad", "1.0.0").publish(); + Package::new("bad", "1.0.1").publish(); + Package::new("bad", "2.0.0").publish(); + Package::new("bad", "2.0.1").publish(); + Package::new("bar", "0.1.0").dep("bad", "=1.0.0").publish(); + Package::new("baz", "0.1.0").dep("bad", ">=2.0.1").publish(); + + let p = project("transitive_load_test") + .file( + "Cargo.toml", + r#" + [project] + name = "incompatible_dependencies" + version = "0.0.1" + + [dependencies] + bar = "0.1.0" + baz = "0.1.0" + bad = ">=1.0.1, <=2.0.0" + "#, + ) + .file("src/main.rs", "fn main(){}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains( + "\ +error: failed to select a version for `bad`. + ... required by package `incompatible_dependencies v0.0.1 ([..])` +versions that meet the requirements `>= 1.0.1, <= 2.0.0` are: 2.0.0, 1.0.1 + +all possible versions conflict with previously selected packages. + + previously selected package `bad v2.0.1` + ... which is depended on by `baz v0.1.0` + ... which is depended on by `incompatible_dependencies v0.0.1 ([..])` + + previously selected package `bad v1.0.0` + ... which is depended on by `bar v0.1.0` + ... 
which is depended on by `incompatible_dependencies v0.0.1 ([..])`
+
+failed to select a version for `bad` which could resolve this conflict",
+        ),
+    );
+}
+
+#[test]
+fn compile_offline_while_transitive_dep_not_cached() {
+    let bar = Package::new("bar", "1.0.0");
+    let bar_path = bar.archive_dst();
+    bar.publish();
+
+    let mut content = Vec::new();
+
+    let mut file = File::open(bar_path.clone()).ok().unwrap();
+    let _ok = file.read_to_end(&mut content).ok().unwrap();
+    drop(file);
+    drop(File::create(bar_path.clone()).ok().unwrap());
+
+    Package::new("foo", "0.1.0").dep("bar", "1.0.0").publish();
+
+    let p = project("transitive_load_test")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "transitive_load_test"
+            version = "0.0.1"
+
+            [dependencies]
+            foo = "0.1.0"
+        "#,
+        )
+        .file("src/main.rs", "fn main(){}")
+        .build();
+
+    // simulate a download of foo, but fail to download bar
+    let _out = p.cargo("build").exec_with_output();
+
+    drop(File::create(bar_path).ok().unwrap().write_all(&content));
+
+    assert_that(
+        p.cargo("build")
+            .masquerade_as_nightly_cargo()
+            .arg("-Zoffline"),
+        execs().with_status(101).with_stderr(
+            "\
+error: no matching package named `bar` found
+location searched: registry `[..]`
+required by package `foo v0.1.0`
+    ... which is depended on by `transitive_load_test v0.0.1 ([..]/transitive_load_test)`
+As a reminder, you're using offline mode (-Z offline) \
+which can sometimes cause surprising resolution failures, \
+if this error is too confusing you may wish to retry \
+without the offline flag.",
+        ),
+    );
+}
+
+#[test]
+fn compile_path_dep_then_change_version() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+        "#,
+        )
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+
+    File::create(&p.root().join("bar/Cargo.toml"))
+        .unwrap()
+        .write_all(
+            br#"
+            [package]
+            name = "bar"
+            version = "0.0.2"
+            authors = []
+        "#,
+        )
+        .unwrap();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+}
+
+#[test]
+fn ignores_carriage_return_in_lockfile() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.1"
+        "#,
+        )
+        .file(
+            "src/main.rs",
+            r#"
+            mod a; fn main() {}
+        "#,
+        )
+        .file("src/a.rs", "")
+        .build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+
+    let lockfile = p.root().join("Cargo.lock");
+    let mut lock = String::new();
+    File::open(&lockfile)
+        .unwrap()
+        .read_to_string(&mut lock)
+        .unwrap();
+    let lock = lock.replace("\n", "\r\n");
+    File::create(&lockfile)
+        .unwrap()
+        .write_all(lock.as_bytes())
+        .unwrap();
+    assert_that(p.cargo("build"), execs().with_status(0));
+}
+
+#[test]
+fn cargo_default_env_metadata_env_var() {
+    // Ensure that path dep + dylib + env_var get metadata
+    // (even though path_dep + dylib should not)
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        )
+        .file("src/lib.rs", "// hi")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            crate_type = ["dylib"]
+        "#,
+        )
+        .file("bar/src/lib.rs", "// hello")
+        .build();
+
+    // No metadata on libbar since it's a dylib path
dependency + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ({url}/bar) +[RUNNING] `rustc --crate-name bar bar[/]src[/]lib.rs --crate-type dylib \ + --emit=dep-info,link \ + -C prefer-dynamic -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + -C extra-filename=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps \ + --extern bar={dir}[/]target[/]debug[/]deps[/]{prefix}bar{suffix}` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + dir = p.root().display(), + url = p.url(), + prefix = env::consts::DLL_PREFIX, + suffix = env::consts::DLL_SUFFIX, + )), + ); + + assert_that(p.cargo("clean"), execs().with_status(0)); + + // If you set the env-var, then we expect metadata on libbar + assert_that( + p.cargo("build") + .arg("-v") + .env("__CARGO_DEFAULT_LIB_METADATA", "stable"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ({url}/bar) +[RUNNING] `rustc --crate-name bar bar[/]src[/]lib.rs --crate-type dylib \ + --emit=dep-info,link \ + -C prefer-dynamic -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + -C extra-filename=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps \ + --extern bar={dir}[/]target[/]debug[/]deps[/]{prefix}bar-[..]{suffix}` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = p.root().display(), + url = p.url(), + prefix = env::consts::DLL_PREFIX, + suffix = env::consts::DLL_SUFFIX, + )), + ); +} + +#[test] +fn crate_env_vars() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.1-alpha.1" + description = "This is foo" + homepage = "http://example.com" + authors = ["wycats@example.com"] + "#, + ) + .file( + "src/main.rs", + r#" + extern crate foo; + + + static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR"); + static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR"); + static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH"); + static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE"); + static VERSION: &'static str = env!("CARGO_PKG_VERSION"); + static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR"); + static PKG_NAME: &'static str = env!("CARGO_PKG_NAME"); + static HOMEPAGE: &'static str = env!("CARGO_PKG_HOMEPAGE"); + static DESCRIPTION: &'static str = env!("CARGO_PKG_DESCRIPTION"); + + fn main() { + let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR, + VERSION_MINOR, VERSION_PATCH, VERSION_PRE, + CARGO_MANIFEST_DIR); + assert_eq!(s, foo::version()); + println!("{}", s); + assert_eq!("foo", PKG_NAME); + assert_eq!("http://example.com", HOMEPAGE); + assert_eq!("This is foo", DESCRIPTION); + let s = format!("{}.{}.{}-{}", VERSION_MAJOR, + VERSION_MINOR, VERSION_PATCH, VERSION_PRE); + assert_eq!(s, VERSION); + } + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn version() -> String { + format!("{}-{}-{} @ {} in {}", + env!("CARGO_PKG_VERSION_MAJOR"), + env!("CARGO_PKG_VERSION_MINOR"), + env!("CARGO_PKG_VERSION_PATCH"), + env!("CARGO_PKG_VERSION_PRE"), + env!("CARGO_MANIFEST_DIR")) + } + "#, + ) + .build(); + + println!("build"); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + + println!("bin"); + assert_that( + process(&p.bin("foo")), + execs() + .with_status(0) + .with_stdout(&format!("0-5-1 @ alpha.1 in {}\n", p.root().display())), + ); + + println!("test"); + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); +} + +#[test] +fn crate_authors_env_vars() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.1-alpha.1" + authors = ["wycats@example.com", "neikos@example.com"] + "#, + ) + .file( + "src/main.rs", + r#" + extern crate foo; + + static AUTHORS: &'static str = env!("CARGO_PKG_AUTHORS"); + + fn main() { + let s = "wycats@example.com:neikos@example.com"; + assert_eq!(AUTHORS, foo::authors()); + println!("{}", AUTHORS); + assert_eq!(s, AUTHORS); + } + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn authors() -> String { + format!("{}", env!("CARGO_PKG_AUTHORS")) + } + "#, + ) + .build(); + + println!("build"); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + + println!("bin"); + assert_that( + process(&p.bin("foo")), + execs() + .with_status(0) + .with_stdout("wycats@example.com:neikos@example.com"), + ); + + println!("test"); + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); +} + +// The tester may already have LD_LIBRARY_PATH=::/foo/bar which leads to a false positive error +fn setenv_for_removing_empty_component(mut p: ProcessBuilder) -> ProcessBuilder { + let v = dylib_path_envvar(); + if let Ok(search_path) = env::var(v) { + let new_search_path = env::join_paths( + env::split_paths(&search_path).filter(|e| !e.as_os_str().is_empty()), + ).expect("join_paths"); + p.env(v, new_search_path); // build_command() will 
override LD_LIBRARY_PATH accordingly
+    }
+    p
+}
+
+// Regression test for #4277
+#[test]
+fn crate_library_path_env_var() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#,
+        )
+        .file(
+            "src/main.rs",
+            &format!(
+                r##"
+            fn main() {{
+                let search_path = env!("{}");
+                let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
+                assert!(!paths.contains(&"".into()));
+            }}
+        "##,
+                dylib_path_envvar()
+            ),
+        )
+        .build();
+
+    assert_that(
+        setenv_for_removing_empty_component(p.cargo("run")),
+        execs().with_status(0),
+    );
+}
+
+// Regression test for #4277
+#[test]
+fn build_with_fake_libc_not_loading() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#,
+        )
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {}
+        "#,
+        )
+        .file("src/lib.rs", r#" "#)
+        .file("libc.so.6", r#""#)
+        .build();
+
+    assert_that(
+        setenv_for_removing_empty_component(p.cargo("build")),
+        execs().with_status(0),
+    );
+}
+
+// this is testing that src/<pkg-name>.rs still works (for now)
+#[test]
+fn many_crate_types_old_style_lib_location() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [lib]
+
+            name = "foo"
+            crate_type = ["rlib", "dylib"]
+        "#,
+        )
+        .file(
+            "src/foo.rs",
+            r#"
+            pub fn foo() {}
+        "#,
+        )
+        .build();
+    assert_that(
+        p.cargo("build"),
+        execs().with_status(0).with_stderr_contains(
+            "\
+[WARNING] path `[..]src[/]foo.rs` was erroneously implicitly accepted for library `foo`,
+please rename the file to `src/lib.rs` or set lib.path in Cargo.toml",
+        ),
+    );
+
+    assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
+    let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+    assert_that(&p.root().join("target/debug").join(&fname), existing_file());
+}
+
+#[test]
+fn many_crate_types_correct() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [lib]
+
+            name = "foo"
+            crate_type = ["rlib", "dylib"]
+        "#,
+        )
+        .file(
+            "src/lib.rs",
+            r#"
+            pub fn foo() {}
+        "#,
+        )
+        .build();
+    assert_that(p.cargo("build"), execs().with_status(0));
+
+    assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
+    let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+    assert_that(&p.root().join("target/debug").join(&fname), existing_file());
+}
+
+#[test]
+fn self_dependency() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+
+            name = "test"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.test]
+
+            path = "."
+
+            [lib]
+            name = "test"
+            path = "src/test.rs"
+        "#,
+        )
+        .file("src/test.rs", "fn main() {}")
+        .build();
+    assert_that(
+        p.cargo("build"),
+        execs().with_status(101).with_stderr(
+            "\
+[ERROR] cyclic package dependency: package `test v0.0.0 ([..])` depends on itself. 
Cycle: +package `test v0.0.0 ([..]foo)`", + ), + ); +} + +#[test] +fn ignore_broken_symlinks() { + // windows and symlinks don't currently agree that well + if cfg!(windows) { + return; + } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .symlink("Notafile", "bar") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("i am foo\n"), + ); +} + +#[test] +fn missing_lib_and_bin() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + "#, + ) + .build(); + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]Cargo.toml` + +Caused by: + no targets specified in the manifest + either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n", + ), + ); +} + +#[test] +fn lto_build() { + // FIXME: currently this hits a linker bug on 32-bit MSVC + if cfg!(all(target_env = "msvc", target_pointer_width = "32")) { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.release] + lto = true + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that( + p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]main.rs --crate-type bin \ + --emit=dep-info,link \ + -C opt-level=3 \ + -C lto \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]release[/]deps \ + -L dependency={dir}[/]target[/]release[/]deps` +[FINISHED] release [optimized] target(s) in [..] +", + dir = p.root().display(), + url = p.url(), + )), + ); +} + +#[test] +fn verbose_build() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.root().display(), + url = p.url(), + )), + ); +} + +#[test] +fn verbose_release_build() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]release[/]deps` +[FINISHED] release [optimized] target(s) in [..] 
+", + dir = p.root().display(), + url = p.url(), + )), + ); +} + +#[test] +fn verbose_release_build_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [dependencies.foo] + path = "foo" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.0" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib", "rlib"] + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ({url}/foo) +[RUNNING] `rustc --crate-name foo foo[/]src[/]lib.rs \ + --crate-type dylib --crate-type rlib \ + --emit=dep-info,link \ + -C prefer-dynamic \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]release[/]deps` +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]release[/]deps \ + --extern foo={dir}[/]target[/]release[/]deps[/]{prefix}foo{suffix} \ + --extern foo={dir}[/]target[/]release[/]deps[/]libfoo.rlib` +[FINISHED] release [optimized] target(s) in [..] +", + dir = p.root().display(), + url = p.url(), + prefix = env::consts::DLL_PREFIX, + suffix = env::consts::DLL_SUFFIX + )), + ); +} + +#[test] +fn explicit_examples() { + let p = project("world") + .file( + "Cargo.toml", + r#" + [package] + name = "world" + version = "1.0.0" + authors = [] + + [lib] + name = "world" + path = "src/lib.rs" + + [[example]] + name = "hello" + path = "examples/ex-hello.rs" + + [[example]] + name = "goodbye" + path = "examples/ex-goodbye.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn get_hello() -> &'static str { "Hello" } + pub fn get_goodbye() -> &'static str { "Goodbye" } + pub fn get_world() -> &'static str { "World" } + "#, + ) + .file( + "examples/ex-hello.rs", + r#" + extern crate world; + fn main() { println!("{}, {}!", world::get_hello(), world::get_world()); } + "#, + ) + .file( + "examples/ex-goodbye.rs", + r#" + extern crate world; + fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); } + "#, + ) + .build(); + + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); + assert_that( + process(&p.bin("examples/hello")), + execs().with_status(0).with_stdout("Hello, World!\n"), + ); + assert_that( + process(&p.bin("examples/goodbye")), + execs().with_status(0).with_stdout("Goodbye, World!\n"), + ); +} + +#[test] +fn non_existing_example() { + let p = project("world") + .file( + "Cargo.toml", + r#" + [package] + name = "world" + version = "1.0.0" + authors = [] + + [lib] + name = "world" + path = "src/lib.rs" + + [[example]] + name = "hello" + "#, + ) + .file("src/lib.rs", "") + .file("examples/ehlo.rs", "") + .build(); + + assert_that( + p.cargo("test").arg("-v"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + can't find `hello` example, specify example.path", + ), + ); +} + +#[test] +fn non_existing_binary() { + let p = project("world") + .file( + "Cargo.toml", + r#" + [package] + name = "world" + version = "1.0.0" + authors = [] + + [[bin]] + name = "hello" + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/ehlo.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse 
manifest at `[..]`
+
+Caused by:
+  can't find `hello` bin, specify bin.path",
+        ),
+    );
+}
+
+#[test]
+fn legacy_binary_paths_warnings() {
+    let p = project("world")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "1.0.0"
+            authors = []
+
+            [[bin]]
+            name = "bar"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    assert_that(
+        p.cargo("build").arg("-v"),
+        execs().with_status(0).with_stderr_contains(
+            "\
+[WARNING] path `[..]src[/]main.rs` was erroneously implicitly accepted for binary `bar`,
+please set bin.path in Cargo.toml",
+        ),
+    );
+
+    let p = project("world")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "1.0.0"
+            authors = []
+
+            [[bin]]
+            name = "bar"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file("src/bin/main.rs", "fn main() {}")
+        .build();
+
+    assert_that(
+        p.cargo("build").arg("-v"),
+        execs().with_status(0).with_stderr_contains(
+            "\
+[WARNING] path `[..]src[/]bin[/]main.rs` was erroneously implicitly accepted for binary `bar`,
+please set bin.path in Cargo.toml",
+        ),
+    );
+
+    let p = project("world")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "1.0.0"
+            authors = []
+
+            [[bin]]
+            name = "bar"
+        "#,
+        )
+        .file("src/bar.rs", "fn main() {}")
+        .build();
+
+    assert_that(
+        p.cargo("build").arg("-v"),
+        execs().with_status(0).with_stderr_contains(
+            "\
+[WARNING] path `[..]src[/]bar.rs` was erroneously implicitly accepted for binary `bar`,
+please set bin.path in Cargo.toml",
+        ),
+    );
+}
+
+#[test]
+fn implicit_examples() {
+    let p = project("world")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "world"
+            version = "1.0.0"
+            authors = []
+        "#,
+        )
+        .file(
+            "src/lib.rs",
+            r#"
+            pub fn get_hello() -> &'static str { "Hello" }
+            pub fn get_goodbye() -> &'static str { "Goodbye" }
+            pub fn get_world() -> &'static str { "World" }
+        "#,
+        )
+        .file(
+            "examples/hello.rs",
+            r#"
+            extern crate world;
+            fn main() {
+                println!("{}, {}!", world::get_hello(), world::get_world());
+            }
+        "#,
+        )
+        .file(
+            "examples/goodbye.rs",
+            r#"
+            extern crate world;
+            fn main() {
+                println!("{}, {}!", world::get_goodbye(), world::get_world());
+            }
+        "#,
+        )
+        .build();
+
+    assert_that(p.cargo("test"), execs().with_status(0));
+    assert_that(
+        process(&p.bin("examples/hello")),
+        execs().with_status(0).with_stdout("Hello, World!\n"),
+    );
+    assert_that(
+        process(&p.bin("examples/goodbye")),
+        execs().with_status(0).with_stdout("Goodbye, World!\n"),
+    );
+}
+
+#[test]
+fn standard_build_no_ndebug() {
+    let p = project("world")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/foo.rs",
+            r#"
+            fn main() {
+                if cfg!(debug_assertions) {
+                    println!("slow")
+                } else {
+                    println!("fast")
+                }
+            }
+        "#,
+        )
+        .build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(
+        process(&p.bin("foo")),
+        execs().with_status(0).with_stdout("slow\n"),
+    );
+}
+
+#[test]
+fn release_build_ndebug() {
+    let p = project("world")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/foo.rs",
+            r#"
+            fn main() {
+                if cfg!(debug_assertions) {
+                    println!("slow")
+                } else {
+                    println!("fast")
+                }
+            }
+        "#,
+        )
+        .build();
+
+    assert_that(p.cargo("build").arg("--release"), execs().with_status(0));
+    assert_that(
+        process(&p.release_bin("foo")),
+        execs().with_status(0).with_stdout("fast\n"),
+    );
+}
+
+#[test]
+fn inferred_main_bin() {
+    let p = project("world")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
"#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(process(&p.bin("foo")), execs().with_status(0)); +} + +#[test] +fn deletion_causes_failure() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + p.change_file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ); + assert_that(p.cargo("build"), execs().with_status(101)); +} + +#[test] +fn bad_cargo_toml_in_target_dir() { + let p = project("world") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file("target/Cargo.toml", "bad-toml") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(process(&p.bin("foo")), execs().with_status(0)); +} + +#[test] +fn lib_with_standard_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + " + pub fn foo() {} + ", + ) + .file( + "src/main.rs", + " + extern crate syntax; + fn main() { syntax::foo() } + ", + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = p.url() + )), + ); +} + +#[test] +fn simple_staticlib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [lib] + name = "foo" + crate-type = ["staticlib"] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + // env var is a test for #1381 + assert_that( + p.cargo("build").env("RUST_LOG", "nekoneko=trace"), + execs().with_status(0), + ); +} + +#[test] +fn staticlib_rlib_and_bin() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [lib] + name = "foo" + crate-type = ["staticlib", "rlib"] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file( + "src/main.rs", + r#" + extern crate foo; + + fn main() { + foo::foo(); + }"#, + ) + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn opt_out_of_bin() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + bin = [] + + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "bad syntax") + .build(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn single_lib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [lib] + name = "foo" + path = "src/bar.rs" + "#, + ) + .file("src/bar.rs", "") + .build(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn freshness_ignores_excluded() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + exclude = ["src/b*.rs"] + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .build(); + foo.root().move_into_the_past(); + + assert_that( + foo.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + url = foo.url() + )), + ); + + // Smoke test to make sure it doesn't compile again + println!("first pass"); + assert_that(foo.cargo("build"), execs().with_status(0).with_stdout("")); + + // Modify an ignored file and make sure we don't rebuild + println!("second pass"); + File::create(&foo.root().join("src/bar.rs")).unwrap(); + assert_that(foo.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn rebuild_preserves_out_dir() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 'build.rs' + "#, + ) + .file( + "build.rs", + r#" + use std::env; + use std::fs::File; + use std::path::Path; + + fn main() { + let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo"); + if env::var_os("FIRST").is_some() { + File::create(&path).unwrap(); + } else { + File::create(&path).unwrap(); + } + } + "#, + ) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .build(); + foo.root().move_into_the_past(); + + assert_that( + foo.cargo("build").env("FIRST", "1"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + url = foo.url() + )), + ); + + File::create(&foo.root().join("src/bar.rs")).unwrap(); + assert_that( + foo.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + url = foo.url() + )), + ); +} + +#[test] +fn dep_no_libs() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "") + .build(); + assert_that(foo.cargo("build"), execs().with_status(0)); +} + +#[test] +fn recompile_space_in_name() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [lib] + name = "foo" + path = "src/my lib.rs" + "#, + ) + .file("src/my lib.rs", "") + .build(); + assert_that(foo.cargo("build"), execs().with_status(0)); + foo.root().move_into_the_past(); + assert_that(foo.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[cfg(unix)] +#[test] +fn ignore_bad_directories() { + use std::os::unix::prelude::*; + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + let dir = foo.root().join("tmp"); + fs::create_dir(&dir).unwrap(); + let stat = fs::metadata(&dir).unwrap(); + let mut perms = stat.permissions(); + perms.set_mode(0o644); + fs::set_permissions(&dir, perms.clone()).unwrap(); + assert_that(foo.cargo("build"), execs().with_status(0)); + perms.set_mode(0o755); + fs::set_permissions(&dir, perms).unwrap(); +} + +#[test] +fn bad_cargo_config() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + this is not valid toml + "#, + ) + .build(); + assert_that( + foo.cargo("build").arg("-v"), + execs().with_status(101).with_stderr( + "\ +[ERROR] Couldn't load Cargo configuration + +Caused by: + could not parse TOML configuration in `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + expected an equals, found an identifier at line 2 +", + ), + ); +} + +#[test] +fn cargo_platform_specific_dependency() { + let host = rustc_host(); + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [target.{host}.dependencies] + dep = {{ path = "dep" }} + [target.{host}.build-dependencies] + build = {{ path = "build" }} + [target.{host}.dev-dependencies] + dev = {{ path = "dev" }} + "#, + host = host + ), + ) + .file( + "src/main.rs", + r#" + extern crate dep; + fn main() { dep::dep() } + "#, + ) + .file( + "tests/foo.rs", + r#" + extern crate dev; + #[test] + fn foo() { dev::dev() } + "#, + ) + .file( + "build.rs", + r#" + extern crate build; + fn main() { build::build(); } + "#, + ) + .file( + "dep/Cargo.toml", + r#" + [project] + name = "dep" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file("dep/src/lib.rs", "pub fn dep() {}") + .file( + "build/Cargo.toml", + r#" + [project] + name = "build" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file("build/src/lib.rs", "pub fn build() {}") + .file( + "dev/Cargo.toml", + r#" + [project] + name = "dev" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file("dev/src/lib.rs", "pub fn dev() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + assert_that(&p.bin("foo"), existing_file()); + 
assert_that(p.cargo("test"), execs().with_status(0)); +} + +#[test] +fn bad_platform_specific_dependency() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [target.wrong-target.dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + extern crate baz; + + pub fn gimme() -> String { + format!("") + } + "#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(101)); +} + +#[test] +fn cargo_platform_specific_dependency_wrong_platform() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [target.non-existing-triplet.dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + invalid rust file, should not be compiled + "#, + ) + .build(); + + p.cargo("build").exec_with_output().unwrap(); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(process(&p.bin("foo")), execs().with_status(0)); + + let loc = p.root().join("Cargo.lock"); + let mut lockfile = String::new(); + File::open(&loc) + .unwrap() + .read_to_string(&mut lockfile) + .unwrap(); + assert!(lockfile.contains("bar")) +} + +#[test] +fn example_as_lib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["lib"] + "#, + ) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0)); + assert_that(&p.example_lib("ex", "lib"), existing_file()); +} + +#[test] +fn example_as_rlib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["rlib"] + "#, + ) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0)); + assert_that(&p.example_lib("ex", "rlib"), existing_file()); +} + +#[test] +fn example_as_dylib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["dylib"] + "#, + ) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0)); + assert_that(&p.example_lib("ex", "dylib"), existing_file()); +} + +#[test] +fn example_as_proc_macro() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["proc-macro"] + "#, + ) + .file("src/lib.rs", "") + .file("examples/ex.rs", "#![feature(proc_macro)]") + .build(); + + assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0)); + assert_that(&p.example_lib("ex", "proc-macro"), existing_file()); +} + +#[test] +fn example_bin_same_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + 
version = "0.0.1" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .build(); + + p.cargo("test") + .arg("--no-run") + .arg("-v") + .exec_with_output() + .unwrap(); + + assert_that(&p.bin("foo"), is_not(existing_file())); + // We expect a file of the form bin/foo-{metadata_hash} + assert_that(&p.bin("examples/foo"), existing_file()); + + p.cargo("test") + .arg("--no-run") + .arg("-v") + .exec_with_output() + .unwrap(); + + assert_that(&p.bin("foo"), is_not(existing_file())); + // We expect a file of the form bin/foo-{metadata_hash} + assert_that(&p.bin("examples/foo"), existing_file()); +} + +#[test] +fn compile_then_delete() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("run").arg("-v"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + if cfg!(windows) { + // On windows unlinking immediately after running often fails, so sleep + sleep_ms(100); + } + fs::remove_file(&p.bin("foo")).unwrap(); + assert_that(p.cargo("run").arg("-v"), execs().with_status(0)); +} + +#[test] +fn transitive_dependencies_not_available() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.aaaaa] + path = "a" + "#, + ) + .file( + "src/main.rs", + "extern crate bbbbb; extern crate aaaaa; fn main() {}", + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "aaaaa" + version = "0.0.1" + authors = [] + + [dependencies.bbbbb] + path = "../b" + "#, + ) + .file("a/src/lib.rs", "extern crate bbbbb;") + .file( + "b/Cargo.toml", + r#" + [package] + name = "bbbbb" + version = "0.0.1" + authors = [] + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs() + .with_status(101) + .with_stderr_contains("[..] can't find crate for `bbbbb`[..]"), + ); +} + +#[test] +fn cyclic_deps_rejected() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = ".." + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), + execs().with_status(101) + .with_stderr( +r#"[ERROR] cyclic package dependency: package `a v0.0.1 ([..])` depends on itself. Cycle: +package `a v0.0.1 ([..]a)` + ... 
which is depended on by `foo v0.0.1 ([..]foo)`[..]"#)); +} + +#[test] +fn predictable_filenames() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate-type = ["dylib", "rlib"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); + let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); + assert_that( + &p.root().join("target/debug").join(dylib_name), + existing_file(), + ); +} + +#[test] +fn dashes_to_underscores() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo-bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "extern crate foo_bar; fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + assert_that(&p.bin("foo-bar"), existing_file()); +} + +#[test] +fn dashes_in_crate_name_bad() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo-bar" + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "extern crate foo_bar; fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(101)); +} + +#[test] +fn rustc_env_var() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build") + .env("RUSTC", "rustc-that-does-not-exist") + .arg("-v"), + execs().with_status(101).with_stderr( + "\ +[ERROR] could not execute process `rustc-that-does-not-exist -vV` ([..]) + +Caused by: +[..] 
+", + ), + ); + assert_that(&p.bin("a"), is_not(existing_file())); +} + +#[test] +fn filtering() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--lib"), execs().with_status(0)); + assert_that(&p.bin("a"), is_not(existing_file())); + + assert_that( + p.cargo("build").arg("--bin=a").arg("--example=a"), + execs().with_status(0), + ); + assert_that(&p.bin("a"), existing_file()); + assert_that(&p.bin("b"), is_not(existing_file())); + assert_that(&p.bin("examples/a"), existing_file()); + assert_that(&p.bin("examples/b"), is_not(existing_file())); +} + +#[test] +fn filtering_implicit_bins() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--bins"), execs().with_status(0)); + assert_that(&p.bin("a"), existing_file()); + assert_that(&p.bin("b"), existing_file()); + assert_that(&p.bin("examples/a"), is_not(existing_file())); + assert_that(&p.bin("examples/b"), is_not(existing_file())); +} + +#[test] +fn filtering_implicit_examples() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--examples"), execs().with_status(0)); + assert_that(&p.bin("a"), is_not(existing_file())); + assert_that(&p.bin("b"), is_not(existing_file())); + assert_that(&p.bin("examples/a"), existing_file()); + assert_that(&p.bin("examples/b"), existing_file()); +} + +#[test] +fn ignore_dotfile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/bin/.a.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn ignore_dotdirs() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/bin/a.rs", "fn main() {}") + .file(".git/Cargo.toml", "") + .file(".pc/dummy-fix.patch/Cargo.toml", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn dotdir_root() { + let p = ProjectBuilder::new("foo", root().join(".foo")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/bin/a.rs", "fn main() {}") + .build(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn custom_target_dir() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + let exe_name = format!("foo{}", env::consts::EXE_SUFFIX); + + assert_that( + p.cargo("build").env("CARGO_TARGET_DIR", "foo/target"), + execs().with_status(0), + ); + 
assert_that( + &p.root().join("foo/target/debug").join(&exe_name), + existing_file(), + ); + assert_that( + &p.root().join("target/debug").join(&exe_name), + is_not(existing_file()), + ); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that( + &p.root().join("foo/target/debug").join(&exe_name), + existing_file(), + ); + assert_that( + &p.root().join("target/debug").join(&exe_name), + existing_file(), + ); + + fs::create_dir(p.root().join(".cargo")).unwrap(); + File::create(p.root().join(".cargo/config")) + .unwrap() + .write_all( + br#" + [build] + target-dir = "foo/target" + "#, + ) + .unwrap(); + assert_that( + p.cargo("build").env("CARGO_TARGET_DIR", "bar/target"), + execs().with_status(0), + ); + assert_that( + &p.root().join("bar/target/debug").join(&exe_name), + existing_file(), + ); + assert_that( + &p.root().join("foo/target/debug").join(&exe_name), + existing_file(), + ); + assert_that( + &p.root().join("target/debug").join(&exe_name), + existing_file(), + ); +} + +#[test] +fn rustc_no_trans() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("rustc").arg("-v").arg("--").arg("-Zno-trans"), + execs().with_status(0), + ); +} + +#[test] +fn build_multiple_packages() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [[bin]] + name = "foo" + "#, + ) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d1" + "#, + ) + .file("d1/src/lib.rs", "") + .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d2" + doctest = false + "#, + ) + .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }") + .build(); + + assert_that(p.cargo("build -p d1 -p d2 -p foo"), execs().with_status(0)); + + assert_that(&p.bin("foo"), existing_file()); + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("i am foo\n"), + ); + + let d1_path = &p.build_dir() + .join("debug") + .join(format!("d1{}", env::consts::EXE_SUFFIX)); + let d2_path = &p.build_dir() + .join("debug") + .join(format!("d2{}", env::consts::EXE_SUFFIX)); + + assert_that(d1_path, existing_file()); + assert_that(process(d1_path), execs().with_status(0).with_stdout("d1")); + + assert_that(d2_path, existing_file()); + assert_that(process(d2_path), execs().with_status(0).with_stdout("d2")); +} + +#[test] +fn invalid_spec() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + + [[bin]] + name = "foo" + "#, + ) + .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &[])) + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d1" + "#, + ) + .file("d1/src/lib.rs", "") + .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") + .build(); + + assert_that( + p.cargo("build").arg("-p").arg("notAValidDep"), + execs() + .with_status(101) + .with_stderr("[ERROR] package id specification `notAValidDep` matched no packages"), + ); + + assert_that( + p.cargo("build") + .arg("-p") + 
.arg("d1") + .arg("-p") + .arg("notAValidDep"), + execs() + .with_status(101) + .with_stderr("[ERROR] package id specification `notAValidDep` matched no packages"), + ); +} + +#[test] +fn manifest_with_bom_is_ok() { + let p = project("foo") + .file( + "Cargo.toml", + "\u{FEFF} + [package] + name = \"foo\" + version = \"0.0.1\" + authors = [] + ", + ) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn panic_abort_compiles_with_panic_abort() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [profile.dev] + panic = 'abort' + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[..] -C panic=abort [..]"), + ); +} + +#[test] +fn explicit_color_config_is_propagated_to_rustc() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").arg("-v").arg("--color").arg("always"), + execs() + .with_status(0) + .with_stderr_contains("[..]rustc [..] src[/]lib.rs --color always[..]"), + ); + + assert_that(p.cargo("clean"), execs().with_status(0)); + + assert_that( + p.cargo("build").arg("-v").arg("--color").arg("never"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] test v0.0.0 ([..]) +[RUNNING] `rustc [..] --color never [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn compiler_json_error_format() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "fn main() { let unused = 92; }") + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file("bar/src/lib.rs", r#"fn dead() {}"#) + .build(); + + assert_that( + p.cargo("build") + .arg("-v") + .arg("--message-format") + .arg("json"), + execs().with_status(0).with_json( + r#" + { + "reason":"compiler-message", + "package_id":"bar 0.5.0 ([..])", + "target":{ + "kind":["lib"], + "crate_types":["lib"], + "name":"bar", + "src_path":"[..]lib.rs" + }, + "message":"{...}" + } + + { + "reason":"compiler-artifact", + "profile": { + "debug_assertions": true, + "debuginfo": 2, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "features": [], + "package_id":"bar 0.5.0 ([..])", + "target":{ + "kind":["lib"], + "crate_types":["lib"], + "name":"bar", + "src_path":"[..]lib.rs" + }, + "filenames":["[..].rlib"], + "fresh": false + } + + { + "reason":"compiler-message", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "name":"foo", + "src_path":"[..]main.rs" + }, + "message":"{...}" + } + + { + "reason":"compiler-artifact", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "name":"foo", + "src_path":"[..]main.rs" + }, + "profile": { + "debug_assertions": true, + "debuginfo": 2, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "features": [], + "filenames": "{...}", + "fresh": false + } +"#, + ), + ); + + // With fresh build, we should repeat the artifacts, + // but omit compiler warnings. 
+    assert_that(
+        p.cargo("build")
+            .arg("-v")
+            .arg("--message-format")
+            .arg("json"),
+        execs().with_status(0).with_json(
+            r#"
+    {
+        "reason":"compiler-artifact",
+        "profile": {
+            "debug_assertions": true,
+            "debuginfo": 2,
+            "opt_level": "0",
+            "overflow_checks": true,
+            "test": false
+        },
+        "features": [],
+        "package_id":"bar 0.5.0 ([..])",
+        "target":{
+            "kind":["lib"],
+            "crate_types":["lib"],
+            "name":"bar",
+            "src_path":"[..]lib.rs"
+        },
+        "filenames":["[..].rlib"],
+        "fresh": true
+    }
+
+    {
+        "reason":"compiler-artifact",
+        "package_id":"foo 0.5.0 ([..])",
+        "target":{
+            "kind":["bin"],
+            "crate_types":["bin"],
+            "name":"foo",
+            "src_path":"[..]main.rs"
+        },
+        "profile": {
+            "debug_assertions": true,
+            "debuginfo": 2,
+            "opt_level": "0",
+            "overflow_checks": true,
+            "test": false
+        },
+        "features": [],
+        "filenames": "{...}",
+        "fresh": true
+    }
+"#,
+        ),
+    );
+}
+
+#[test]
+fn wrong_message_format_option() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    assert_that(
+        p.cargo("build").arg("--message-format").arg("XML"),
+        execs().with_status(1).with_stderr_contains(
+            "\
+error: 'XML' isn't a valid value for '--message-format <FMT>'
+[possible values: human, json]
+",
+        ),
+    );
+}
+
+#[test]
+fn message_format_json_forward_stderr() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() { let unused = 0; }")
+        .build();
+
+    assert_that(
+        p.cargo("rustc")
+            .arg("--release")
+            .arg("--bin")
+            .arg("foo")
+            .arg("--message-format")
+            .arg("JSON"),
+        execs().with_status(0).with_json(
+            r#"
+    {
+        "reason":"compiler-message",
+        "package_id":"foo 0.5.0 ([..])",
+        "target":{
+            "kind":["bin"],
+            "crate_types":["bin"],
+            "name":"foo",
+            "src_path":"[..]"
+        },
+        "message":"{...}"
+    }
+
+    {
+        "reason":"compiler-artifact",
+        "package_id":"foo 0.5.0 ([..])",
+        "target":{
+            "kind":["bin"],
+            "crate_types":["bin"],
+            "name":"foo",
+            "src_path":"[..]"
+        },
+        "profile":{
+            "debug_assertions":false,
+            "debuginfo":null,
+            "opt_level":"3",
+            "overflow_checks": false,
+            "test":false
+        },
+        "features":[],
+        "filenames": "{...}",
+        "fresh": false
+    }
+"#,
+        ),
+    );
+}
+
+#[test]
+fn no_warn_about_package_metadata() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [package.metadata]
+            foo = "bar"
+            a = true
+            b = 3
+
+            [package.metadata.another]
+            bar = 3
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .build();
+    assert_that(
+        p.cargo("build"),
+        execs().with_status(0).with_stderr(
+            "[..] foo v0.0.1 ([..])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+        ),
+    );
+}
+
+#[test]
+fn cargo_build_empty_target() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    assert_that(
+        p.cargo("build").arg("--target").arg(""),
+        execs()
+            .with_status(101)
+            .with_stderr_contains("[..] 
target was empty"), + ); +} + +#[test] +fn build_all_workspace() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("--all"), + execs().with_status(0).with_stderr( + "[..] Compiling bar v0.1.0 ([..])\n\ + [..] Compiling foo v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ), + ); +} + +#[test] +fn build_all_exclude() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [workspace] + members = ["bar", "baz"] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.1.0" + "#, + ) + .file( + "baz/src/lib.rs", + r#" + pub fn baz() { + break_the_build(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("--all").arg("--exclude").arg("baz"), + execs() + .with_status(0) + .with_stderr_contains("[..]Compiling foo v0.1.0 [..]") + .with_stderr_contains("[..]Compiling bar v0.1.0 [..]") + .with_stderr_does_not_contain("[..]Compiling baz v0.1.0 [..]"), + ); +} + +#[test] +fn build_all_workspace_implicit_examples() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/c.rs", "fn main() {}") + .file("examples/d.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file("bar/src/lib.rs", "") + .file("bar/src/bin/e.rs", "fn main() {}") + .file("bar/src/bin/f.rs", "fn main() {}") + .file("bar/examples/g.rs", "fn main() {}") + .file("bar/examples/h.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("--all").arg("--examples"), + execs().with_status(0).with_stderr( + "[..] Compiling bar v0.1.0 ([..])\n\ + [..] Compiling foo v0.1.0 ([..])\n\ + [..] 
Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ), + ); + assert_that(&p.bin("a"), is_not(existing_file())); + assert_that(&p.bin("b"), is_not(existing_file())); + assert_that(&p.bin("examples/c"), existing_file()); + assert_that(&p.bin("examples/d"), existing_file()); + assert_that(&p.bin("e"), is_not(existing_file())); + assert_that(&p.bin("f"), is_not(existing_file())); + assert_that(&p.bin("examples/g"), existing_file()); + assert_that(&p.bin("examples/h"), existing_file()); +} + +#[test] +fn build_all_virtual_manifest() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file( + "foo/src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + // The order in which foo and bar are built is not guaranteed + assert_that( + p.cargo("build").arg("--all"), + execs() + .with_status(0) + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])") + .with_stderr( + "[..] Compiling [..] v0.1.0 ([..])\n\ + [..] Compiling [..] v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ), + ); +} + +#[test] +fn build_virtual_manifest_all_implied() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file( + "foo/src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + // The order in which foo and bar are built is not guaranteed + assert_that( + p.cargo("build"), + execs() + .with_status(0) + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])") + .with_stderr( + "[..] Compiling [..] v0.1.0 ([..])\n\ + [..] Compiling [..] v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ), + ); +} + +#[test] +fn build_virtual_manifest_one_project() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file( + "foo/src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("-p").arg("foo"), + execs() + .with_status(0) + .with_stderr_does_not_contain("bar") + .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])") + .with_stderr( + "[..] Compiling [..] v0.1.0 ([..])\n\ + [..] 
Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ), + ); +} + +#[test] +fn build_all_virtual_manifest_implicit_examples() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file("foo/src/lib.rs", "") + .file("foo/src/bin/a.rs", "fn main() {}") + .file("foo/src/bin/b.rs", "fn main() {}") + .file("foo/examples/c.rs", "fn main() {}") + .file("foo/examples/d.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file("bar/src/lib.rs", "") + .file("bar/src/bin/e.rs", "fn main() {}") + .file("bar/src/bin/f.rs", "fn main() {}") + .file("bar/examples/g.rs", "fn main() {}") + .file("bar/examples/h.rs", "fn main() {}") + .build(); + + // The order in which foo and bar are built is not guaranteed + assert_that( + p.cargo("build").arg("--all").arg("--examples"), + execs() + .with_status(0) + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])") + .with_stderr( + "[..] Compiling [..] v0.1.0 ([..])\n\ + [..] Compiling [..] v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ), + ); + assert_that(&p.bin("a"), is_not(existing_file())); + assert_that(&p.bin("b"), is_not(existing_file())); + assert_that(&p.bin("examples/c"), existing_file()); + assert_that(&p.bin("examples/d"), existing_file()); + assert_that(&p.bin("e"), is_not(existing_file())); + assert_that(&p.bin("f"), is_not(existing_file())); + assert_that(&p.bin("examples/g"), existing_file()); + assert_that(&p.bin("examples/h"), existing_file()); +} + +#[test] +fn build_all_member_dependency_same_name() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + + [dependencies] + a = "0.1.0" + "#, + ) + .file( + "a/src/lib.rs", + r#" + pub fn a() {} + "#, + ) + .build(); + + Package::new("a", "0.1.0").publish(); + + assert_that( + p.cargo("build").arg("--all"), + execs().with_status(0).with_stderr( + "[..] Updating registry `[..]`\n\ + [..] Downloading a v0.1.0 ([..])\n\ + [..] Compiling a v0.1.0\n\ + [..] Compiling a v0.1.0 ([..])\n\ + [..] 
Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ), + ); +} + +#[test] +fn run_proper_binary() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "main" + [[bin]] + name = "other" + "#, + ) + .file("src/lib.rs", "") + .file( + "src/bin/main.rs", + r#" + fn main() { + panic!("This should never be run."); + } + "#, + ) + .file( + "src/bin/other.rs", + r#" + fn main() { + } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("--bin").arg("other"), + execs().with_status(0), + ); +} + +#[test] +fn run_proper_binary_main_rs() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "foo" + "#, + ) + .file("src/lib.rs", "") + .file( + "src/bin/main.rs", + r#" + fn main() { + } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("--bin").arg("foo"), + execs().with_status(0), + ); +} + +#[test] +fn run_proper_alias_binary_from_src() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "foo" + [[bin]] + name = "bar" + "#, + ) + .file( + "src/foo.rs", + r#" + fn main() { + println!("foo"); + } + "#, + ) + .file( + "src/bar.rs", + r#" + fn main() { + println!("bar"); + } + "#, + ) + .build(); + + assert_that(p.cargo("build").arg("--all"), execs().with_status(0)); + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("foo\n"), + ); + assert_that( + process(&p.bin("bar")), + execs().with_status(0).with_stdout("bar\n"), + ); +} + +#[test] +fn run_proper_alias_binary_main_rs() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "foo" + [[bin]] + name = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + println!("main"); + } + "#, + ) + .build(); + + assert_that(p.cargo("build").arg("--all"), execs().with_status(0)); + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("main\n"), + ); + assert_that( + process(&p.bin("bar")), + execs().with_status(0).with_stdout("main\n"), + ); +} + +#[test] +fn run_proper_binary_main_rs_as_foo() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "foo" + "#, + ) + .file( + "src/foo.rs", + r#" + fn main() { + panic!("This should never be run."); + } + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("--bin").arg("foo"), + execs().with_status(0), + ); +} + +#[test] +fn rustc_wrapper() { + // We don't have /usr/bin/env on Windows. 
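+    // RUSTC_WRAPPER makes Cargo invoke `$RUSTC_WRAPPER rustc <args...>` in
+    // place of a bare `rustc` invocation; `/usr/bin/env` simply execs its
+    // arguments, so it acts as a no-op wrapper, as the stderr check below
+    // confirms by matching the wrapped command line.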
+ if cfg!(windows) { + return; + } + + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("build") + .arg("-v") + .env("RUSTC_WRAPPER", "/usr/bin/env"), + execs() + .with_stderr_contains("[RUNNING] `/usr/bin/env rustc --crate-name foo [..]") + .with_status(0), + ); +} + +#[test] +fn cdylib_not_lifted() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + + [lib] + crate-type = ["cdylib"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let files = if cfg!(windows) { + vec!["foo.dll.lib", "foo.dll.exp", "foo.dll"] + } else if cfg!(target_os = "macos") { + vec!["libfoo.dylib"] + } else { + vec!["libfoo.so"] + }; + + for file in files { + println!("checking: {}", file); + assert_that( + &p.root().join("target/debug/deps").join(&file), + existing_file(), + ); + } +} + +#[test] +fn cdylib_final_outputs() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo-bar" + authors = [] + version = "0.1.0" + + [lib] + crate-type = ["cdylib"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let files = if cfg!(windows) { + vec!["foo_bar.dll.lib", "foo_bar.dll"] + } else if cfg!(target_os = "macos") { + vec!["libfoo_bar.dylib"] + } else { + vec!["libfoo_bar.so"] + }; + + for file in files { + println!("checking: {}", file); + assert_that(&p.root().join("target/debug").join(&file), existing_file()); + } +} + +#[test] +fn deterministic_cfg_flags() { + // This bug is non-deterministic + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + build = "build.rs" + + [features] + default = ["f_a", "f_b", "f_c", "f_d"] + f_a = [] + f_b = [] + f_c = [] + f_d = [] + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=cfg_a"); + println!("cargo:rustc-cfg=cfg_b"); + println!("cargo:rustc-cfg=cfg_c"); + println!("cargo:rustc-cfg=cfg_d"); + println!("cargo:rustc-cfg=cfg_e"); + } + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.1.0 [..] +[RUNNING] [..] +[RUNNING] [..] +[RUNNING] `rustc --crate-name foo [..] \ +--cfg[..]default[..]--cfg[..]f_a[..]--cfg[..]f_b[..]\ +--cfg[..]f_c[..]--cfg[..]f_d[..] 
\ +--cfg cfg_a --cfg cfg_b --cfg cfg_c --cfg cfg_d --cfg cfg_e` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + ), + ); +} + +#[test] +fn explicit_bins_without_paths() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [[bin]] + name = "foo" + + [[bin]] + name = "bar" + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn no_bin_in_src_with_lib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [[bin]] + name = "foo" + "#, + ) + .file("src/lib.rs", "") + .file("src/foo.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + can't find `foo` bin, specify bin.path", + ), + ); +} + +#[test] +fn inferred_bins() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .file("src/bin/baz/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.bin("baz"), existing_file()); +} + +#[test] +fn inferred_bins_duplicate_name() { + // this should fail, because we have two binaries with the same name + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("src/bin/foo.rs", "fn main() {}") + .file("src/bin/foo/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains( + "[..]found duplicate binary name foo, but all binary targets must have a unique name[..]", + ), + ); +} + +#[test] +fn inferred_bin_path() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [[bin]] + name = "bar" + # Note, no `path` key! 
+ "#, + ) + .file("src/bin/bar/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("bar"), existing_file()); +} + +#[test] +fn inferred_examples() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "fn main() {}") + .file("examples/bar.rs", "fn main() {}") + .file("examples/baz/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("test"), execs().with_status(0)); + assert_that(&p.bin("examples/bar"), existing_file()); + assert_that(&p.bin("examples/baz"), existing_file()); +} + +#[test] +fn inferred_tests() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "fn main() {}") + .file("tests/bar.rs", "fn main() {}") + .file("tests/baz/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("test").arg("--test=bar").arg("--test=baz"), + execs().with_status(0), + ); +} + +#[test] +fn inferred_benchmarks() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "fn main() {}") + .file("benches/bar.rs", "fn main() {}") + .file("benches/baz/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("bench").arg("--bench=bar").arg("--bench=baz"), + execs().with_status(0), + ); +} + +#[test] +fn same_metadata_different_directory() { + // A top-level crate built in two different workspaces should have the + // same metadata hash. + let p = project("foo1") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + let output = t!(String::from_utf8( + t!(p.cargo("build").arg("-v").exec_with_output()).stderr, + )); + let metadata = output + .split_whitespace() + .find(|arg| arg.starts_with("metadata=")) + .unwrap(); + + let p = project("foo2") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains(format!("[..]{}[..]", metadata)), + ); +} + +#[test] +fn building_a_dependent_crate_witout_bin_should_fail() { + Package::new("testless", "0.1.0") + .file( + "Cargo.toml", + r#" + [project] + name = "testless" + version = "0.1.0" + + [[bin]] + name = "a_bin" + "#, + ) + .file("src/lib.rs", "") + .publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + testless = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs() + .with_status(101) + .with_stderr_contains("[..]can't find `a_bin` bin, specify bin.path"), + ); +} + +#[test] +fn uplift_dsym_of_bin_on_mac() { + if !cfg!(any(target_os = "macos", target_os = "ios")) { + return; + } + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file("src/main.rs", "fn main() { panic!(); }") + .file("src/bin/b.rs", "fn main() { panic!(); }") + .file("examples/c.rs", "fn main() { panic!(); }") + .file("tests/d.rs", "fn main() { panic!(); }") + .build(); + + assert_that( + p.cargo("build") + .arg("--bins") + .arg("--examples") + .arg("--tests"), + execs().with_status(0), + ); + assert_that(&p.bin("foo.dSYM"), existing_dir()); + assert_that(&p.bin("b.dSYM"), existing_dir()); + assert!( + 
p.bin("b.dSYM") + .symlink_metadata() + .expect("read metadata from b.dSYM") + .file_type() + .is_symlink() + ); + assert_that(&p.bin("c.dSYM"), is_not(existing_dir())); + assert_that(&p.bin("d.dSYM"), is_not(existing_dir())); +} + +#[test] +fn uplift_pdb_of_bin_on_windows() { + if !cfg!(all(target_os = "windows", target_env = "msvc")) { + return; + } + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file("src/main.rs", "fn main() { panic!(); }") + .file("src/bin/b.rs", "fn main() { panic!(); }") + .file("examples/c.rs", "fn main() { panic!(); }") + .file("tests/d.rs", "fn main() { panic!(); }") + .build(); + + assert_that( + p.cargo("build") + .arg("--bins") + .arg("--examples") + .arg("--tests"), + execs().with_status(0), + ); + assert_that(&p.target_debug_dir().join("foo.pdb"), existing_file()); + assert_that(&p.target_debug_dir().join("b.pdb"), existing_file()); + assert_that(&p.target_debug_dir().join("c.pdb"), is_not(existing_file())); + assert_that(&p.target_debug_dir().join("d.pdb"), is_not(existing_file())); +} + +// Make sure that `cargo build` chooses the correct profile for building +// targets based on filters (assuming --profile is not specified). +#[test] +fn build_filter_infer_profile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("tests/t1.rs", "") + .file("benches/b1.rs", "") + .file("examples/ex1.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains( + "\ + [RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \ + --emit=dep-info,link[..]", + ) + .with_stderr_contains( + "\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \ + --emit=dep-info,link[..]", + ), + ); + + p.root().join("target").rm_rf(); + assert_that( + p.cargo("build").arg("-v").arg("--test=t1"), + execs() + .with_status(0) + .with_stderr_contains( + "\ + [RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \ + --emit=dep-info,link[..]", + ) + .with_stderr_contains( + "[RUNNING] `rustc --crate-name t1 tests[/]t1.rs --emit=dep-info,link[..]", + ) + .with_stderr_contains( + "\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \ + --emit=dep-info,link[..]", + ), + ); + + p.root().join("target").rm_rf(); + assert_that( + p.cargo("build").arg("-v").arg("--bench=b1"), + execs() + .with_status(0) + .with_stderr_contains( + "\ + [RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \ + --emit=dep-info,link[..]", + ) + .with_stderr_contains( + "\ + [RUNNING] `rustc --crate-name b1 benches[/]b1.rs --emit=dep-info,link \ + -C opt-level=3[..]", + ) + .with_stderr_contains( + "\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \ + --emit=dep-info,link[..]", + ), + ); +} + +#[test] +fn targets_selected_default() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0) + // bin + .with_stderr_contains("\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \ + --emit=dep-info,link[..]") + // bench + .with_stderr_does_not_contain("\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \ + -C opt-level=3 --test [..]") + // 
unit test + .with_stderr_does_not_contain("\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \ + -C debuginfo=2 --test [..]"), + ); +} + +#[test] +fn targets_selected_all() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that( + p.cargo("build").arg("-v").arg("--all-targets"), + execs().with_status(0) + // bin + .with_stderr_contains("\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \ + --emit=dep-info,link[..]") + // bench + .with_stderr_contains("\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \ + -C opt-level=3 --test [..]") + // unit test + .with_stderr_contains("\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \ + -C debuginfo=2 --test [..]"), + ); +} + +#[test] +fn all_targets_no_lib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that( + p.cargo("build").arg("-v").arg("--all-targets"), + execs().with_status(0) + // bin + .with_stderr_contains("\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \ + --emit=dep-info,link[..]") + // bench + .with_stderr_contains("\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \ + -C opt-level=3 --test [..]") + // unit test + .with_stderr_contains("\ + [RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \ + -C debuginfo=2 --test [..]"), + ); +} + +#[test] +fn no_linkable_target() { + // Issue 3169. This is currently not an error as per discussion in PR #4797 + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + [dependencies] + the_lib = { path = "the_lib" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "the_lib/Cargo.toml", + r#" + [package] + name = "the_lib" + version = "0.1.0" + [lib] + name = "the_lib" + crate-type = ["staticlib"] + "#, + ) + .file("the_lib/src/lib.rs", "pub fn foo() {}") + .build(); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr_contains( + "\ + [WARNING] The package `the_lib` provides no linkable [..] \ + while compiling `foo`. [..] in `the_lib`'s Cargo.toml. 
[..]", + ), + ); +} + +#[test] +fn avoid_dev_deps() { + Package::new("foo", "1.0.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dev-dependencies] + baz = "1.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(101)); + assert_that( + p.cargo("build") + .masquerade_as_nightly_cargo() + .arg("-Zavoid-dev-deps"), + execs().with_status(0), + ); +} + +#[test] +fn invalid_jobs() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("build").arg("--jobs").arg("over9000"), + execs() + .with_status(1) + .with_stderr("error: Invalid value: could not parse `over9000` as a number"), + ); +} diff --git a/tests/testsuite/build_auth.rs b/tests/testsuite/build_auth.rs new file mode 100644 index 000000000..01df88036 --- /dev/null +++ b/tests/testsuite/build_auth.rs @@ -0,0 +1,269 @@ +use std; +use std::collections::HashSet; +use std::io::prelude::*; +use std::net::TcpListener; +use std::thread; + +use git2; +use bufstream::BufStream; +use cargotest::support::paths; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +// Test that HTTP auth is offered from `credential.helper` +#[test] +fn http_auth_offered() { + let server = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = server.local_addr().unwrap(); + + fn headers(rdr: &mut BufRead) -> HashSet { + let valid = ["GET", "Authorization", "Accept", "User-Agent"]; + rdr.lines() + .map(|s| s.unwrap()) + .take_while(|s| s.len() > 2) + .map(|s| s.trim().to_string()) + .filter(|s| valid.iter().any(|prefix| s.starts_with(*prefix))) + .collect() + } + + let t = thread::spawn(move || { + let mut conn = BufStream::new(server.accept().unwrap().0); + let req = headers(&mut conn); + let user_agent = "User-Agent: git/2.0 (libgit2 0.27.0)"; + conn.write_all( + b"\ + HTTP/1.1 401 Unauthorized\r\n\ + WWW-Authenticate: Basic realm=\"wheee\"\r\n + \r\n\ + ", + ).unwrap(); + assert_eq!( + req, + vec![ + "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", + "Accept: */*", + user_agent, + ].into_iter() + .map(|s| s.to_string()) + .collect() + ); + drop(conn); + + let mut conn = BufStream::new(server.accept().unwrap().0); + let req = headers(&mut conn); + conn.write_all( + b"\ + HTTP/1.1 401 Unauthorized\r\n\ + WWW-Authenticate: Basic realm=\"wheee\"\r\n + \r\n\ + ", + ).unwrap(); + assert_eq!( + req, + vec![ + "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", + "Authorization: Basic Zm9vOmJhcg==", + "Accept: */*", + user_agent, + ].into_iter() + .map(|s| s.to_string()) + .collect() + ); + }); + + let script = project("script") + .file( + "Cargo.toml", + r#" + [project] + name = "script" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + println!("username=foo"); + println!("password=bar"); + } + "#, + ) + .build(); + + assert_that(script.cargo("build").arg("-v"), execs().with_status(0)); + let script = script.bin("script"); + + let config = paths::home().join(".gitconfig"); + let mut config = git2::Config::open(&config).unwrap(); + config + .set_str("credential.helper", &script.display().to_string()) + .unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "http://127.0.0.1:{}/foo/bar" + "#, + addr.port() + 
), + ) + .file("src/main.rs", "") + .file( + ".cargo/config", + "\ + [net] + retry = 0 + ", + ) + .build(); + + // This is a "contains" check because the last error differs by platform, + // may span multiple lines, and isn't relevant to this test. + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains(&format!( + "\ +[UPDATING] git repository `http://{addr}/foo/bar` +[ERROR] failed to load source for a dependency on `bar` + +Caused by: + Unable to update http://{addr}/foo/bar + +Caused by: + failed to clone into: [..] + +Caused by: + failed to authenticate when downloading repository +attempted to find username/password via `credential.helper`, but [..] + +Caused by: +", + addr = addr + )), + ); + + t.join().ok().unwrap(); +} + +// Boy, sure would be nice to have a TLS implementation in rust! +#[test] +fn https_something_happens() { + let server = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = server.local_addr().unwrap(); + let t = thread::spawn(move || { + let mut conn = server.accept().unwrap().0; + drop(conn.write(b"1234")); + drop(conn.shutdown(std::net::Shutdown::Write)); + drop(conn.read(&mut [0; 16])); + }); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "https://127.0.0.1:{}/foo/bar" + "#, + addr.port() + ), + ) + .file("src/main.rs", "") + .file( + ".cargo/config", + "\ + [net] + retry = 0 + ", + ) + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs() + .with_status(101) + .with_stderr_contains(&format!( + "[UPDATING] git repository `https://{addr}/foo/bar`", + addr = addr + )) + .with_stderr_contains(&format!( + "\ +Caused by: + {errmsg} +", + errmsg = if cfg!(windows) { + "[..]failed to send request: [..]" + } else if cfg!(target_os = "macos") { + // OSX is difficult to tests as some builds may use + // Security.framework and others may use OpenSSL. In that case let's + // just not verify the error message here. + "[..]" + } else { + "[..]SSL error: [..]" + } + )), + ); + + t.join().ok().unwrap(); +} + +// Boy, sure would be nice to have an SSH implementation in rust! +#[test] +fn ssh_something_happens() { + let server = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = server.local_addr().unwrap(); + let t = thread::spawn(move || { + drop(server.accept().unwrap()); + }); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "ssh://127.0.0.1:{}/foo/bar" + "#, + addr.port() + ), + ) + .file("src/main.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs() + .with_status(101) + .with_stderr_contains(&format!( + "[UPDATING] git repository `ssh://{addr}/foo/bar`", + addr = addr + )) + .with_stderr_contains( + "\ +Caused by: + [..]failed to start SSH session: Failed getting banner[..] +", + ), + ); + t.join().ok().unwrap(); +} diff --git a/tests/testsuite/build_lib.rs b/tests/testsuite/build_lib.rs new file mode 100644 index 000000000..bb12c9d74 --- /dev/null +++ b/tests/testsuite/build_lib.rs @@ -0,0 +1,112 @@ +use cargotest::support::{basic_bin_manifest, execs, project, Project}; +use hamcrest::assert_that; + +fn verbose_output_for_lib(p: &Project) -> String { + format!( + "\ +[COMPILING] {name} v{version} ({url}) +[RUNNING] `rustc --crate-name {name} src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] 
\ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.root().display(), + url = p.url(), + name = "foo", + version = "0.0.1" + ) +} + +#[test] +fn build_lib_only() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.1" + authors = ["wycats@example.com"] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("build").arg("--lib").arg("-v"), + execs() + .with_status(0) + .with_stderr(verbose_output_for_lib(&p)), + ); +} + +#[test] +fn build_with_no_lib() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("--lib"), + execs() + .with_status(101) + .with_stderr("[ERROR] no library targets found"), + ); +} + +#[test] +fn build_with_relative_cargo_home_path() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.1" + authors = ["wycats@example.com"] + + [dependencies] + + "test-dependency" = { path = "src/test_dependency" } + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file("src/test_dependency/src/lib.rs", r#" "#) + .file( + "src/test_dependency/Cargo.toml", + r#" + [package] + + name = "test-dependency" + version = "0.0.1" + authors = ["wycats@example.com"] + "#, + ) + .build(); + + assert_that( + p.cargo("build").env("CARGO_HOME", "./cargo_home/"), + execs().with_status(0), + ); +} diff --git a/tests/testsuite/build_script.rs b/tests/testsuite/build_script.rs new file mode 100644 index 000000000..59aee0632 --- /dev/null +++ b/tests/testsuite/build_script.rs @@ -0,0 +1,3755 @@ +use std::env; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::PathBuf; + +use cargotest::{rustc_host, sleep_ms}; +use cargotest::support::{execs, project}; +use cargotest::support::paths::CargoPathExt; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_dir, existing_file}; + +#[test] +fn custom_build_script_failed() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "build.rs", + r#" + fn main() { + std::process::exit(101); + } + "#, + ) + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr(&format!( + "\ +[COMPILING] foo v0.5.0 ({url}) +[RUNNING] `rustc --crate-name build_script_build build.rs --crate-type bin [..]` +[RUNNING] `[..][/]build-script-build` +[ERROR] failed to run custom build command for `foo v0.5.0 ({url})` +process didn't exit successfully: `[..][/]build-script-build` (exit code: 101)", + url = p.url() + )), + ); +} + +#[test] +fn custom_build_env_vars() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [features] + bar_feat = ["bar/foo"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [features] + foo = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn hello() {} + "#, + ); + + let file_content = format!( + r#" + use 
std::env; + use std::io::prelude::*; + use std::path::Path; + use std::fs; + + fn main() {{ + let _target = env::var("TARGET").unwrap(); + let _ncpus = env::var("NUM_JOBS").unwrap(); + let _dir = env::var("CARGO_MANIFEST_DIR").unwrap(); + + let opt = env::var("OPT_LEVEL").unwrap(); + assert_eq!(opt, "0"); + + let opt = env::var("PROFILE").unwrap(); + assert_eq!(opt, "debug"); + + let debug = env::var("DEBUG").unwrap(); + assert_eq!(debug, "true"); + + let out = env::var("OUT_DIR").unwrap(); + assert!(out.starts_with(r"{0}")); + assert!(fs::metadata(&out).map(|m| m.is_dir()).unwrap_or(false)); + + let _host = env::var("HOST").unwrap(); + + let _feat = env::var("CARGO_FEATURE_FOO").unwrap(); + + let _cargo = env::var("CARGO").unwrap(); + + let rustc = env::var("RUSTC").unwrap(); + assert_eq!(rustc, "rustc"); + + let rustdoc = env::var("RUSTDOC").unwrap(); + assert_eq!(rustdoc, "rustdoc"); + }} + "#, + p.root() + .join("target") + .join("debug") + .join("build") + .display() + ); + + let p = p.file("bar/build.rs", &file_content).build(); + + assert_that( + p.cargo("build").arg("--features").arg("bar_feat"), + execs().with_status(0), + ); +} + +#[test] +fn custom_build_script_wrong_rustc_flags() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-aaa -bbb"); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains(&format!( + "\ + [ERROR] Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ({})`: \ + `-aaa -bbb`", + p.url() + )), + ); +} + +/* +#[test] +fn custom_build_script_rustc_flags() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.foo] + path = "foo" + "#) + .file("src/main.rs", r#" + fn main() {} + "#) + .file("foo/Cargo.toml", r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + "#) + .file("foo/src/lib.rs", r#" + "#) + .file("foo/build.rs", r#" + fn main() { + println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2"); + } + "#) + .build(); + + // TODO: TEST FAILS BECAUSE OF WRONG STDOUT (but otherwise, the build works) + assert_that(p.cargo("build").arg("--verbose"), + execs().with_status(101) + .with_stderr(&format!("\ +[COMPILING] bar v0.5.0 ({url}) +[RUNNING] `rustc --crate-name test {dir}{sep}src{sep}lib.rs --crate-type lib -C debuginfo=2 \ + -C metadata=[..] \ + -C extra-filename=-[..] 
\ + --out-dir {dir}{sep}target \ + --emit=dep-info,link \ + -L {dir}{sep}target \ + -L {dir}{sep}target{sep}deps` +", sep = path::SEP, +dir = p.root().display(), +url = p.url(), +))); +} +*/ + +#[test] +fn links_no_build_cmd() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] package `foo v0.5.0 (file://[..])` specifies that it links to `a` but does \ +not have a custom build script +", + ), + ); +} + +#[test] +fn links_duplicates() { + // this tests that the links_duplicates are caught at resolver time + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + + [dependencies.a-sys] + path = "a-sys" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "") + .file( + "a-sys/Cargo.toml", + r#" + [project] + name = "a-sys" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + "#, + ) + .file("a-sys/src/lib.rs", "") + .file("a-sys/build.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to select a version for `a-sys`. + ... required by package `foo v0.5.0 ([..])` +versions that meet the requirements `*` are: 0.5.0 + +the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: +package `foo v0.5.0 ([..])` + +failed to select a version for `a-sys` which could resolve this conflict +")); +} + +#[test] +fn links_duplicates_deep_dependency() { + // this tests that the links_duplicates are caught at resolver time + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a-sys] + path = "a-sys" + "#, + ) + .file("a/src/lib.rs", "") + .file("a/build.rs", "") + .file( + "a/a-sys/Cargo.toml", + r#" + [project] + name = "a-sys" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + "#, + ) + .file("a/a-sys/src/lib.rs", "") + .file("a/a-sys/build.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to select a version for `a-sys`. + ... required by package `a v0.5.0 ([..])` + ... 
which is depended on by `foo v0.5.0 ([..])` +versions that meet the requirements `*` are: 0.5.0 + +the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: +package `foo v0.5.0 ([..])` + +failed to select a version for `a-sys` which could resolve this conflict +")); +} + +#[test] +fn overrides_and_links() { + let target = rustc_host(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + fn main() { + assert_eq!(env::var("DEP_FOO_FOO").ok().expect("FOO missing"), + "bar"); + assert_eq!(env::var("DEP_FOO_BAR").ok().expect("BAR missing"), + "baz"); + } + "#, + ) + .file( + ".cargo/config", + &format!( + r#" + [target.{}.foo] + rustc-flags = "-L foo -L bar" + foo = "bar" + bar = "baz" + "#, + target + ), + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file("a/build.rs", "not valid rust code") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[..] +[..] +[..] +[..] +[..] +[RUNNING] `rustc --crate-name foo [..] -L foo -L bar` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn unused_overrides() { + let target = rustc_host(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file( + ".cargo/config", + &format!( + r#" + [target.{}.foo] + rustc-flags = "-L foo -L bar" + foo = "bar" + bar = "baz" + "#, + target + ), + ) + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn links_passes_env_vars() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + fn main() { + assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); + assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); + } + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#" + use std::env; + fn main() { + let lib = env::var("CARGO_MANIFEST_LINKS").unwrap(); + assert_eq!(lib, "foo"); + + println!("cargo:foo=bar"); + println!("cargo:bar=baz"); + } + "#, + ) + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn only_rerun_build_script() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() {} + "#, + ) + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + p.root().move_into_the_past(); + + File::create(&p.root().join("some-new-file")).unwrap(); + p.root().move_into_the_past(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo 
[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn rebuild_continues_to_pass_env_vars() { + let a = project("a") + .file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::time::Duration; + fn main() { + println!("cargo:foo=bar"); + println!("cargo:bar=baz"); + std::thread::sleep(Duration::from_millis(500)); + } + "#, + ) + .build(); + a.root().move_into_the_past(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = '{}' + "#, + a.root().display() + ), + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + fn main() { + assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); + assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); + } + "#, + ) + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + p.root().move_into_the_past(); + + File::create(&p.root().join("some-new-file")).unwrap(); + p.root().move_into_the_past(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn testing_and_such() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() {} + "#, + ) + .build(); + + println!("build"); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + p.root().move_into_the_past(); + + File::create(&p.root().join("src/lib.rs")).unwrap(); + p.root().move_into_the_past(); + + println!("test"); + assert_that( + p.cargo("test").arg("-vj1"), + execs() + .with_status(0) + .with_stderr( + "\ +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo [..]` +[RUNNING] `rustc --crate-name foo [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]foo-[..][EXE]` +[DOCTEST] foo +[RUNNING] `rustdoc --test [..]`", + ) + .with_stdout_contains_n("running 0 tests", 2), + ); + + println!("doc"); + assert_that( + p.cargo("doc").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[DOCUMENTING] foo v0.5.0 (file://[..]) +[RUNNING] `rustdoc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + File::create(&p.root().join("src/main.rs")) + .unwrap() + .write_all(b"fn main() {}") + .unwrap(); + println!("run"); + assert_that( + p.cargo("run"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[/]debug[/]foo[EXE]` +", + ), + ); +} + +#[test] +fn propagation_of_l_flags() { + let target = rustc_host(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + + [dependencies.b] + path = "../b" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-L bar"); + } + "#, + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("b/src/lib.rs", "") + .file("b/build.rs", "bad file") + .file( + ".cargo/config", + &format!( + r#" + [target.{}.foo] + rustc-flags = "-L foo" + "#, + target + ), + ) + .build(); + + assert_that( + p.cargo("build").arg("-v").arg("-j1"), + execs().with_status(0).with_stderr_contains( + "\ +[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]` +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc --crate-name foo [..] -L bar -L foo` +", + ), + ); +} + +#[test] +fn propagation_of_l_flags_new() { + let target = rustc_host(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + + [dependencies.b] + path = "../b" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=bar"); + } + "#, + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("b/src/lib.rs", "") + .file("b/build.rs", "bad file") + .file( + ".cargo/config", + &format!( + r#" + [target.{}.foo] + rustc-link-search = ["foo"] + "#, + target + ), + ) + .build(); + + assert_that( + p.cargo("build").arg("-v").arg("-j1"), + execs().with_status(0).with_stderr_contains( + "\ +[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]` +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc --crate-name foo [..] -L bar -L foo` +", + ), + ); +} + +#[test] +fn build_deps_simple() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + [build-dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + " + #[allow(unused_extern_crates)] + extern crate a; + fn main() {} + ", + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] a v0.5.0 (file://[..]) +[RUNNING] `rustc --crate-name a [..]` +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc [..] build.rs [..] --extern a=[..]` +[RUNNING] `[..][/]foo-[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn build_deps_not_for_normal() { + let target = rustc_host(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + [build-dependencies.aaaaa] + path = "a" + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate aaaaa;", + ) + .file( + "build.rs", + " + #[allow(unused_extern_crates)] + extern crate aaaaa; + fn main() {} + ", + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "aaaaa" + version = "0.5.0" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v").arg("--target").arg(&target), + execs() + .with_status(101) + .with_stderr_contains("[..]can't find crate for `aaaaa`[..]") + .with_stderr_contains( + "\ +[ERROR] Could not compile `foo`. + +Caused by: + process didn't exit successfully: [..] +", + ), + ); +} + +#[test] +fn build_cmd_with_a_build_cmd() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [build-dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + " + #[allow(unused_extern_crates)] + extern crate a; + fn main() {} + ", + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + + [build-dependencies.b] + path = "../b" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + "#[allow(unused_extern_crates)] extern crate b; fn main() {}", + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] b v0.5.0 (file://[..]) +[RUNNING] `rustc --crate-name b [..]` +[COMPILING] a v0.5.0 (file://[..]) +[RUNNING] `rustc [..] a[/]build.rs [..] --extern b=[..]` +[RUNNING] `[..][/]a-[..][/]build-script-build` +[RUNNING] `rustc --crate-name a [..]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..]target[/]debug[/]deps \ + -L [..]target[/]debug[/]deps` +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc --crate-name build_script_build build.rs --crate-type bin \ + --emit=dep-info,link \ + -C debuginfo=2 -C metadata=[..] --out-dir [..] \ + -L [..]target[/]debug[/]deps \ + --extern a=[..]liba[..].rlib` +[RUNNING] `[..][/]foo-[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo [..]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L [..]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn out_dir_is_preserved() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + use std::fs::File; + use std::path::Path; + fn main() { + let out = env::var("OUT_DIR").unwrap(); + File::create(Path::new(&out).join("foo")).unwrap(); + } + "#, + ) + .build(); + + // Make the file + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + p.root().move_into_the_past(); + + // Change to asserting that it's there + File::create(&p.root().join("build.rs")) + .unwrap() + .write_all( + br#" + use std::env; + use std::old_io::File; + fn main() { + let out = env::var("OUT_DIR").unwrap(); + File::open(&Path::new(&out).join("foo")).unwrap(); + } + "#, + ) + .unwrap(); + p.root().move_into_the_past(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + + // Run a fresh build where file should be preserved + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + + // One last time to make sure it's still there. + File::create(&p.root().join("foo")).unwrap(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn output_separate_lines() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-L foo"); + println!("cargo:rustc-flags=-l static=foo"); + } + "#, + ) + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr_contains( + "\ +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `[..][/]foo-[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo` +[ERROR] could not find native static library [..] +", + ), + ); +} + +#[test] +fn output_separate_lines_new() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=foo"); + println!("cargo:rustc-link-lib=static=foo"); + } + "#, + ) + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr_contains( + "\ +[COMPILING] foo v0.5.0 (file://[..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `[..][/]foo-[..][/]build-script-build` +[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo` +[ERROR] could not find native static library [..] 
+", + ), + ); +} + +#[cfg(not(windows))] // FIXME(#867) +#[test] +fn code_generation() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "src/main.rs", + r#" + include!(concat!(env!("OUT_DIR"), "/hello.rs")); + + fn main() { + println!("{}", message()); + } + "#, + ) + .file( + "build.rs", + r#" + use std::env; + use std::fs::File; + use std::io::prelude::*; + use std::path::PathBuf; + + fn main() { + let dst = PathBuf::from(env::var("OUT_DIR").unwrap()); + let mut f = File::create(&dst.join("hello.rs")).unwrap(); + f.write_all(b" + pub fn message() -> &'static str { + \"Hello, World!\" + } + ").unwrap(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("run"), + execs() + .with_status(0) + .with_stderr( + "\ +[COMPILING] foo v0.5.0 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]foo`", + ) + .with_stdout("Hello, World!"), + ); + + assert_that(p.cargo("test"), execs().with_status(0)); +} + +#[test] +fn release_with_build_script() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0), + ); +} + +#[test] +fn build_script_only() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#, + ) + .file("build.rs", r#"fn main() {}"#) + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + no targets specified in the manifest + either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present", + ), + ); +} + +#[test] +fn shared_dep_with_a_build_script() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = "a" + + [build-dependencies.b] + path = "b" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.5.0" + authors = [] + + [dependencies.a] + path = "../a" + "#, + ) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn transitive_dep_host() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [build-dependencies.b] + path = "b" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.5.0" + authors = [] + + [lib] + name = "b" + plugin = true + + [dependencies.a] + path = "../a" + "#, + ) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn 
test_a_lib_with_a_build_command() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + include!(concat!(env!("OUT_DIR"), "/foo.rs")); + + /// ``` + /// foo::bar(); + /// ``` + pub fn bar() { + assert_eq!(foo(), 1); + } + "#, + ) + .file( + "build.rs", + r#" + use std::env; + use std::io::prelude::*; + use std::fs::File; + use std::path::PathBuf; + + fn main() { + let out = PathBuf::from(env::var("OUT_DIR").unwrap()); + File::create(out.join("foo.rs")).unwrap().write_all(b" + fn foo() -> i32 { 1 } + ").unwrap(); + } + "#, + ) + .build(); + assert_that(p.cargo("test"), execs().with_status(0)); +} + +#[test] +fn test_dev_dep_build_script() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dev-dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("test"), execs().with_status(0)); +} + +#[test] +fn build_script_with_dynamic_native_dependency() { + let _workspace = project("ws") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["builder", "foo"] + "#, + ) + .build(); + + let build = project("ws/builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + + [lib] + name = "builder" + crate-type = ["dylib"] + plugin = true + "#, + ) + .file( + "src/lib.rs", + r#" + #[no_mangle] + pub extern fn foo() {} + "#, + ) + .build(); + + let foo = project("ws/foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [build-dependencies.bar] + path = "bar" + "#, + ) + .file( + "build.rs", + r#" + extern crate bar; + fn main() { bar::bar() } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "bar/build.rs", + r#" + use std::env; + use std::path::PathBuf; + + fn main() { + let src = PathBuf::from(env::var("SRC").unwrap()); + println!("cargo:rustc-link-search=native={}/target/debug/deps", + src.display()); + } + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() { + #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))] + #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))] + extern { fn foo(); } + unsafe { foo() } + } + "#, + ) + .build(); + + assert_that( + build + .cargo("build") + .arg("-v") + .env("RUST_LOG", "cargo::ops::cargo_rustc"), + execs().with_status(0), + ); + + assert_that( + foo.cargo("build") + .arg("-v") + .env("SRC", build.root()) + .env("RUST_LOG", "cargo::ops::cargo_rustc"), + execs().with_status(0), + ); +} + +#[test] +fn profile_and_opt_level_set_correctly() { + let build = project("builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + + fn main() { + assert_eq!(env::var("OPT_LEVEL").unwrap(), "3"); + assert_eq!(env::var("PROFILE").unwrap(), "release"); + assert_eq!(env::var("DEBUG").unwrap(), "false"); + } + "#, + ) + .build(); + assert_that(build.cargo("bench"), execs().with_status(0)); +} + +#[test] +fn build_script_with_lto() { + let 
build = project("builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + + [profile.dev] + lto = true + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + } + "#, + ) + .build(); + assert_that(build.cargo("build"), execs().with_status(0)); +} + +#[test] +fn test_duplicate_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + build = "build.rs" + + [dependencies.bar] + path = "bar" + + [build-dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { bar::do_nothing() } + "#, + ) + .file( + "build.rs", + r#" + extern crate bar; + fn main() { bar::do_nothing() } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn do_nothing() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn cfg_feedback() { + let build = project("builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "src/main.rs", + " + #[cfg(foo)] + fn main() {} + ", + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#, + ) + .build(); + assert_that(build.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn cfg_override() { + let target = rustc_host(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + "#, + ) + .file( + "src/main.rs", + " + #[cfg(foo)] + fn main() {} + ", + ) + .file("build.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{}.a] + rustc-cfg = ["foo"] + "#, + target + ), + ) + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn cfg_test() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#, + ) + .file( + "src/lib.rs", + r#" + /// + /// ``` + /// extern crate foo; + /// + /// fn main() { + /// foo::foo() + /// } + /// ``` + /// + #[cfg(foo)] + pub fn foo() {} + + #[cfg(foo)] + #[test] + fn test_foo() { + foo() + } + "#, + ) + .file( + "tests/test.rs", + r#" + #[cfg(foo)] + #[test] + fn test_bar() {} + "#, + ) + .build(); + assert_that( + p.cargo("test").arg("-v"), + execs() + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] [..] build.rs [..] +[RUNNING] `[..][/]build-script-build` +[RUNNING] [..] --cfg foo[..] +[RUNNING] [..] --cfg foo[..] +[RUNNING] [..] --cfg foo[..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]foo-[..][EXE]` +[RUNNING] `[..][/]test-[..][EXE]` +[DOCTEST] foo +[RUNNING] [..] --cfg foo[..]", + dir = p.url() + )) + .with_stdout_contains("test test_foo ... ok") + .with_stdout_contains("test test_bar ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 3), + ); +} + +#[test] +fn cfg_doc() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(foo)] + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "bar/build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=bar"); + } + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[cfg(bar)] + pub fn bar() {} + "#, + ) + .build(); + assert_that(p.cargo("doc"), execs().with_status(0)); + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that( + &p.root().join("target/doc/foo/fn.foo.html"), + existing_file(), + ); + assert_that( + &p.root().join("target/doc/bar/fn.bar.html"), + existing_file(), + ); +} + +#[test] +fn cfg_override_test() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + links = "a" + "#, + ) + .file("build.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{}.a] + rustc-cfg = ["foo"] + "#, + rustc_host() + ), + ) + .file( + "src/lib.rs", + r#" + /// + /// ``` + /// extern crate foo; + /// + /// fn main() { + /// foo::foo() + /// } + /// ``` + /// + #[cfg(foo)] + pub fn foo() {} + + #[cfg(foo)] + #[test] + fn test_foo() { + foo() + } + "#, + ) + .file( + "tests/test.rs", + r#" + #[cfg(foo)] + #[test] + fn test_bar() {} + "#, + ) + .build(); + assert_that( + p.cargo("test").arg("-v"), + execs() + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] `[..]` +[RUNNING] `[..]` +[RUNNING] `[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]foo-[..][EXE]` +[RUNNING] `[..][/]test-[..][EXE]` +[DOCTEST] foo +[RUNNING] [..] --cfg foo[..]", + dir = p.url() + )) + .with_stdout_contains("test test_foo ... ok") + .with_stdout_contains("test test_bar ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 3), + ); +} + +#[test] +fn cfg_override_doc() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + links = "a" + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + ".cargo/config", + &format!( + r#" + [target.{target}.a] + rustc-cfg = ["foo"] + [target.{target}.b] + rustc-cfg = ["bar"] + "#, + target = rustc_host() + ), + ) + .file("build.rs", "") + .file( + "src/lib.rs", + r#" + #[cfg(foo)] + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + links = "b" + "#, + ) + .file("bar/build.rs", "") + .file( + "bar/src/lib.rs", + r#" + #[cfg(bar)] + pub fn bar() {} + "#, + ) + .build(); + assert_that(p.cargo("doc"), execs().with_status(0)); + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that( + &p.root().join("target/doc/foo/fn.foo.html"), + existing_file(), + ); + assert_that( + &p.root().join("target/doc/bar/fn.bar.html"), + existing_file(), + ); +} + +#[test] +fn env_build() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "src/main.rs", + r#" + const FOO: &'static str = env!("FOO"); + fn main() { + println!("{}", FOO); + } + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-env=FOO=foo"); + } + "#, + ) + .build(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + assert_that( + p.cargo("run").arg("-v"), + execs().with_status(0).with_stdout("foo\n"), + ); +} + +#[test] +fn env_test() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-env=FOO=foo"); + } + "#, + ) + .file( + "src/lib.rs", + r#" + pub const FOO: &'static str = env!("FOO"); + "#, + ) + .file( + "tests/test.rs", + r#" + extern crate foo; + + #[test] + fn test_foo() { + assert_eq!("foo", foo::FOO); + } + "#, + ) + .build(); + assert_that( + p.cargo("test").arg("-v"), + execs() + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] [..] build.rs [..] +[RUNNING] `[..][/]build-script-build` +[RUNNING] [..] --crate-name foo[..] +[RUNNING] [..] --crate-name foo[..] +[RUNNING] [..] --crate-name test[..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]foo-[..][EXE]` +[RUNNING] `[..][/]test-[..][EXE]` +[DOCTEST] foo +[RUNNING] [..] --crate-name foo[..]", + dir = p.url() + )) + .with_stdout_contains_n("running 0 tests", 2) + .with_stdout_contains("test test_foo ... 
ok"), + ); +} + +#[test] +fn env_doc() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "src/main.rs", + r#" + const FOO: &'static str = env!("FOO"); + fn main() {} + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-env=FOO=foo"); + } + "#, + ) + .build(); + assert_that(p.cargo("doc").arg("-v"), execs().with_status(0)); +} + +#[test] +fn flags_go_into_tests() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + b = { path = "b" } + "#, + ) + .file("src/lib.rs", "") + .file("tests/foo.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + a = { path = "../a" } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=test"); + } + "#, + ) + .build(); + + assert_that( + p.cargo("test").arg("-v").arg("--test=foo"), + execs() + .with_status(0) + .with_stderr( + "\ +[COMPILING] a v0.5.0 ([..] +[RUNNING] `rustc [..] a[/]build.rs [..]` +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc [..] a[/]src[/]lib.rs [..] -L test[..]` +[COMPILING] b v0.5.0 ([..] +[RUNNING] `rustc [..] b[/]src[/]lib.rs [..] -L test[..]` +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] src[/]lib.rs [..] -L test[..]` +[RUNNING] `rustc [..] tests[/]foo.rs [..] -L test[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]foo-[..][EXE]`", + ) + .with_stdout_contains("running 0 tests"), + ); + + assert_that( + p.cargo("test").arg("-v").arg("-pb").arg("--lib"), + execs() + .with_status(0) + .with_stderr( + "\ +[FRESH] a v0.5.0 ([..] +[COMPILING] b v0.5.0 ([..] +[RUNNING] `rustc [..] b[/]src[/]lib.rs [..] -L test[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..][/]b-[..][EXE]`", + ) + .with_stdout_contains("running 0 tests"), + ); +} + +#[test] +fn diamond_passes_args_only_once() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = { path = "a" } + b = { path = "b" } + "#, + ) + .file("src/lib.rs", "") + .file("tests/foo.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + [dependencies] + b = { path = "../b" } + c = { path = "../c" } + "#, + ) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + c = { path = "../c" } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "c/Cargo.toml", + r#" + [project] + name = "c" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "c/build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=native=test"); + } + "#, + ) + .file("c/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] c v0.5.0 ([..] +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +[RUNNING] `rustc [..]` +[COMPILING] b v0.5.0 ([..] +[RUNNING] `rustc [..]` +[COMPILING] a v0.5.0 ([..] +[RUNNING] `rustc [..]` +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `[..]rlib -L native=test` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn adding_an_override_invalidates() { + let target = rustc_host(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file(".cargo/config", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=native=foo"); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +[RUNNING] `rustc [..] -L native=foo` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + File::create(p.root().join(".cargo/config")) + .unwrap() + .write_all( + format!( + " + [target.{}.foo] + rustc-link-search = [\"native=bar\"] + ", + target + ).as_bytes(), + ) + .unwrap(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] -L native=bar` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn changing_an_override_invalidates() { + let target = rustc_host(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + &format!( + " + [target.{}.foo] + rustc-link-search = [\"native=foo\"] + ", + target + ), + ) + .file("build.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] -L native=foo` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + File::create(p.root().join(".cargo/config")) + .unwrap() + .write_all( + format!( + " + [target.{}.foo] + rustc-link-search = [\"native=bar\"] + ", + target + ).as_bytes(), + ) + .unwrap(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] -L native=bar` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn fresh_builds_possible_with_link_libs() { + // The bug is non-deterministic. Sometimes you can get a fresh build + let target = rustc_host(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "nativefoo" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + &format!( + " + [target.{}.nativefoo] + rustc-link-lib = [\"a\"] + rustc-link-search = [\"./b\"] + rustc-flags = \"-l z -L ./\" + ", + target + ), + ) + .file("build.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("build") + .arg("-v") + .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint=info"), + execs().with_status(0).with_stderr( + "\ +[FRESH] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn fresh_builds_possible_with_multiple_metadata_overrides() { + // The bug is non-deterministic. 
Sometimes you can get a fresh build
+ let target = rustc_host();
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+ links = "foo"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config",
+ &format!(
+ "
+ [target.{}.foo]
+ a = \"\"
+ b = \"\"
+ c = \"\"
+ d = \"\"
+ e = \"\"
+ ",
+ target
+ ),
+ )
+ .file("build.rs", "")
+ .build();
+
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build")
+ .arg("-v")
+ .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint=info"),
+ execs().with_status(0).with_stderr(
+ "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ ),
+ );
+}
+
+#[test]
+fn rebuild_only_on_explicit_paths() {
+ let p = project("a")
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "a"
+ version = "0.5.0"
+ authors = []
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo:rerun-if-changed=foo");
+ println!("cargo:rerun-if-changed=bar");
+ }
+ "#,
+ )
+ .build();
+
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
+
+ // the files don't exist yet, so the build script should always rerun
+ println!("run without");
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
+[COMPILING] a v0.5.0 ([..])
+[RUNNING] `[..][/]build-script-build`
+[RUNNING] `rustc [..] src[/]lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ ),
+ );
+
+ sleep_ms(1000);
+ File::create(p.root().join("foo")).unwrap();
+ File::create(p.root().join("bar")).unwrap();
+
+ // now they exist, so run once, catch the mtime, then it shouldn't run again
+ println!("run with");
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
+[COMPILING] a v0.5.0 ([..])
+[RUNNING] `[..][/]build-script-build`
+[RUNNING] `rustc [..] src[/]lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ ),
+ );
+
+ println!("run with2");
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
+[FRESH] a v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ ),
+ );
+
+ sleep_ms(1000);
+
+ // random other files do not affect freshness
+ println!("run baz");
+ File::create(p.root().join("baz")).unwrap();
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
+[FRESH] a v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ ),
+ );
+
+ // but changing dependent files does
+ println!("run foo change");
+ File::create(p.root().join("foo")).unwrap();
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
+[COMPILING] a v0.5.0 ([..])
+[RUNNING] `[..][/]build-script-build`
+[RUNNING] `rustc [..] src[/]lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ ),
+ );
+
+ // ... as does deleting a file
+ println!("run foo delete");
+ fs::remove_file(p.root().join("bar")).unwrap();
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
+[COMPILING] a v0.5.0 ([..])
+[RUNNING] `[..][/]build-script-build`
+[RUNNING] `rustc [..] src[/]lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+", + ), + ); +} + +#[test] +fn doctest_recieves_build_link_args() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=native=bar"); + } + "#, + ) + .build(); + + assert_that( + p.cargo("test").arg("-v"), + execs().with_status(0).with_stderr_contains( + "[RUNNING] `rustdoc --test [..] --crate-name foo [..]-L native=bar[..]`", + ), + ); +} + +#[test] +fn please_respect_the_dag() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies] + a = { path = 'a' } + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=native=foo"); + } + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=native=bar"); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[RUNNING] `rustc [..] -L native=foo -L native=bar[..]`"), + ); +} + +#[test] +fn non_utf8_output() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + use std::io::prelude::*; + + fn main() { + let mut out = std::io::stdout(); + // print something that's not utf8 + out.write_all(b"\xff\xff\n").unwrap(); + + // now print some cargo metadata that's utf8 + println!("cargo:rustc-cfg=foo"); + + // now print more non-utf8 + out.write_all(b"\xff\xff\n").unwrap(); + } + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(foo)] + fn main() {} + "#, + ) + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn custom_target_dir() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + target-dir = 'test' + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn panic_abort_with_build_scripts() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [profile.release] + panic = 'abort' + + [dependencies] + a = { path = "a" } + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate a;", + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + + [build-dependencies] + b = { path = "../b" } + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + "#[allow(unused_extern_crates)] extern crate b; fn main() {}", + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + "#, + 
) + .file("b/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0), + ); +} + +#[test] +fn warnings_emitted() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:warning=foo"); + println!("cargo:warning=bar"); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +warning: foo +warning: bar +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn warnings_hidden_for_upstream() { + Package::new("bar", "0.1.0") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:warning=foo"); + println!("cargo:warning=bar"); + } + "#, + ) + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] bar v0.1.0 ([..]) +[COMPILING] bar v0.1.0 +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +[RUNNING] `rustc [..]` +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn warnings_printed_on_vv() { + Package::new("bar", "0.1.0") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:warning=foo"); + println!("cargo:warning=bar"); + } + "#, + ) + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-vv"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] bar v0.1.0 ([..]) +[COMPILING] bar v0.1.0 +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +warning: foo +warning: bar +[RUNNING] `rustc [..]` +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn output_shows_on_vv() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::io::prelude::*; + + fn main() { + std::io::stderr().write_all(b"stderr\n").unwrap(); + std::io::stdout().write_all(b"stdout\n").unwrap(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("-vv"), + execs().with_status(0).with_stdout("stdout").with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +stderr +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn links_with_dots() { + let target = rustc_host(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + links = "a.b" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=bar") + } + "#, + ) + .file( + ".cargo/config", + &format!( + r#" + [target.{}.'a.b'] + rustc-link-search = ["foo"] + "#, + target + ), + ) + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..] [..] -L foo[..]`"), + ); +} + +#[test] +fn rustc_and_rustdoc_set_correctly() { + let p = project("builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + + fn main() { + assert_eq!(env::var("RUSTC").unwrap(), "rustc"); + assert_eq!(env::var("RUSTDOC").unwrap(), "rustdoc"); + } + "#, + ) + .build(); + assert_that(p.cargo("bench"), execs().with_status(0)); +} + +#[test] +fn cfg_env_vars_available() { + let p = project("builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + + fn main() { + let fam = env::var("CARGO_CFG_TARGET_FAMILY").unwrap(); + if cfg!(unix) { + assert_eq!(fam, "unix"); + } else { + assert_eq!(fam, "windows"); + } + } + "#, + ) + .build(); + assert_that(p.cargo("bench"), execs().with_status(0)); +} + +#[test] +fn switch_features_rerun() { + let p = project("builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = "build.rs" + + [features] + foo = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + println!(include_str!(concat!(env!("OUT_DIR"), "/output"))); + } + "#, + ) + .file( + "build.rs", + r#" + use std::env; + use std::fs::File; + use std::io::Write; + use std::path::Path; + + fn main() { + let out_dir = env::var_os("OUT_DIR").unwrap(); + let out_dir = Path::new(&out_dir).join("output"); + let mut f = File::create(&out_dir).unwrap(); + + if env::var_os("CARGO_FEATURE_FOO").is_some() { + f.write_all(b"foo").unwrap(); + } else { + f.write_all(b"bar").unwrap(); + } + } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("-v").arg("--features=foo"), + execs().with_status(0).with_stdout("foo\n"), + ); + assert_that( + p.cargo("run").arg("-v"), + execs().with_status(0).with_stdout("bar\n"), + ); + assert_that( + p.cargo("run").arg("-v").arg("--features=foo"), + execs().with_status(0).with_stdout("foo\n"), + ); +} + +#[test] +fn assume_build_script_when_build_rs_present() { + let p = project("builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + if ! 
cfg!(foo) { + panic!("the build script was not run"); + } + } + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#, + ) + .build(); + + assert_that(p.cargo("run").arg("-v"), execs().with_status(0)); +} + +#[test] +fn if_build_set_to_false_dont_treat_build_rs_as_build_script() { + let p = project("builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + build = false + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + if cfg!(foo) { + panic!("the build script was run"); + } + } + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#, + ) + .build(); + + assert_that(p.cargo("run").arg("-v"), execs().with_status(0)); +} + +#[test] +fn deterministic_rustc_dependency_flags() { + // This bug is non-deterministic hence the large number of dependencies + // in the hopes it will have a much higher chance of triggering it. + + Package::new("dep1", "0.1.0") + .file( + "Cargo.toml", + r#" + [project] + name = "dep1" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-L native=test1"); + } + "#, + ) + .file("src/lib.rs", "") + .publish(); + Package::new("dep2", "0.1.0") + .file( + "Cargo.toml", + r#" + [project] + name = "dep2" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-L native=test2"); + } + "#, + ) + .file("src/lib.rs", "") + .publish(); + Package::new("dep3", "0.1.0") + .file( + "Cargo.toml", + r#" + [project] + name = "dep3" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-L native=test3"); + } + "#, + ) + .file("src/lib.rs", "") + .publish(); + Package::new("dep4", "0.1.0") + .file( + "Cargo.toml", + r#" + [project] + name = "dep4" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-L native=test4"); + } + "#, + ) + .file("src/lib.rs", "") + .publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "*" + dep2 = "*" + dep3 = "*" + dep4 = "*" + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr_contains( + "\ +[RUNNING] `rustc --crate-name foo [..] -L native=test1 -L native=test2 \ +-L native=test3 -L native=test4` +", + ), + ); +} + +#[test] +fn links_duplicates_with_cycle() { + // this tests that the links_duplicates are caught at resolver time + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + + [dependencies.a] + path = "a" + + [dev-dependencies] + b = { path = "b" } + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file("a/build.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + + [dependencies] + foo = { path = ".." 
} + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101) + .with_stderr("\ +error: failed to select a version for `a`. + ... required by package `foo v0.5.0 ([..])` +versions that meet the requirements `*` are: 0.5.0 + +the package `a` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: +package `foo v0.5.0 ([..])` + +failed to select a version for `a` which could resolve this conflict +")); +} + +#[test] +fn rename_with_link_search_path() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [lib] + crate-type = ["cdylib"] + "#, + ) + .file( + "src/lib.rs", + " + #[no_mangle] + pub extern fn cargo_test_foo() {} + ", + ); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let p2 = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + "#, + ) + .file( + "build.rs", + r#" + use std::env; + use std::fs; + use std::path::PathBuf; + + fn main() { + // Move the `libfoo.so` from the root of our project into the + // build directory. This way Cargo should automatically manage + // `LD_LIBRARY_PATH` and such. + let root = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); + let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); + let src = root.join(&file); + + let dst_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + let dst = dst_dir.join(&file); + + fs::copy(&src, &dst).unwrap(); + // handle windows, like below + drop(fs::copy(root.join("foo.dll.lib"), dst_dir.join("foo.dll.lib"))); + + println!("cargo:rerun-if-changed=build.rs"); + if cfg!(target_env = "msvc") { + println!("cargo:rustc-link-lib=foo.dll"); + } else { + println!("cargo:rustc-link-lib=foo"); + } + println!("cargo:rustc-link-search={}", + dst.parent().unwrap().display()); + } + "#, + ) + .file( + "src/main.rs", + r#" + extern { + #[link_name = "cargo_test_foo"] + fn foo(); + } + + fn main() { + unsafe { foo(); } + } + "#, + ); + let p2 = p2.build(); + + // Move the output `libfoo.so` into the directory of `p2`, and then delete + // the `p` project. On OSX the `libfoo.dylib` artifact references the + // original path in `p` so we want to make sure that it can't find it (hence + // the deletion). + let root = PathBuf::from(p.root()); + let root = root.join("target").join("debug").join("deps"); + let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); + let src = root.join(&file); + + let dst = p2.root().join(&file); + + fs::copy(&src, &dst).unwrap(); + // copy the import library for windows, if it exists + drop(fs::copy( + &root.join("foo.dll.lib"), + p2.root().join("foo.dll.lib"), + )); + fs::remove_dir_all(p.root()).unwrap(); + + // Everything should work the first time + assert_that(p2.cargo("run"), execs().with_status(0)); + + // Now rename the root directory and rerun `cargo run`. Not only should we + // not build anything but we also shouldn't crash. + let mut new = p2.root(); + new.pop(); + new.push("bar2"); + fs::rename(p2.root(), &new).unwrap(); + assert_that( + p2.cargo("run").cwd(&new), + execs().with_status(0).with_stderr( + "\ +[FINISHED] [..] +[RUNNING] [..] 
+", + ), + ); +} diff --git a/tests/testsuite/build_script_env.rs b/tests/testsuite/build_script_env.rs new file mode 100644 index 000000000..382ef2585 --- /dev/null +++ b/tests/testsuite/build_script_env.rs @@ -0,0 +1,140 @@ +use std::fs::File; + +use cargotest::sleep_ms; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn rerun_if_env_changes() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.5.0" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rerun-if-env-changed=FOO"); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +", + ), + ); + assert_that( + p.cargo("build").env("FOO", "bar"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +", + ), + ); + assert_that( + p.cargo("build").env("FOO", "baz"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +", + ), + ); + assert_that( + p.cargo("build").env("FOO", "baz"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +", + ), + ); +} + +#[test] +fn rerun_if_env_or_file_changes() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.5.0" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rerun-if-env-changed=FOO"); + println!("cargo:rerun-if-changed=foo"); + } + "#, + ) + .file("foo", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +", + ), + ); + assert_that( + p.cargo("build").env("FOO", "bar"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] +", + ), + ); + assert_that( + p.cargo("build").env("FOO", "bar"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); + sleep_ms(1000); + File::create(p.root().join("foo")).unwrap(); + assert_that( + p.cargo("build").env("FOO", "bar"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] [..] 
+", + ), + ); +} diff --git a/tests/testsuite/cargo_alias_config.rs b/tests/testsuite/cargo_alias_config.rs new file mode 100644 index 000000000..a9e3df703 --- /dev/null +++ b/tests/testsuite/cargo_alias_config.rs @@ -0,0 +1,172 @@ +use cargotest::support::{basic_bin_manifest, execs, project}; +use hamcrest::assert_that; + +#[test] +fn alias_incorrect_config_type() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn main() { + }"#, + ) + .file( + ".cargo/config", + r#" + [alias] + b-cargo-test = 5 + "#, + ) + .build(); + + assert_that( + p.cargo("b-cargo-test").arg("-v"), + execs().with_status(101).with_stderr_contains( + "[ERROR] invalid configuration \ +for key `alias.b-cargo-test` +expected a list, but found a integer for [..]", + ), + ); +} + +#[test] +fn alias_default_config_overrides_config() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn main() { + }"#, + ) + .file( + ".cargo/config", + r#" + [alias] + b = "not_build" + "#, + ) + .build(); + + assert_that( + p.cargo("b").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[COMPILING] foo v0.5.0 [..]"), + ); +} + +#[test] +fn alias_config() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn main() { + }"#, + ) + .file( + ".cargo/config", + r#" + [alias] + b-cargo-test = "build" + "#, + ) + .build(); + + assert_that( + p.cargo("b-cargo-test").arg("-v"), + execs().with_status(0).with_stderr_contains( + "[COMPILING] foo v0.5.0 [..] +[RUNNING] `rustc --crate-name foo [..]", + ), + ); +} + +#[test] +fn alias_list_test() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn main() { + }"#, + ) + .file( + ".cargo/config", + r#" + [alias] + b-cargo-test = ["build", "--release"] + "#, + ) + .build(); + + assert_that( + p.cargo("b-cargo-test").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[COMPILING] foo v0.5.0 [..]") + .with_stderr_contains("[RUNNING] `rustc --crate-name [..]"), + ); +} + +#[test] +fn alias_with_flags_config() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn main() { + }"#, + ) + .file( + ".cargo/config", + r#" + [alias] + b-cargo-test = "build --release" + "#, + ) + .build(); + + assert_that( + p.cargo("b-cargo-test").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[COMPILING] foo v0.5.0 [..]") + .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]"), + ); +} + +#[test] +fn cant_shadow_builtin() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn main() { + }"#, + ) + .file( + ".cargo/config", + r#" + [alias] + build = "fetch" + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} diff --git a/tests/testsuite/cargo_command.rs b/tests/testsuite/cargo_command.rs new file mode 100644 index 000000000..f3ceeca63 --- /dev/null +++ b/tests/testsuite/cargo_command.rs @@ -0,0 +1,333 @@ +use std::env; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; +use std::str; + +use cargo; +use cargotest::cargo_process; +use cargotest::support::paths::{self, CargoPathExt}; +use cargotest::support::registry::Package; +use cargotest::support::{basic_bin_manifest, cargo_exe, execs, project, Project}; +use hamcrest::{assert_that, existing_file}; + +#[cfg_attr(windows, allow(dead_code))] +enum FakeKind<'a> { + Executable, + Symlink { target: &'a Path }, +} + +/// Add an empty file with executable flags (and platform-dependent suffix). +/// TODO: move this to `Project` if other cases using this emerge. +fn fake_file(proj: Project, dir: &Path, name: &str, kind: &FakeKind) -> Project { + let path = proj.root() + .join(dir) + .join(&format!("{}{}", name, env::consts::EXE_SUFFIX)); + path.parent().unwrap().mkdir_p(); + match *kind { + FakeKind::Executable => { + File::create(&path).unwrap(); + make_executable(&path); + } + FakeKind::Symlink { target } => { + make_symlink(&path, target); + } + } + return proj; + + #[cfg(unix)] + fn make_executable(p: &Path) { + use std::os::unix::prelude::*; + + let mut perms = fs::metadata(p).unwrap().permissions(); + let mode = perms.mode(); + perms.set_mode(mode | 0o111); + fs::set_permissions(p, perms).unwrap(); + } + #[cfg(windows)] + fn make_executable(_: &Path) {} + #[cfg(unix)] + fn make_symlink(p: &Path, t: &Path) { + ::std::os::unix::fs::symlink(t, p).expect("Failed to create symlink"); + } + #[cfg(windows)] + fn make_symlink(_: &Path, _: &Path) { + panic!("Not supported") + } +} + +fn path() -> Vec { + env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect() +} + +#[test] +fn list_command_looks_at_path() { + let proj = project("list-non-overlapping").build(); + let proj = fake_file( + proj, + Path::new("path-test"), + "cargo-1", + &FakeKind::Executable, + ); + let mut pr = cargo_process(); + + let mut path = path(); + path.push(proj.root().join("path-test")); + let path = env::join_paths(path.iter()).unwrap(); + let output = pr.arg("-v").arg("--list").env("PATH", &path); + let output = output.exec_with_output().unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); + assert!( + output.contains("\n 1 "), + "missing 1: {}", + output + ); +} + +// windows and symlinks don't currently agree that well +#[cfg(unix)] +#[test] +fn list_command_resolves_symlinks() { + let proj = project("list-non-overlapping").build(); + let proj = fake_file( + proj, + Path::new("path-test"), + "cargo-2", + &FakeKind::Symlink { + target: &cargo_exe(), + }, + ); + let mut pr = cargo_process(); + + let mut path = path(); + path.push(proj.root().join("path-test")); + let path = env::join_paths(path.iter()).unwrap(); + let output = pr.arg("-v").arg("--list").env("PATH", &path); + let output = output.exec_with_output().unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); + assert!( + output.contains("\n 2 "), + "missing 2: {}", + output + ); +} + +#[test] +fn find_closest_biuld_to_build() { + assert_that( + cargo_process().arg("biuld"), + execs().with_status(101).with_stderr_contains( + "\ +error: no such subcommand: `biuld` + +Did you mean `build`? +", + ), + ); + + // But, if we actually have `biuld`, it must work! 
+ // https://github.com/rust-lang/cargo/issues/5201 + Package::new("cargo-biuld", "1.0.0") + .file( + "src/main.rs", + r#" + fn main() { + println!("Similar, but not identical to, build"); + } + "#, + ) + .publish(); + + assert_that( + cargo_process().arg("install").arg("cargo-biuld"), + execs().with_status(0), + ); + assert_that( + cargo_process().arg("biuld"), + execs() + .with_status(0) + .with_stdout("Similar, but not identical to, build\n"), + ); + assert_that( + cargo_process().arg("--list"), + execs() + .with_status(0) + .with_stdout_contains(" build\n") + .with_stdout_contains(" biuld\n"), + ); +} + +// if a subcommand is more than 3 edit distance away, we don't make a suggestion +#[test] +fn find_closest_dont_correct_nonsense() { + let mut pr = cargo_process(); + pr.arg("there-is-no-way-that-there-is-a-command-close-to-this") + .cwd(&paths::root()); + + assert_that( + pr, + execs().with_status(101).with_stderr( + "[ERROR] no such subcommand: \ + `there-is-no-way-that-there-is-a-command-close-to-this` +", + ), + ); +} + +#[test] +fn displays_subcommand_on_error() { + let mut pr = cargo_process(); + pr.arg("invalid-command"); + + assert_that( + pr, + execs().with_status(101).with_stderr( + "[ERROR] no such subcommand: `invalid-command` +", + ), + ); +} + +#[test] +fn override_cargo_home() { + let root = paths::root(); + let my_home = root.join("my_home"); + fs::create_dir(&my_home).unwrap(); + File::create(&my_home.join("config")) + .unwrap() + .write_all( + br#" + [cargo-new] + name = "foo" + email = "bar" + git = false + "#, + ) + .unwrap(); + + assert_that( + cargo_process() + .arg("new") + .arg("foo") + .env("USER", "foo") + .env("CARGO_HOME", &my_home), + execs().with_status(0), + ); + + let toml = paths::root().join("foo/Cargo.toml"); + let mut contents = String::new(); + File::open(&toml) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.contains(r#"authors = ["foo "]"#)); +} + +#[test] +fn cargo_subcommand_env() { + let src = format!( + r#" + use std::env; + + fn main() {{ + println!("{{}}", env::var("{}").unwrap()); + }} + "#, + cargo::CARGO_ENV + ); + + let p = project("cargo-envtest") + .file("Cargo.toml", &basic_bin_manifest("cargo-envtest")) + .file("src/main.rs", &src) + .build(); + + let target_dir = p.target_debug_dir(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("cargo-envtest"), existing_file()); + + let mut pr = cargo_process(); + let cargo = cargo_exe().canonicalize().unwrap(); + let mut path = path(); + path.push(target_dir); + let path = env::join_paths(path.iter()).unwrap(); + + assert_that( + pr.arg("envtest").env("PATH", &path), + execs().with_status(0).with_stdout(cargo.to_str().unwrap()), + ); +} + +#[test] +fn cargo_subcommand_args() { + let p = project("cargo-foo") + .file( + "Cargo.toml", + r#" + [package] + name = "cargo-foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + let args: Vec<_> = ::std::env::args().collect(); + println!("{:?}", args); + } + "#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + let cargo_foo_bin = p.bin("cargo-foo"); + assert_that(&cargo_foo_bin, existing_file()); + + let mut path = path(); + path.push(p.target_debug_dir()); + let path = env::join_paths(path.iter()).unwrap(); + + assert_that( + cargo_process() + .env("PATH", &path) + .arg("foo") + .arg("bar") + .arg("-v") + .arg("--help"), + execs().with_status(0).with_stdout(format!( + r#"[{:?}, "foo", "bar", "-v", "--help"]"#, + 
cargo_foo_bin + )), + ); +} + +#[test] +fn cargo_help() { + assert_that(cargo_process(), execs().with_status(0)); + assert_that(cargo_process().arg("help"), execs().with_status(0)); + assert_that(cargo_process().arg("-h"), execs().with_status(0)); + assert_that( + cargo_process().arg("help").arg("build"), + execs().with_status(0), + ); + assert_that( + cargo_process().arg("build").arg("-h"), + execs().with_status(0), + ); + assert_that( + cargo_process().arg("help").arg("help"), + execs().with_status(0), + ); +} + +#[test] +fn explain() { + assert_that( + cargo_process().arg("--explain").arg("E0001"), + execs().with_status(0).with_stdout_contains( + "This error suggests that the expression arm corresponding to the noted pattern", + ), + ); +} diff --git a/tests/testsuite/cargo_features.rs b/tests/testsuite/cargo_features.rs new file mode 100644 index 000000000..82de88676 --- /dev/null +++ b/tests/testsuite/cargo_features.rs @@ -0,0 +1,327 @@ +use cargotest::ChannelChanger; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn feature_required() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the `im-a-teapot` manifest key is unstable and may not work properly in England + +Caused by: + feature `test-dummy-unstable` is required + +consider adding `cargo-features = [\"test-dummy-unstable\"]` to the manifest +", + ), + ); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the `im-a-teapot` manifest key is unstable and may not work properly in England + +Caused by: + feature `test-dummy-unstable` is required + +this Cargo does not support nightly features, but if you +switch to nightly channel you can add +`cargo-features = [\"test-dummy-unstable\"]` to enable this feature +", + ), + ); +} + +#[test] +fn unknown_feature() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["foo"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + unknown cargo feature `foo` +", + ), + ); +} + +#[test] +fn stable_feature_warns() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-stable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +warning: the cargo feature `test-dummy-stable` is now stable and is no longer \ +necessary to be listed in the manifest +[COMPILING] a [..] +[FINISHED] [..] +", + ), + ); +} + +#[test] +fn nightly_feature_requires_nightly() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stderr( + "\ +[COMPILING] a [..] +[FINISHED] [..] 
+", + ), + ); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ + but this is the `stable` channel +", + ), + ); +} + +#[test] +fn nightly_feature_requires_nightly_in_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file("a/src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stderr( + "\ +[COMPILING] a [..] +[COMPILING] b [..] +[FINISHED] [..] +", + ), + ); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to load source for a dependency on `a` + +Caused by: + Unable to update [..] + +Caused by: + failed to parse manifest at `[..]` + +Caused by: + the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ + but this is the `stable` channel +", + ), + ); +} + +#[test] +fn cant_publish() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stderr( + "\ +[COMPILING] a [..] +[FINISHED] [..] +", + ), + ); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ + but this is the `stable` channel +", + ), + ); +} + +#[test] +fn z_flags_rejected() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").arg("-Zprint-im-a-teapot"), + execs() + .with_status(101) + .with_stderr("error: the `-Z` flag is only accepted on the nightly channel of Cargo"), + ); + + assert_that( + p.cargo("build").masquerade_as_nightly_cargo().arg("-Zarg"), + execs() + .with_status(101) + .with_stderr("error: unknown `-Z` flag specified: arg"), + ); + + assert_that( + p.cargo("build") + .masquerade_as_nightly_cargo() + .arg("-Zprint-im-a-teapot"), + execs() + .with_status(0) + .with_stdout("im-a-teapot = true\n") + .with_stderr( + "\ +[COMPILING] a [..] +[FINISHED] [..] 
+", + ), + ); +} + +#[test] +fn publish_rejected() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("publish").masquerade_as_nightly_cargo(), + execs().with_status(101).with_stderr( + "error: cannot publish crates which activate nightly-only cargo features to crates.io", + ), + ); +} diff --git a/tests/testsuite/cargotest/install.rs b/tests/testsuite/cargotest/install.rs new file mode 100644 index 000000000..0e6e643bb --- /dev/null +++ b/tests/testsuite/cargotest/install.rs @@ -0,0 +1,35 @@ +use std::fmt; +use std::path::{Path, PathBuf}; + +use hamcrest::{existing_file, MatchResult, Matcher}; + +use cargotest::support::paths; + +pub use self::InstalledExe as has_installed_exe; + +pub fn cargo_home() -> PathBuf { + paths::home().join(".cargo") +} + +pub struct InstalledExe(pub &'static str); + +pub fn exe(name: &str) -> String { + if cfg!(windows) { + format!("{}.exe", name) + } else { + name.to_string() + } +} + +impl> Matcher

for InstalledExe { + fn matches(&self, path: P) -> MatchResult { + let path = path.as_ref().join("bin").join(exe(self.0)); + existing_file().matches(&path) + } +} + +impl fmt::Debug for InstalledExe { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "installed exe `{}`", self.0) + } +} diff --git a/tests/testsuite/cargotest/mod.rs b/tests/testsuite/cargotest/mod.rs new file mode 100644 index 000000000..73d5c370b --- /dev/null +++ b/tests/testsuite/cargotest/mod.rs @@ -0,0 +1,86 @@ +use std::ffi::OsStr; +use std::time::Duration; + +use cargo::util::Rustc; +use cargo; +use std::path::PathBuf; + +#[macro_use] +pub mod support; + +pub mod install; + +thread_local!(pub static RUSTC: Rustc = Rustc::new(PathBuf::from("rustc"), None).unwrap()); + +pub fn rustc_host() -> String { + RUSTC.with(|r| r.host.clone()) +} + +pub fn is_nightly() -> bool { + RUSTC.with(|r| r.verbose_version.contains("-nightly") || r.verbose_version.contains("-dev")) +} + +pub fn process>(t: T) -> cargo::util::ProcessBuilder { + _process(t.as_ref()) +} + +fn _process(t: &OsStr) -> cargo::util::ProcessBuilder { + let mut p = cargo::util::process(t); + p.cwd(&support::paths::root()) + .env_remove("CARGO_HOME") + .env("HOME", support::paths::home()) + .env("CARGO_HOME", support::paths::home().join(".cargo")) + .env("__CARGO_TEST_ROOT", support::paths::root()) + + // Force cargo to think it's on the stable channel for all tests, this + // should hopefully not surprise us as we add cargo features over time and + // cargo rides the trains. + .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "stable") + + // For now disable incremental by default as support hasn't ridden to the + // stable channel yet. Once incremental support hits the stable compiler we + // can switch this to one and then fix the tests. + .env("CARGO_INCREMENTAL", "0") + + // This env var can switch the git backend from libgit2 to git2-curl, which + // can tweak error messages and cause some tests to fail, so let's forcibly + // remove it. 
+ .env_remove("CARGO_HTTP_CHECK_REVOKE") + + .env_remove("__CARGO_DEFAULT_LIB_METADATA") + .env_remove("RUSTC") + .env_remove("RUSTDOC") + .env_remove("RUSTC_WRAPPER") + .env_remove("RUSTFLAGS") + .env_remove("XDG_CONFIG_HOME") // see #2345 + .env("GIT_CONFIG_NOSYSTEM", "1") // keep trying to sandbox ourselves + .env_remove("EMAIL") + .env_remove("MFLAGS") + .env_remove("MAKEFLAGS") + .env_remove("CARGO_MAKEFLAGS") + .env_remove("GIT_AUTHOR_NAME") + .env_remove("GIT_AUTHOR_EMAIL") + .env_remove("GIT_COMMITTER_NAME") + .env_remove("GIT_COMMITTER_EMAIL") + .env_remove("CARGO_TARGET_DIR") // we assume 'target' + .env_remove("MSYSTEM"); // assume cmd.exe everywhere on windows + return p; +} + +pub trait ChannelChanger: Sized { + fn masquerade_as_nightly_cargo(&mut self) -> &mut Self; +} + +impl ChannelChanger for cargo::util::ProcessBuilder { + fn masquerade_as_nightly_cargo(&mut self) -> &mut Self { + self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") + } +} + +pub fn cargo_process() -> cargo::util::ProcessBuilder { + process(&support::cargo_exe()) +} + +pub fn sleep_ms(ms: u64) { + ::std::thread::sleep(Duration::from_millis(ms)); +} diff --git a/tests/testsuite/cargotest/support/cross_compile.rs b/tests/testsuite/cargotest/support/cross_compile.rs new file mode 100644 index 000000000..e0ad93ae3 --- /dev/null +++ b/tests/testsuite/cargotest/support/cross_compile.rs @@ -0,0 +1,137 @@ +use std::env; +use std::process::Command; +use std::sync::{Once, ONCE_INIT}; +use std::sync::atomic::{AtomicBool, Ordering, ATOMIC_BOOL_INIT}; + +use cargotest::support::{basic_bin_manifest, main_file, project}; + +pub fn disabled() -> bool { + // First, disable if ./configure requested so + match env::var("CFG_DISABLE_CROSS_TESTS") { + Ok(ref s) if *s == "1" => return true, + _ => {} + } + + // Right now the windows bots cannot cross compile due to the mingw setup, + // so we disable ourselves on all but macos/linux setups where the rustc + // install script ensures we have both architectures + if !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) { + return true; + } + + // It's not particularly common to have a cross-compilation setup, so + // try to detect that before we fail a bunch of tests through no fault + // of the user. + static CAN_RUN_CROSS_TESTS: AtomicBool = ATOMIC_BOOL_INIT; + static CHECK: Once = ONCE_INIT; + + let cross_target = alternate(); + + CHECK.call_once(|| { + let p = project("cross_test") + .file("Cargo.toml", &basic_bin_manifest("cross_test")) + .file("src/cross_test.rs", &main_file(r#""testing!""#, &[])) + .build(); + + let result = p.cargo("build") + .arg("--target") + .arg(&cross_target) + .exec_with_output(); + + if result.is_ok() { + CAN_RUN_CROSS_TESTS.store(true, Ordering::SeqCst); + } + }); + + if CAN_RUN_CROSS_TESTS.load(Ordering::SeqCst) { + // We were able to compile a simple project, so the user has the + // necessary std:: bits installed. Therefore, tests should not + // be disabled. + return false; + } + + // We can't compile a simple cross project. We want to warn the user + // by failing a single test and having the remainder of the cross tests + // pass. We don't use std::sync::Once here because panicing inside its + // call_once method would poison the Once instance, which is not what + // we want. + static HAVE_WARNED: AtomicBool = ATOMIC_BOOL_INIT; + + if HAVE_WARNED.swap(true, Ordering::SeqCst) { + // We are some other test and somebody else is handling the warning. + // Just disable the current test. 
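+        // (`AtomicBool::swap` returns the previous value, so only the first
+        // test to get here sees `false` and falls through to the panic that
+        // prints the warning; everyone else takes this early return.)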
+ return true; + } + + // We are responsible for warning the user, which we do by panicing. + let rustup_available = Command::new("rustup").output().is_ok(); + + let linux_help = if cfg!(target_os = "linux") { + " + +You may need to install runtime libraries for your Linux distribution as well." + .to_string() + } else { + "".to_string() + }; + + let rustup_help = if rustup_available { + format!( + " + +Alternatively, you can install the necessary libraries for cross-compilation with + + rustup target add {}{}", + cross_target, linux_help + ) + } else { + "".to_string() + }; + + panic!( + "Cannot cross compile to {}. + +This failure can be safely ignored. If you would prefer to not see this +failure, you can set the environment variable CFG_DISABLE_CROSS_TESTS to \"1\".{} +", + cross_target, rustup_help + ); +} + +pub fn alternate() -> String { + let platform = match env::consts::OS { + "linux" => "unknown-linux-gnu", + "macos" => "apple-darwin", + "windows" => "pc-windows-msvc", + _ => unreachable!(), + }; + let arch = match env::consts::ARCH { + "x86" => "x86_64", + "x86_64" => "i686", + _ => unreachable!(), + }; + format!("{}-{}", arch, platform) +} + +pub fn alternate_arch() -> &'static str { + match env::consts::ARCH { + "x86" => "x86_64", + "x86_64" => "x86", + _ => unreachable!(), + } +} + +pub fn host() -> String { + let platform = match env::consts::OS { + "linux" => "unknown-linux-gnu", + "macos" => "apple-darwin", + "windows" => "pc-windows-msvc", + _ => unreachable!(), + }; + let arch = match env::consts::ARCH { + "x86" => "i686", + "x86_64" => "x86_64", + _ => unreachable!(), + }; + format!("{}-{}", arch, platform) +} diff --git a/tests/testsuite/cargotest/support/git.rs b/tests/testsuite/cargotest/support/git.rs new file mode 100644 index 000000000..f9cfe4587 --- /dev/null +++ b/tests/testsuite/cargotest/support/git.rs @@ -0,0 +1,164 @@ +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; + +use cargo::util::ProcessError; +use git2; +use url::Url; + +use cargotest::support::{project, Project, ProjectBuilder, path2url}; + +#[must_use] +pub struct RepoBuilder { + repo: git2::Repository, + files: Vec, +} + +pub struct Repository(git2::Repository); + +pub fn repo(p: &Path) -> RepoBuilder { + RepoBuilder::init(p) +} + +impl RepoBuilder { + pub fn init(p: &Path) -> RepoBuilder { + t!(fs::create_dir_all(p.parent().unwrap())); + let repo = t!(git2::Repository::init(p)); + { + let mut config = t!(repo.config()); + t!(config.set_str("user.name", "name")); + t!(config.set_str("user.email", "email")); + } + RepoBuilder { + repo, + files: Vec::new(), + } + } + + pub fn file(self, path: &str, contents: &str) -> RepoBuilder { + let mut me = self.nocommit_file(path, contents); + me.files.push(PathBuf::from(path)); + me + } + + pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder { + let dst = self.repo.workdir().unwrap().join(path); + t!(fs::create_dir_all(dst.parent().unwrap())); + t!(t!(File::create(&dst)).write_all(contents.as_bytes())); + self + } + + pub fn build(self) -> Repository { + { + let mut index = t!(self.repo.index()); + for file in self.files.iter() { + t!(index.add_path(file)); + } + t!(index.write()); + let id = t!(index.write_tree()); + let tree = t!(self.repo.find_tree(id)); + let sig = t!(self.repo.signature()); + t!(self.repo + .commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[])); + } + let RepoBuilder { repo, .. 
} = self; + Repository(repo) + } +} + +impl Repository { + pub fn root(&self) -> &Path { + self.0.workdir().unwrap() + } + + pub fn url(&self) -> Url { + path2url(self.0.workdir().unwrap().to_path_buf()) + } +} + +pub fn new(name: &str, callback: F) -> Result +where + F: FnOnce(ProjectBuilder) -> ProjectBuilder, +{ + let mut git_project = project(name); + git_project = callback(git_project); + let git_project = git_project.build(); + + let repo = t!(git2::Repository::init(&git_project.root())); + let mut cfg = t!(repo.config()); + t!(cfg.set_str("user.email", "foo@bar.com")); + t!(cfg.set_str("user.name", "Foo Bar")); + drop(cfg); + add(&repo); + commit(&repo); + Ok(git_project) +} + +pub fn add(repo: &git2::Repository) { + // FIXME(libgit2/libgit2#2514): apparently add_all will add all submodules + // as well, and then fail b/c they're a directory. As a stopgap, we just + // ignore all submodules. + let mut s = t!(repo.submodules()); + for submodule in s.iter_mut() { + t!(submodule.add_to_index(false)); + } + let mut index = t!(repo.index()); + t!(index.add_all( + ["*"].iter(), + git2::IndexAddOption::DEFAULT, + Some( + &mut (|a, _b| if s.iter().any(|s| a.starts_with(s.path())) { + 1 + } else { + 0 + }) + ) + )); + t!(index.write()); +} + +pub fn add_submodule<'a>( + repo: &'a git2::Repository, + url: &str, + path: &Path, +) -> git2::Submodule<'a> { + let path = path.to_str().unwrap().replace(r"\", "/"); + let mut s = t!(repo.submodule(url, Path::new(&path), false)); + let subrepo = t!(s.open()); + t!(subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")); + let mut origin = t!(subrepo.find_remote("origin")); + t!(origin.fetch(&[], None, None)); + t!(subrepo.checkout_head(None)); + t!(s.add_finalize()); + return s; +} + +pub fn commit(repo: &git2::Repository) -> git2::Oid { + let tree_id = t!(t!(repo.index()).write_tree()); + let sig = t!(repo.signature()); + let mut parents = Vec::new(); + match repo.head().ok().map(|h| h.target().unwrap()) { + Some(parent) => parents.push(t!(repo.find_commit(parent))), + None => {} + } + let parents = parents.iter().collect::>(); + t!(repo.commit( + Some("HEAD"), + &sig, + &sig, + "test", + &t!(repo.find_tree(tree_id)), + &parents + )) +} + +pub fn tag(repo: &git2::Repository, name: &str) { + let head = repo.head().unwrap().target().unwrap(); + t!(repo.tag( + name, + &t!(repo.find_object(head, None)), + &t!(repo.signature()), + "make a new tag", + false + )); +} diff --git a/tests/testsuite/cargotest/support/mod.rs b/tests/testsuite/cargotest/support/mod.rs new file mode 100644 index 000000000..9ee70bf7c --- /dev/null +++ b/tests/testsuite/cargotest/support/mod.rs @@ -0,0 +1,1022 @@ +use std::env; +use std::ffi::OsStr; +use std::fmt; +use std::fs; +use std::io::prelude::*; +use std::os; +use std::path::{Path, PathBuf}; +use std::process::Output; +use std::str; +use std::usize; + +use serde_json::{self, Value}; +use url::Url; +use hamcrest as ham; +use cargo::util::ProcessBuilder; +use cargo::util::ProcessError; + +use cargotest::support::paths::CargoPathExt; + +macro_rules! 
t { + ($e:expr) => (match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + }) +} + +pub mod paths; +pub mod git; +pub mod registry; +pub mod cross_compile; +pub mod publish; + +/* + * + * ===== Builders ===== + * + */ + +#[derive(PartialEq, Clone)] +struct FileBuilder { + path: PathBuf, + body: String, +} + +impl FileBuilder { + pub fn new(path: PathBuf, body: &str) -> FileBuilder { + FileBuilder { + path, + body: body.to_string(), + } + } + + fn mk(&self) { + self.dirname().mkdir_p(); + + let mut file = fs::File::create(&self.path) + .unwrap_or_else(|e| panic!("could not create file {}: {}", self.path.display(), e)); + + t!(file.write_all(self.body.as_bytes())); + } + + fn dirname(&self) -> &Path { + self.path.parent().unwrap() + } +} + +#[derive(PartialEq, Clone)] +struct SymlinkBuilder { + dst: PathBuf, + src: PathBuf, +} + +impl SymlinkBuilder { + pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { + SymlinkBuilder { dst, src } + } + + #[cfg(unix)] + fn mk(&self) { + self.dirname().mkdir_p(); + t!(os::unix::fs::symlink(&self.dst, &self.src)); + } + + #[cfg(windows)] + fn mk(&self) { + self.dirname().mkdir_p(); + t!(os::windows::fs::symlink_file(&self.dst, &self.src)); + } + + fn dirname(&self) -> &Path { + self.src.parent().unwrap() + } +} + +#[derive(PartialEq, Clone)] +pub struct Project { + root: PathBuf, +} + +#[must_use] +#[derive(PartialEq, Clone)] +pub struct ProjectBuilder { + name: String, + root: Project, + files: Vec, + symlinks: Vec, +} + +impl ProjectBuilder { + pub fn root(&self) -> PathBuf { + self.root.root() + } + + pub fn target_debug_dir(&self) -> PathBuf { + self.root.target_debug_dir() + } + + pub fn new(name: &str, root: PathBuf) -> ProjectBuilder { + ProjectBuilder { + name: name.to_string(), + root: Project { root }, + files: vec![], + symlinks: vec![], + } + } + + pub fn file>(mut self, path: B, body: &str) -> Self { + self._file(path.as_ref(), body); + self + } + + fn _file(&mut self, path: &Path, body: &str) { + self.files + .push(FileBuilder::new(self.root.root.join(path), body)); + } + + pub fn symlink>(mut self, dst: T, src: T) -> Self { + self.symlinks.push(SymlinkBuilder::new( + self.root.root.join(dst), + self.root.root.join(src), + )); + self + } + + pub fn build(self) -> Project { + // First, clean the directory if it already exists + self.rm_root(); + + // Create the empty directory + self.root.root.mkdir_p(); + + for file in self.files.iter() { + file.mk(); + } + + for symlink in self.symlinks.iter() { + symlink.mk(); + } + + let ProjectBuilder { + name: _, + root, + files: _, + symlinks: _, + .. 
+ } = self; + root + } + + fn rm_root(&self) { + self.root.root.rm_rf() + } +} + +impl Project { + pub fn root(&self) -> PathBuf { + self.root.clone() + } + + pub fn build_dir(&self) -> PathBuf { + self.root.join("target") + } + + pub fn target_debug_dir(&self) -> PathBuf { + self.build_dir().join("debug") + } + + pub fn url(&self) -> Url { + path2url(self.root()) + } + + pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf { + let prefix = Project::get_lib_prefix(kind); + + let extension = Project::get_lib_extension(kind); + + let lib_file_name = format!("{}{}.{}", prefix, name, extension); + + self.target_debug_dir() + .join("examples") + .join(&lib_file_name) + } + + pub fn bin(&self, b: &str) -> PathBuf { + self.build_dir() + .join("debug") + .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) + } + + pub fn release_bin(&self, b: &str) -> PathBuf { + self.build_dir() + .join("release") + .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) + } + + pub fn target_bin(&self, target: &str, b: &str) -> PathBuf { + self.build_dir().join(target).join("debug").join(&format!( + "{}{}", + b, + env::consts::EXE_SUFFIX + )) + } + + pub fn change_file(&self, path: &str, body: &str) { + FileBuilder::new(self.root.join(path), body).mk() + } + + pub fn process>(&self, program: T) -> ProcessBuilder { + let mut p = ::cargotest::process(program); + p.cwd(self.root()); + return p; + } + + pub fn cargo(&self, cmd: &str) -> ProcessBuilder { + let mut p = self.process(&cargo_exe()); + for arg in cmd.split_whitespace() { + if arg.contains('"') || arg.contains('\'') { + panic!("shell-style argument parsing is not supported") + } + p.arg(arg); + } + return p; + } + + pub fn read_lockfile(&self) -> String { + let mut buffer = String::new(); + fs::File::open(self.root().join("Cargo.lock")) + .unwrap() + .read_to_string(&mut buffer) + .unwrap(); + buffer + } + + pub fn uncomment_root_manifest(&self) { + let mut contents = String::new(); + fs::File::open(self.root().join("Cargo.toml")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + fs::File::create(self.root().join("Cargo.toml")) + .unwrap() + .write_all(contents.replace("#", "").as_bytes()) + .unwrap(); + } + + fn get_lib_prefix(kind: &str) -> &str { + match kind { + "lib" | "rlib" => "lib", + "staticlib" | "dylib" | "proc-macro" => { + if cfg!(windows) { + "" + } else { + "lib" + } + } + _ => unreachable!(), + } + } + + fn get_lib_extension(kind: &str) -> &str { + match kind { + "lib" | "rlib" => "rlib", + "staticlib" => { + if cfg!(windows) { + "lib" + } else { + "a" + } + } + "dylib" | "proc-macro" => { + if cfg!(windows) { + "dll" + } else if cfg!(target_os = "macos") { + "dylib" + } else { + "so" + } + } + _ => unreachable!(), + } + } +} + +// Generates a project layout +pub fn project(name: &str) -> ProjectBuilder { + ProjectBuilder::new(name, paths::root().join(name)) +} + +// Generates a project layout inside our fake home dir +pub fn project_in_home(name: &str) -> ProjectBuilder { + ProjectBuilder::new(name, paths::home().join(name)) +} + +// === Helpers === + +pub fn main_file(println: &str, deps: &[&str]) -> String { + let mut buf = String::new(); + + for dep in deps.iter() { + buf.push_str(&format!("extern crate {};\n", dep)); + } + + buf.push_str("fn main() { println!("); + buf.push_str(&println); + buf.push_str("); }\n"); + + buf.to_string() +} + +trait ErrMsg { + fn with_err_msg(self, val: String) -> Result; +} + +impl ErrMsg for Result { + fn with_err_msg(self, val: String) -> Result { + match self { + Ok(val) => Ok(val), + 
Err(err) => Err(format!("{}; original={}", val, err)),
+        }
+    }
+}
+
+// Path to cargo executables
+pub fn cargo_dir() -> PathBuf {
+    env::var_os("CARGO_BIN_PATH")
+        .map(PathBuf::from)
+        .or_else(|| {
+            env::current_exe().ok().map(|mut path| {
+                path.pop();
+                if path.ends_with("deps") {
+                    path.pop();
+                }
+                path
+            })
+        })
+        .unwrap_or_else(|| panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test"))
+}
+
+pub fn cargo_exe() -> PathBuf {
+    cargo_dir().join(format!("cargo{}", env::consts::EXE_SUFFIX))
+}
+
+/// Returns an absolute path in the filesystem that `path` points to. The
+/// returned path does not contain any symlinks in its hierarchy.
+/*
+ *
+ * ===== Matchers =====
+ *
+ */
+
+#[derive(Clone)]
+pub struct Execs {
+    expect_stdout: Option<String>,
+    expect_stdin: Option<String>,
+    expect_stderr: Option<String>,
+    expect_exit_code: Option<i32>,
+    expect_stdout_contains: Vec<String>,
+    expect_stderr_contains: Vec<String>,
+    expect_either_contains: Vec<String>,
+    expect_stdout_contains_n: Vec<(String, usize)>,
+    expect_stdout_not_contains: Vec<String>,
+    expect_stderr_not_contains: Vec<String>,
+    expect_neither_contains: Vec<String>,
+    expect_json: Option<Vec<Value>>,
+    stream_output: bool,
+}
+
+impl Execs {
+    pub fn with_stdout<S: ToString>(mut self, expected: S) -> Execs {
+        self.expect_stdout = Some(expected.to_string());
+        self
+    }
+
+    pub fn with_stderr<S: ToString>(mut self, expected: S) -> Execs {
+        self._with_stderr(&expected);
+        self
+    }
+
+    fn _with_stderr(&mut self, expected: &ToString) {
+        self.expect_stderr = Some(expected.to_string());
+    }
+
+    pub fn with_status(mut self, expected: i32) -> Execs {
+        self.expect_exit_code = Some(expected);
+        self
+    }
+
+    pub fn with_stdout_contains<S: ToString>(mut self, expected: S) -> Execs {
+        self.expect_stdout_contains.push(expected.to_string());
+        self
+    }
+
+    pub fn with_stderr_contains<S: ToString>(mut self, expected: S) -> Execs {
+        self.expect_stderr_contains.push(expected.to_string());
+        self
+    }
+
+    pub fn with_either_contains<S: ToString>(mut self, expected: S) -> Execs {
+        self.expect_either_contains.push(expected.to_string());
+        self
+    }
+
+    pub fn with_stdout_contains_n<S: ToString>(mut self, expected: S, number: usize) -> Execs {
+        self.expect_stdout_contains_n
+            .push((expected.to_string(), number));
+        self
+    }
+
+    pub fn with_stdout_does_not_contain<S: ToString>(mut self, expected: S) -> Execs {
+        self.expect_stdout_not_contains.push(expected.to_string());
+        self
+    }
+
+    pub fn with_stderr_does_not_contain<S: ToString>(mut self, expected: S) -> Execs {
+        self.expect_stderr_not_contains.push(expected.to_string());
+        self
+    }
+
+    pub fn with_json(mut self, expected: &str) -> Execs {
+        self.expect_json = Some(
+            expected
+                .split("\n\n")
+                .map(|obj| obj.parse().unwrap())
+                .collect(),
+        );
+        self
+    }
+
+    /// Forward subordinate process stdout/stderr to the terminal.
+    /// Useful for printf debugging of the tests.
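+    ///
+    /// A hypothetical call site (illustrative, not a test in this suite):
+    ///
+    /// ```ignore
+    /// assert_that(p.cargo("run"), execs().stream().with_status(0));
+    /// ```
+    ///
+    /// Note that `stream()` panics when the `CI` environment variable is
+    /// set; see the `ham::Matcher` impl below. It is strictly a local
+    /// debugging aid.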
+ #[allow(unused)] + pub fn stream(mut self) -> Execs { + self.stream_output = true; + self + } + + fn match_output(&self, actual: &Output) -> ham::MatchResult { + self.match_status(actual) + .and(self.match_stdout(actual)) + .and(self.match_stderr(actual)) + } + + fn match_status(&self, actual: &Output) -> ham::MatchResult { + match self.expect_exit_code { + None => Ok(()), + Some(code) if actual.status.code() == Some(code) => Ok(()), + Some(_) => Err(format!( + "exited with {}\n--- stdout\n{}\n--- stderr\n{}", + actual.status, + String::from_utf8_lossy(&actual.stdout), + String::from_utf8_lossy(&actual.stderr) + )), + } + } + + fn match_stdout(&self, actual: &Output) -> ham::MatchResult { + self.match_std( + self.expect_stdout.as_ref(), + &actual.stdout, + "stdout", + &actual.stderr, + MatchKind::Exact, + )?; + for expect in self.expect_stdout_contains.iter() { + self.match_std( + Some(expect), + &actual.stdout, + "stdout", + &actual.stderr, + MatchKind::Partial, + )?; + } + for expect in self.expect_stderr_contains.iter() { + self.match_std( + Some(expect), + &actual.stderr, + "stderr", + &actual.stdout, + MatchKind::Partial, + )?; + } + for &(ref expect, number) in self.expect_stdout_contains_n.iter() { + self.match_std( + Some(&expect), + &actual.stdout, + "stdout", + &actual.stderr, + MatchKind::PartialN(number), + )?; + } + for expect in self.expect_stdout_not_contains.iter() { + self.match_std( + Some(expect), + &actual.stdout, + "stdout", + &actual.stderr, + MatchKind::NotPresent, + )?; + } + for expect in self.expect_stderr_not_contains.iter() { + self.match_std( + Some(expect), + &actual.stderr, + "stderr", + &actual.stdout, + MatchKind::NotPresent, + )?; + } + for expect in self.expect_neither_contains.iter() { + self.match_std( + Some(expect), + &actual.stdout, + "stdout", + &actual.stdout, + MatchKind::NotPresent, + )?; + + self.match_std( + Some(expect), + &actual.stderr, + "stderr", + &actual.stderr, + MatchKind::NotPresent, + )?; + } + + for expect in self.expect_either_contains.iter() { + let match_std = self.match_std( + Some(expect), + &actual.stdout, + "stdout", + &actual.stdout, + MatchKind::Partial, + ); + let match_err = self.match_std( + Some(expect), + &actual.stderr, + "stderr", + &actual.stderr, + MatchKind::Partial, + ); + + if let (Err(_), Err(_)) = (match_std, match_err) { + Err(format!( + "expected to find:\n\ + {}\n\n\ + did not find in either output.", + expect + ))?; + } + } + + if let Some(ref objects) = self.expect_json { + let stdout = str::from_utf8(&actual.stdout) + .map_err(|_| "stdout was not utf8 encoded".to_owned())?; + let lines = stdout.lines().collect::>(); + if lines.len() != objects.len() { + return Err(format!( + "expected {} json lines, got {}, stdout:\n{}", + objects.len(), + lines.len(), + stdout + )); + } + for (obj, line) in objects.iter().zip(lines) { + self.match_json(obj, line)?; + } + } + Ok(()) + } + + fn match_stderr(&self, actual: &Output) -> ham::MatchResult { + self.match_std( + self.expect_stderr.as_ref(), + &actual.stderr, + "stderr", + &actual.stdout, + MatchKind::Exact, + ) + } + + fn match_std( + &self, + expected: Option<&String>, + actual: &[u8], + description: &str, + extra: &[u8], + kind: MatchKind, + ) -> ham::MatchResult { + let out = match expected { + Some(out) => out, + None => return Ok(()), + }; + let actual = match str::from_utf8(actual) { + Err(..) => return Err(format!("{} was not utf8 encoded", description)), + Ok(actual) => actual, + }; + // Let's not deal with \r\n vs \n on windows... 
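+        // Tabs in the actual output are normalized away below as well, so
+        // expected strings never need to reproduce exact tab layout.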
+ let actual = actual.replace("\r", ""); + let actual = actual.replace("\t", ""); + + match kind { + MatchKind::Exact => { + let a = actual.lines(); + let e = out.lines(); + + let diffs = self.diff_lines(a, e, false); + if diffs.is_empty() { + Ok(()) + } else { + Err(format!( + "differences:\n\ + {}\n\n\ + other output:\n\ + `{}`", + diffs.join("\n"), + String::from_utf8_lossy(extra) + )) + } + } + MatchKind::Partial => { + let mut a = actual.lines(); + let e = out.lines(); + + let mut diffs = self.diff_lines(a.clone(), e.clone(), true); + while let Some(..) = a.next() { + let a = self.diff_lines(a.clone(), e.clone(), true); + if a.len() < diffs.len() { + diffs = a; + } + } + if diffs.is_empty() { + Ok(()) + } else { + Err(format!( + "expected to find:\n\ + {}\n\n\ + did not find in output:\n\ + {}", + out, actual + )) + } + } + MatchKind::PartialN(number) => { + let mut a = actual.lines(); + let e = out.lines(); + + let mut matches = 0; + + while let Some(..) = { + if self.diff_lines(a.clone(), e.clone(), true).is_empty() { + matches += 1; + } + a.next() + } {} + + if matches == number { + Ok(()) + } else { + Err(format!( + "expected to find {} occurrences:\n\ + {}\n\n\ + did not find in output:\n\ + {}", + number, out, actual + )) + } + } + MatchKind::NotPresent => { + if !actual.contains(out) { + Ok(()) + } else { + Err(format!( + "expected not to find:\n\ + {}\n\n\ + but found in output:\n\ + {}", + out, actual + )) + } + } + } + } + + fn match_json(&self, expected: &Value, line: &str) -> ham::MatchResult { + let actual = match line.parse() { + Err(e) => return Err(format!("invalid json, {}:\n`{}`", e, line)), + Ok(actual) => actual, + }; + + match find_mismatch(expected, &actual) { + Some((expected_part, actual_part)) => Err(format!( + "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n", + serde_json::to_string_pretty(expected).unwrap(), + serde_json::to_string_pretty(&actual).unwrap(), + serde_json::to_string_pretty(expected_part).unwrap(), + serde_json::to_string_pretty(actual_part).unwrap(), + )), + None => Ok(()), + } + } + + fn diff_lines<'a>( + &self, + actual: str::Lines<'a>, + expected: str::Lines<'a>, + partial: bool, + ) -> Vec { + let actual = actual.take(if partial { + expected.clone().count() + } else { + usize::MAX + }); + zip_all(actual, expected) + .enumerate() + .filter_map(|(i, (a, e))| match (a, e) { + (Some(a), Some(e)) => { + if lines_match(&e, &a) { + None + } else { + Some(format!("{:3} - |{}|\n + |{}|\n", i, e, a)) + } + } + (Some(a), None) => Some(format!("{:3} -\n + |{}|\n", i, a)), + (None, Some(e)) => Some(format!("{:3} - |{}|\n +\n", i, e)), + (None, None) => panic!("Cannot get here"), + }) + .collect() + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +enum MatchKind { + Exact, + Partial, + PartialN(usize), + NotPresent, +} + +pub fn lines_match(expected: &str, mut actual: &str) -> bool { + let expected = substitute_macros(expected); + for (i, part) in expected.split("[..]").enumerate() { + match actual.find(part) { + Some(j) => { + if i == 0 && j != 0 { + return false; + } + actual = &actual[j + part.len()..]; + } + None => return false, + } + } + actual.is_empty() || expected.ends_with("[..]") +} + +#[test] +fn lines_match_works() { + assert!(lines_match("a b", "a b")); + assert!(lines_match("a[..]b", "a b")); + assert!(lines_match("a[..]", "a b")); + assert!(lines_match("[..]", "a b")); + assert!(lines_match("[..]b", "a b")); + + assert!(!lines_match("[..]b", "c")); + assert!(!lines_match("b", "c")); + 
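+    // A fragment must match at the start of the line, and the whole line
+    // must be consumed unless the pattern ends in `[..]`; that is why "b"
+    // fails against "cb" below. Also note `substitute_macros` is applied to
+    // the expected side first, so lines_match("[ERROR] foo", "error: foo")
+    // holds too.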
assert!(!lines_match("b", "cb")); +} + +// Compares JSON object for approximate equality. +// You can use `[..]` wildcard in strings (useful for OS dependent things such +// as paths). You can use a `"{...}"` string literal as a wildcard for +// arbitrary nested JSON (useful for parts of object emitted by other programs +// (e.g. rustc) rather than Cargo itself). Arrays are sorted before comparison. +fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> { + use serde_json::Value::*; + match (expected, actual) { + (&Number(ref l), &Number(ref r)) if l == r => None, + (&Bool(l), &Bool(r)) if l == r => None, + (&String(ref l), &String(ref r)) if lines_match(l, r) => None, + (&Array(ref l), &Array(ref r)) => { + if l.len() != r.len() { + return Some((expected, actual)); + } + + let mut l = l.iter().collect::>(); + let mut r = r.iter().collect::>(); + + l.retain( + |l| match r.iter().position(|r| find_mismatch(l, r).is_none()) { + Some(i) => { + r.remove(i); + false + } + None => true, + }, + ); + + if l.len() > 0 { + assert!(r.len() > 0); + Some((&l[0], &r[0])) + } else { + assert_eq!(r.len(), 0); + None + } + } + (&Object(ref l), &Object(ref r)) => { + let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k)); + if !same_keys { + return Some((expected, actual)); + } + + l.values() + .zip(r.values()) + .filter_map(|(l, r)| find_mismatch(l, r)) + .nth(0) + } + (&Null, &Null) => None, + // magic string literal "{...}" acts as wildcard for any sub-JSON + (&String(ref l), _) if l == "{...}" => None, + _ => Some((expected, actual)), + } +} + +struct ZipAll { + first: I1, + second: I2, +} + +impl, I2: Iterator> Iterator for ZipAll { + type Item = (Option, Option); + fn next(&mut self) -> Option<(Option, Option)> { + let first = self.first.next(); + let second = self.second.next(); + + match (first, second) { + (None, None) => None, + (a, b) => Some((a, b)), + } + } +} + +fn zip_all, I2: Iterator>(a: I1, b: I2) -> ZipAll { + ZipAll { + first: a, + second: b, + } +} + +impl fmt::Debug for Execs { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "execs") + } +} + +impl ham::Matcher for Execs { + fn matches(&self, mut process: ProcessBuilder) -> ham::MatchResult { + self.matches(&mut process) + } +} + +impl<'a> ham::Matcher<&'a mut ProcessBuilder> for Execs { + fn matches(&self, process: &'a mut ProcessBuilder) -> ham::MatchResult { + println!("running {}", process); + let res = if self.stream_output { + if env::var("CI").is_ok() { + panic!("`.stream()` is for local debugging") + } + process.exec_with_streaming( + &mut |out| Ok(println!("{}", out)), + &mut |err| Ok(eprintln!("{}", err)), + false, + ) + } else { + process.exec_with_output() + }; + + match res { + Ok(out) => self.match_output(&out), + Err(e) => { + let err = e.downcast_ref::(); + if let Some(&ProcessError { + output: Some(ref out), + .. 
+ }) = err + { + return self.match_output(out); + } + let mut s = format!("could not exec process {}: {}", process, e); + for cause in e.causes() { + s.push_str(&format!("\ncaused by: {}", cause)); + } + Err(s) + } + } + } +} + +impl ham::Matcher for Execs { + fn matches(&self, output: Output) -> ham::MatchResult { + self.match_output(&output) + } +} + +pub fn execs() -> Execs { + Execs { + expect_stdout: None, + expect_stderr: None, + expect_stdin: None, + expect_exit_code: None, + expect_stdout_contains: Vec::new(), + expect_stderr_contains: Vec::new(), + expect_either_contains: Vec::new(), + expect_stdout_contains_n: Vec::new(), + expect_stdout_not_contains: Vec::new(), + expect_stderr_not_contains: Vec::new(), + expect_neither_contains: Vec::new(), + expect_json: None, + stream_output: false, + } +} + +pub trait Tap { + fn tap(self, callback: F) -> Self; +} + +impl Tap for T { + fn tap(mut self, callback: F) -> T { + callback(&mut self); + self + } +} + +pub fn basic_bin_manifest(name: &str) -> String { + format!( + r#" + [package] + + name = "{}" + version = "0.5.0" + authors = ["wycats@example.com"] + + [[bin]] + + name = "{}" + "#, + name, name + ) +} + +pub fn basic_lib_manifest(name: &str) -> String { + format!( + r#" + [package] + + name = "{}" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "{}" + "#, + name, name + ) +} + +pub fn path2url(p: PathBuf) -> Url { + Url::from_file_path(&*p).ok().unwrap() +} + +fn substitute_macros(input: &str) -> String { + let macros = [ + ("[RUNNING]", " Running"), + ("[COMPILING]", " Compiling"), + ("[CREATED]", " Created"), + ("[FINISHED]", " Finished"), + ("[ERROR]", "error:"), + ("[WARNING]", "warning:"), + ("[DOCUMENTING]", " Documenting"), + ("[FRESH]", " Fresh"), + ("[UPDATING]", " Updating"), + ("[ADDING]", " Adding"), + ("[REMOVING]", " Removing"), + ("[DOCTEST]", " Doc-tests"), + ("[PACKAGING]", " Packaging"), + ("[DOWNLOADING]", " Downloading"), + ("[UPLOADING]", " Uploading"), + ("[VERIFYING]", " Verifying"), + ("[ARCHIVING]", " Archiving"), + ("[INSTALLING]", " Installing"), + ("[REPLACING]", " Replacing"), + ("[UNPACKING]", " Unpacking"), + ("[SUMMARY]", " Summary"), + ("[EXE]", if cfg!(windows) { ".exe" } else { "" }), + ("[/]", if cfg!(windows) { "\\" } else { "/" }), + ]; + let mut result = input.to_owned(); + for &(pat, subst) in macros.iter() { + result = result.replace(pat, subst) + } + return result; +} diff --git a/tests/testsuite/cargotest/support/paths.rs b/tests/testsuite/cargotest/support/paths.rs new file mode 100644 index 000000000..78b30ee77 --- /dev/null +++ b/tests/testsuite/cargotest/support/paths.rs @@ -0,0 +1,165 @@ +use std::env; +use std::cell::Cell; +use std::fs; +use std::io::{self, ErrorKind}; +use std::path::{Path, PathBuf}; +use std::sync::{Once, ONCE_INIT}; +use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; + +use filetime::{self, FileTime}; + +static CARGO_INTEGRATION_TEST_DIR: &'static str = "cit"; +static NEXT_ID: AtomicUsize = ATOMIC_USIZE_INIT; + +thread_local!(static TASK_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst)); + +fn init() { + static GLOBAL_INIT: Once = ONCE_INIT; + thread_local!(static LOCAL_INIT: Cell = Cell::new(false)); + GLOBAL_INIT.call_once(|| { + global_root().mkdir_p(); + }); + LOCAL_INIT.with(|i| { + if i.get() { + return; + } + i.set(true); + root().rm_rf(); + home().mkdir_p(); + }) +} + +fn global_root() -> PathBuf { + let mut path = t!(env::current_exe()); + path.pop(); // chop off exe name + path.pop(); // chop off 'debug' + + 
// If `cargo test` is run manually then our path looks like + // `target/debug/foo`, in which case our `path` is already pointing at + // `target`. If, however, `cargo test --target $target` is used then the + // output is `target/$target/debug/foo`, so our path is pointing at + // `target/$target`. Here we conditionally pop the `$target` name. + if path.file_name().and_then(|s| s.to_str()) != Some("target") { + path.pop(); + } + + path.join(CARGO_INTEGRATION_TEST_DIR) +} + +pub fn root() -> PathBuf { + init(); + global_root().join(&TASK_ID.with(|my_id| format!("t{}", my_id))) +} + +pub fn home() -> PathBuf { + root().join("home") +} + +pub trait CargoPathExt { + fn rm_rf(&self); + fn mkdir_p(&self); + + fn move_into_the_past(&self) { + self.move_in_time(|sec, nsec| (sec - 3600, nsec)) + } + + fn move_into_the_future(&self) { + self.move_in_time(|sec, nsec| (sec + 3600, nsec)) + } + + fn move_in_time(&self, travel_amount: F) + where + F: Fn(u64, u32) -> (u64, u32); +} + +impl CargoPathExt for Path { + /* Technically there is a potential race condition, but we don't + * care all that much for our tests + */ + fn rm_rf(&self) { + if !self.exists() { + return; + } + + for file in t!(fs::read_dir(self)) { + let file = t!(file); + if file.file_type().map(|m| m.is_dir()).unwrap_or(false) { + file.path().rm_rf(); + } else { + // On windows we can't remove a readonly file, and git will + // often clone files as readonly. As a result, we have some + // special logic to remove readonly files on windows. + do_op(&file.path(), "remove file", |p| fs::remove_file(p)); + } + } + do_op(self, "remove dir", |p| fs::remove_dir(p)); + } + + fn mkdir_p(&self) { + fs::create_dir_all(self) + .unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e)) + } + + fn move_in_time(&self, travel_amount: F) + where + F: Fn(u64, u32) -> ((u64, u32)), + { + if self.is_file() { + time_travel(self, &travel_amount); + } else { + recurse(self, &self.join("target"), &travel_amount); + } + + fn recurse(p: &Path, bad: &Path, travel_amount: &F) + where + F: Fn(u64, u32) -> ((u64, u32)), + { + if p.is_file() { + time_travel(p, travel_amount) + } else if !p.starts_with(bad) { + for f in t!(fs::read_dir(p)) { + let f = t!(f).path(); + recurse(&f, bad, travel_amount); + } + } + } + + fn time_travel(path: &Path, travel_amount: &F) + where + F: Fn(u64, u32) -> ((u64, u32)), + { + let stat = t!(path.metadata()); + + let mtime = FileTime::from_last_modification_time(&stat); + + let (sec, nsec) = travel_amount(mtime.seconds_relative_to_1970(), mtime.nanoseconds()); + let newtime = FileTime::from_seconds_since_1970(sec, nsec); + + // Sadly change_file_times has a failure mode where a readonly file + // cannot have its times changed on windows. 
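+            // `do_op` (below) clears the readonly bit and retries once when
+            // it hits `PermissionDenied` on Windows.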
+ do_op(path, "set file times", |path| { + filetime::set_file_times(path, newtime, newtime) + }); + } + } +} + +fn do_op(path: &Path, desc: &str, mut f: F) +where + F: FnMut(&Path) -> io::Result<()>, +{ + match f(path) { + Ok(()) => {} + Err(ref e) if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied => { + let mut p = t!(path.metadata()).permissions(); + p.set_readonly(false); + t!(fs::set_permissions(path, p)); + f(path).unwrap_or_else(|e| { + panic!("failed to {} {}: {}", desc, path.display(), e); + }) + } + Err(e) => { + panic!("failed to {} {}: {}", desc, path.display(), e); + } + } +} diff --git a/tests/testsuite/cargotest/support/publish.rs b/tests/testsuite/cargotest/support/publish.rs new file mode 100644 index 000000000..88c7bcbab --- /dev/null +++ b/tests/testsuite/cargotest/support/publish.rs @@ -0,0 +1,62 @@ +use std::path::PathBuf; +use std::io::prelude::*; +use std::fs::{self, File}; + +use cargotest::support::paths; +use cargotest::support::git::{repo, Repository}; + +use url::Url; + +pub fn setup() -> Repository { + let config = paths::root().join(".cargo/config"); + t!(fs::create_dir_all(config.parent().unwrap())); + t!(t!(File::create(&config)).write_all( + format!( + r#" + [registry] + token = "api-token" + + [registries.alternative] + index = "{registry}" + "#, + registry = registry().to_string() + ).as_bytes() + )); + + let credentials = paths::root().join("home/.cargo/credentials"); + t!(fs::create_dir_all(credentials.parent().unwrap())); + t!(t!(File::create(&credentials)).write_all( + br#" + [registries.alternative] + token = "api-token" + "# + )); + + t!(fs::create_dir_all(&upload_path().join("api/v1/crates"))); + + repo(®istry_path()) + .file( + "config.json", + &format!( + r#"{{ + "dl": "{0}", + "api": "{0}" + }}"#, + upload() + ), + ) + .build() +} + +fn registry_path() -> PathBuf { + paths::root().join("registry") +} +pub fn registry() -> Url { + Url::from_file_path(&*registry_path()).ok().unwrap() +} +pub fn upload_path() -> PathBuf { + paths::root().join("upload") +} +fn upload() -> Url { + Url::from_file_path(&*upload_path()).ok().unwrap() +} diff --git a/tests/testsuite/cargotest/support/registry.rs b/tests/testsuite/cargotest/support/registry.rs new file mode 100644 index 000000000..2842fad2f --- /dev/null +++ b/tests/testsuite/cargotest/support/registry.rs @@ -0,0 +1,372 @@ +use std::collections::HashMap; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; + +use cargo::util::Sha256; +use flate2::Compression; +use flate2::write::GzEncoder; +use git2; +use hex; +use tar::{Builder, Header}; +use url::Url; + +use cargotest::support::paths; +use cargotest::support::git::repo; + +pub fn registry_path() -> PathBuf { + paths::root().join("registry") +} +pub fn registry() -> Url { + Url::from_file_path(&*registry_path()).ok().unwrap() +} +pub fn dl_path() -> PathBuf { + paths::root().join("dl") +} +pub fn dl_url() -> Url { + Url::from_file_path(&*dl_path()).ok().unwrap() +} +pub fn alt_registry_path() -> PathBuf { + paths::root().join("alternative-registry") +} +pub fn alt_registry() -> Url { + Url::from_file_path(&*alt_registry_path()).ok().unwrap() +} +pub fn alt_dl_path() -> PathBuf { + paths::root().join("alt_dl") +} +pub fn alt_dl_url() -> String { + let base = Url::from_file_path(&*alt_dl_path()).ok().unwrap(); + format!("{}/{{crate}}/{{version}}/{{crate}}-{{version}}.crate", base) +} +pub fn alt_api_path() -> PathBuf { + paths::root().join("alt_api") +} +pub fn alt_api_url() -> Url { + 
Url::from_file_path(&*alt_api_path()).ok().unwrap() +} + +pub struct Package { + name: String, + vers: String, + deps: Vec, + files: Vec<(String, String)>, + extra_files: Vec<(String, String)>, + yanked: bool, + features: HashMap>, + local: bool, + alternative: bool, +} + +struct Dependency { + name: String, + vers: String, + kind: String, + target: Option, + features: Vec, + registry: Option, +} + +pub fn init() { + let config = paths::home().join(".cargo/config"); + t!(fs::create_dir_all(config.parent().unwrap())); + if fs::metadata(&config).is_ok() { + return; + } + t!(t!(File::create(&config)).write_all( + format!( + r#" + [registry] + token = "api-token" + + [source.crates-io] + registry = 'https://wut' + replace-with = 'dummy-registry' + + [source.dummy-registry] + registry = '{reg}' + + [registries.alternative] + index = '{alt}' + "#, + reg = registry(), + alt = alt_registry() + ).as_bytes() + )); + + // Init a new registry + let _ = repo(®istry_path()) + .file( + "config.json", + &format!( + r#" + {{"dl":"{0}","api":"{0}"}} + "#, + dl_url() + ), + ) + .build(); + fs::create_dir_all(dl_path().join("api/v1/crates")).unwrap(); + + // Init an alt registry + repo(&alt_registry_path()) + .file( + "config.json", + &format!( + r#" + {{"dl":"{}","api":"{}"}} + "#, + alt_dl_url(), + alt_api_url() + ), + ) + .build(); + fs::create_dir_all(alt_api_path().join("api/v1/crates")).unwrap(); +} + +impl Package { + pub fn new(name: &str, vers: &str) -> Package { + init(); + Package { + name: name.to_string(), + vers: vers.to_string(), + deps: Vec::new(), + files: Vec::new(), + extra_files: Vec::new(), + yanked: false, + features: HashMap::new(), + local: false, + alternative: false, + } + } + + pub fn local(&mut self, local: bool) -> &mut Package { + self.local = local; + self + } + + pub fn alternative(&mut self, alternative: bool) -> &mut Package { + self.alternative = alternative; + self + } + + pub fn file(&mut self, name: &str, contents: &str) -> &mut Package { + self.files.push((name.to_string(), contents.to_string())); + self + } + + pub fn extra_file(&mut self, name: &str, contents: &str) -> &mut Package { + self.extra_files + .push((name.to_string(), contents.to_string())); + self + } + + pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package { + self.full_dep(name, vers, None, "normal", &[], None) + } + + pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package { + self.full_dep(name, vers, None, "normal", features, None) + } + + pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package { + self.full_dep(name, vers, Some(target), "normal", &[], None) + } + + pub fn registry_dep(&mut self, name: &str, vers: &str, registry: &str) -> &mut Package { + self.full_dep(name, vers, None, "normal", &[], Some(registry)) + } + + pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package { + self.full_dep(name, vers, None, "dev", &[], None) + } + + fn full_dep( + &mut self, + name: &str, + vers: &str, + target: Option<&str>, + kind: &str, + features: &[&str], + registry: Option<&str>, + ) -> &mut Package { + self.deps.push(Dependency { + name: name.to_string(), + vers: vers.to_string(), + kind: kind.to_string(), + target: target.map(|s| s.to_string()), + features: features.iter().map(|s| s.to_string()).collect(), + registry: registry.map(|s| s.to_string()), + }); + self + } + + pub fn yanked(&mut self, yanked: bool) -> &mut Package { + self.yanked = yanked; + self + } + + pub fn publish(&self) -> String { + self.make_archive(); + + 
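+        // An index line along these lines gets appended below (values are
+        // illustrative):
+        //   {"name":"a","vers":"0.0.1","deps":[],"cksum":"...","features":{},"yanked":false}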
// Figure out what we're going to write into the index + let deps = self.deps + .iter() + .map(|dep| { + json!({ + "name": dep.name, + "req": dep.vers, + "features": dep.features, + "default_features": true, + "target": dep.target, + "optional": false, + "kind": dep.kind, + "registry": dep.registry, + }) + }) + .collect::>(); + let cksum = { + let mut c = Vec::new(); + t!(t!(File::open(&self.archive_dst())).read_to_end(&mut c)); + cksum(&c) + }; + let line = json!({ + "name": self.name, + "vers": self.vers, + "deps": deps, + "cksum": cksum, + "features": self.features, + "yanked": self.yanked, + }).to_string(); + + let file = match self.name.len() { + 1 => format!("1/{}", self.name), + 2 => format!("2/{}", self.name), + 3 => format!("3/{}/{}", &self.name[..1], self.name), + _ => format!("{}/{}/{}", &self.name[0..2], &self.name[2..4], self.name), + }; + + let registry_path = if self.alternative { + alt_registry_path() + } else { + registry_path() + }; + + // Write file/line in the index + let dst = if self.local { + registry_path.join("index").join(&file) + } else { + registry_path.join(&file) + }; + let mut prev = String::new(); + let _ = File::open(&dst).and_then(|mut f| f.read_to_string(&mut prev)); + t!(fs::create_dir_all(dst.parent().unwrap())); + t!(t!(File::create(&dst)).write_all((prev + &line[..] + "\n").as_bytes())); + + // Add the new file to the index + if !self.local { + let repo = t!(git2::Repository::open(®istry_path)); + let mut index = t!(repo.index()); + t!(index.add_path(Path::new(&file))); + t!(index.write()); + let id = t!(index.write_tree()); + + // Commit this change + let tree = t!(repo.find_tree(id)); + let sig = t!(repo.signature()); + let parent = t!(repo.refname_to_id("refs/heads/master")); + let parent = t!(repo.find_commit(parent)); + t!(repo.commit( + Some("HEAD"), + &sig, + &sig, + "Another commit", + &tree, + &[&parent] + )); + } + + return cksum; + } + + fn make_archive(&self) { + let mut manifest = format!( + r#" + [package] + name = "{}" + version = "{}" + authors = [] + "#, + self.name, self.vers + ); + for dep in self.deps.iter() { + let target = match dep.target { + None => String::new(), + Some(ref s) => format!("target.'{}'.", s), + }; + let kind = match &dep.kind[..] 
{ + "build" => "build-", + "dev" => "dev-", + _ => "", + }; + manifest.push_str(&format!( + r#" + [{}{}dependencies.{}] + version = "{}" + "#, + target, kind, dep.name, dep.vers + )); + } + + let dst = self.archive_dst(); + t!(fs::create_dir_all(dst.parent().unwrap())); + let f = t!(File::create(&dst)); + let mut a = Builder::new(GzEncoder::new(f, Compression::default())); + self.append(&mut a, "Cargo.toml", &manifest); + if self.files.is_empty() { + self.append(&mut a, "src/lib.rs", ""); + } else { + for &(ref name, ref contents) in self.files.iter() { + self.append(&mut a, name, contents); + } + } + for &(ref name, ref contents) in self.extra_files.iter() { + self.append_extra(&mut a, name, contents); + } + } + + fn append(&self, ar: &mut Builder, file: &str, contents: &str) { + self.append_extra( + ar, + &format!("{}-{}/{}", self.name, self.vers, file), + contents, + ); + } + + fn append_extra(&self, ar: &mut Builder, path: &str, contents: &str) { + let mut header = Header::new_ustar(); + header.set_size(contents.len() as u64); + t!(header.set_path(path)); + header.set_cksum(); + t!(ar.append(&header, contents.as_bytes())); + } + + pub fn archive_dst(&self) -> PathBuf { + if self.local { + registry_path().join(format!("{}-{}.crate", self.name, self.vers)) + } else if self.alternative { + alt_dl_path() + .join(&self.name) + .join(&self.vers) + .join(&format!("{}-{}.crate", self.name, self.vers)) + } else { + dl_path().join(&self.name).join(&self.vers).join("download") + } + } +} + +pub fn cksum(s: &[u8]) -> String { + let mut sha = Sha256::new(); + sha.update(s); + hex::encode(&sha.finish()) +} diff --git a/tests/testsuite/cfg.rs b/tests/testsuite/cfg.rs new file mode 100644 index 000000000..369f48d16 --- /dev/null +++ b/tests/testsuite/cfg.rs @@ -0,0 +1,447 @@ +use std::str::FromStr; +use std::fmt; + +use cargo::util::{Cfg, CfgExpr}; +use cargotest::rustc_host; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +macro_rules! c { + ($a:ident) => ( + Cfg::Name(stringify!($a).to_string()) + ); + ($a:ident = $e:expr) => ( + Cfg::KeyPair(stringify!($a).to_string(), $e.to_string()) + ); +} + +macro_rules! 
e {
+    (any($($t:tt),*)) => (CfgExpr::Any(vec![$(e!($t)),*]));
+    (all($($t:tt),*)) => (CfgExpr::All(vec![$(e!($t)),*]));
+    (not($($t:tt)*)) => (CfgExpr::Not(Box::new(e!($($t)*))));
+    (($($t:tt)*)) => (e!($($t)*));
+    ($($t:tt)*) => (CfgExpr::Value(c!($($t)*)));
+}
+
+fn good<T>(s: &str, expected: T)
+where
+    T: FromStr + PartialEq + fmt::Debug,
+    T::Err: fmt::Display,
+{
+    let c = match T::from_str(s) {
+        Ok(c) => c,
+        Err(e) => panic!("failed to parse `{}`: {}", s, e),
+    };
+    assert_eq!(c, expected);
+}
+
+fn bad<T>(s: &str, err: &str)
+where
+    T: FromStr + fmt::Display,
+    T::Err: fmt::Display,
+{
+    let e = match T::from_str(s) {
+        Ok(cfg) => panic!("expected `{}` to not parse but got {}", s, cfg),
+        Err(e) => e.to_string(),
+    };
+    assert!(
+        e.contains(err),
+        "when parsing `{}`,\n\"{}\" not contained \
+         inside: {}",
+        s,
+        err,
+        e
+    );
+}
+
+#[test]
+fn cfg_syntax() {
+    good("foo", c!(foo));
+    good("_bar", c!(_bar));
+    good(" foo", c!(foo));
+    good(" foo ", c!(foo));
+    good(" foo = \"bar\"", c!(foo = "bar"));
+    good("foo=\"\"", c!(foo = ""));
+    good(" foo=\"3\" ", c!(foo = "3"));
+    good("foo = \"3 e\"", c!(foo = "3 e"));
+}
+
+#[test]
+fn cfg_syntax_bad() {
+    bad::<Cfg>("", "found nothing");
+    bad::<Cfg>(" ", "found nothing");
+    bad::<Cfg>("\t", "unexpected character");
+    bad::<Cfg>("7", "unexpected character");
+    bad::<Cfg>("=", "expected identifier");
+    bad::<Cfg>(",", "expected identifier");
+    bad::<Cfg>("(", "expected identifier");
+    bad::<Cfg>("foo (", "malformed cfg value");
+    bad::<Cfg>("bar =", "expected a string");
+    bad::<Cfg>("bar = \"", "unterminated string");
+    bad::<Cfg>("foo, bar", "malformed cfg value");
+}
+
+#[test]
+fn cfg_expr() {
+    good("foo", e!(foo));
+    good("_bar", e!(_bar));
+    good(" foo", e!(foo));
+    good(" foo ", e!(foo));
+    good(" foo = \"bar\"", e!(foo = "bar"));
+    good("foo=\"\"", e!(foo = ""));
+    good(" foo=\"3\" ", e!(foo = "3"));
+    good("foo = \"3 e\"", e!(foo = "3 e"));
+
+    good("all()", e!(all()));
+    good("all(a)", e!(all(a)));
+    good("all(a, b)", e!(all(a, b)));
+    good("all(a, )", e!(all(a)));
+    good("not(a = \"b\")", e!(not(a = "b")));
+    good("not(all(a))", e!(not(all(a))));
+}
+
+#[test]
+fn cfg_expr_bad() {
+    bad::<CfgExpr>(" ", "found nothing");
+    bad::<CfgExpr>(" all", "expected `(`");
+    bad::<CfgExpr>("all(a", "expected `)`");
+    bad::<CfgExpr>("not", "expected `(`");
+    bad::<CfgExpr>("not(a", "expected `)`");
+    bad::<CfgExpr>("a = ", "expected a string");
+    bad::<CfgExpr>("all(not())", "expected identifier");
+    bad::<CfgExpr>("foo(a)", "consider using all() or any() explicitly");
+}
+
+#[test]
+fn cfg_matches() {
+    assert!(e!(foo).matches(&[c!(bar), c!(foo), c!(baz)]));
+    assert!(e!(any(foo)).matches(&[c!(bar), c!(foo), c!(baz)]));
+    assert!(e!(any(foo, bar)).matches(&[c!(bar)]));
+    assert!(e!(any(foo, bar)).matches(&[c!(foo)]));
+    assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
+    assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
+    assert!(e!(not(foo)).matches(&[c!(bar)]));
+    assert!(e!(not(foo)).matches(&[]));
+    assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(bar)]));
+    assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo), c!(bar)]));
+
+    assert!(!e!(foo).matches(&[]));
+    assert!(!e!(foo).matches(&[c!(bar)]));
+    assert!(!e!(foo).matches(&[c!(fo)]));
+    assert!(!e!(any(foo)).matches(&[]));
+    assert!(!e!(any(foo)).matches(&[c!(bar)]));
+    assert!(!e!(any(foo)).matches(&[c!(bar), c!(baz)]));
+    assert!(!e!(all(foo)).matches(&[c!(bar), c!(baz)]));
+    assert!(!e!(all(foo, bar)).matches(&[c!(bar)]));
+    assert!(!e!(all(foo, bar)).matches(&[c!(foo)]));
+    assert!(!e!(all(foo, bar)).matches(&[]));
+    assert!(!e!(not(bar)).matches(&[c!(bar)]));
assert!(!e!(not(bar)).matches(&[c!(baz), c!(bar)])); + assert!(!e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo)])); +} + +#[test] +fn cfg_easy() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.dependencies] + b = { path = 'b' } + [target."cfg(windows)".dependencies] + b = { path = 'b' } + "#, + ) + .file("src/lib.rs", "extern crate b;") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#, + ) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn dont_include() { + let other_family = if cfg!(unix) { "windows" } else { "unix" }; + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.dependencies] + b = {{ path = 'b' }} + "#, + other_family + ), + ) + .file("src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#, + ) + .file("b/src/lib.rs", "") + .build(); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn works_through_the_registry() { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0") + .target_dep("foo", "0.1.0", "cfg(unix)") + .target_dep("foo", "0.1.0", "cfg(windows)") + .publish(); + + let p = project("a") + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate bar;", + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry [..] +[DOWNLOADING] [..] +[DOWNLOADING] [..] +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 +[COMPILING] a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn ignore_version_from_other_platform() { + let this_family = if cfg!(unix) { "unix" } else { "windows" }; + let other_family = if cfg!(unix) { "windows" } else { "unix" }; + Package::new("foo", "0.1.0").publish(); + Package::new("foo", "0.2.0").publish(); + + let p = project("a") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.dependencies] + foo = "0.1.0" + + [target.'cfg({})'.dependencies] + foo = "0.2.0" + "#, + this_family, other_family + ), + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate foo;", + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry [..] +[DOWNLOADING] [..] +[COMPILING] foo v0.1.0 +[COMPILING] a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn bad_target_spec() { + let p = project("a") + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(4)'.dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + failed to parse `4` as a cfg expression + +Caused by: + unexpected character in cfg `4`, [..] 
+", + ), + ); +} + +#[test] +fn bad_target_spec2() { + let p = project("a") + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(foo =)'.dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + failed to parse `foo =` as a cfg expression + +Caused by: + expected a string, found nothing +", + ), + ); +} + +#[test] +fn multiple_match_ok() { + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.dependencies] + b = {{ path = 'b' }} + [target.'cfg(target_family = "unix")'.dependencies] + b = {{ path = 'b' }} + [target."cfg(windows)".dependencies] + b = {{ path = 'b' }} + [target.'cfg(target_family = "windows")'.dependencies] + b = {{ path = 'b' }} + [target."cfg(any(windows, unix))".dependencies] + b = {{ path = 'b' }} + + [target.{}.dependencies] + b = {{ path = 'b' }} + "#, + rustc_host() + ), + ) + .file("src/lib.rs", "extern crate b;") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#, + ) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn any_ok() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target."cfg(any(windows, unix))".dependencies] + b = { path = 'b' } + "#, + ) + .file("src/lib.rs", "extern crate b;") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#, + ) + .file("b/src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} diff --git a/tests/testsuite/check-style.sh b/tests/testsuite/check-style.sh new file mode 100755 index 000000000..72d7ac65b --- /dev/null +++ b/tests/testsuite/check-style.sh @@ -0,0 +1,3 @@ +echo "checking for lines over 100 characters..." 
+find src tests -name '*.rs' | xargs grep '.\{101,\}' && exit 1 +echo "ok" diff --git a/tests/testsuite/check.rs b/tests/testsuite/check.rs new file mode 100644 index 000000000..264abf898 --- /dev/null +++ b/tests/testsuite/check.rs @@ -0,0 +1,908 @@ +use cargotest::install::exe; +use cargotest::is_nightly; +use cargotest::support::paths::CargoPathExt; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project}; +use glob::glob; +use hamcrest::{assert_that, existing_file, is_not}; + +const SIMPLE_MANIFEST: &str = r#" +[package] +name = "foo" +version = "0.0.1" +authors = [] +"#; + +#[test] +fn check_success() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { + ::bar::baz(); + } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + + assert_that(foo.cargo("check"), execs().with_status(0)); +} + +#[test] +fn check_fail() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { + ::bar::baz(42); + } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + + assert_that(foo.cargo("check"), execs().with_status(101)); +} + +#[test] +fn custom_derive() { + if !is_nightly() { + return; + } + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" +#![feature(proc_macro)] + +#[macro_use] +extern crate bar; + +trait B { + fn b(&self); +} + +#[derive(B)] +struct A; + +fn main() { + let a = A; + a.b(); +} +"#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + [lib] + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" +#![feature(proc_macro, proc_macro_lib)] +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::TokenStream; + +#[proc_macro_derive(B)] +pub fn derive(_input: TokenStream) -> TokenStream { + format!("impl B for A {{ fn b(&self) {{}} }}").parse().unwrap() +} +"#, + ) + .build(); + + assert_that(foo.cargo("check"), execs().with_status(0)); +} + +#[test] +fn check_build() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { + ::bar::baz(); + } + "#, + ) + .build(); + + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + + assert_that(foo.cargo("check"), execs().with_status(0)); + assert_that(foo.cargo("build"), execs().with_status(0)); +} + +#[test] +fn build_check() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + 
authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        )
+        .file(
+            "src/main.rs",
+            r#"
+            extern crate bar;
+            fn main() {
+                ::bar::baz();
+            }
+        "#,
+        )
+        .build();
+
+    let _bar = project("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file(
+            "src/lib.rs",
+            r#"
+            pub fn baz() {}
+        "#,
+        )
+        .build();
+
+    assert_that(foo.cargo("build"), execs().with_status(0));
+    assert_that(foo.cargo("check"), execs().with_status(0));
+}
+
+// Checks that where a project has both a lib and a bin, the lib is only
+// checked, not built.
+#[test]
+fn issue_3418() {
+    let foo = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    assert_that(
+        foo.cargo("check").arg("-v"),
+        execs()
+            .with_status(0)
+            .with_stderr_contains("[..] --emit=dep-info,metadata [..]"),
+    );
+}
+
+// Some weirdness that seems to be caused by a crate being built as well as
+// checked, but in this case with a proc macro too.
+#[test]
+fn issue_3419() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            rustc-serialize = "*"
+        "#,
+        )
+        .file(
+            "src/lib.rs",
+            r#"
+            extern crate rustc_serialize;
+
+            use rustc_serialize::Decodable;
+
+            pub fn take<T: Decodable>() {}
+        "#,
+        )
+        .file(
+            "src/main.rs",
+            r#"
+            extern crate rustc_serialize;
+
+            extern crate foo;
+
+            #[derive(RustcDecodable)]
+            pub struct Foo;
+
+            fn main() {
+                foo::take::<Foo>();
+            }
+        "#,
+        )
+        .build();
+
+    Package::new("rustc-serialize", "1.0.0")
+        .file(
+            "src/lib.rs",
+            r#"pub trait Decodable: Sized {
+                fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error>;
+            }
+            pub trait Decoder {
+                type Error;
+                fn read_struct<T, F>(&mut self, s_name: &str, len: usize, f: F)
+                    -> Result<T, Self::Error>
+                where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+            } "#,
+        )
+        .publish();
+
+    assert_that(p.cargo("check"), execs().with_status(0));
+}
+
+// A `check` on a dylib should use a different metadata hash than `build`, so
+// it does not clobber the build cache.
+#[test]
+fn dylib_check_preserves_build_cache() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [lib]
+            crate-type = ["dylib"]
+
+            [dependencies]
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .build();
+
+    assert_that(
+        p.cargo("build"),
+        execs().with_status(0).with_stderr(
+            "\
+[..]Compiling foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+", + ), + ); + + assert_that(p.cargo("check"), execs().with_status(0)); + + assert_that( + p.cargo("build"), + execs() + .with_status(0) + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"), + ); +} + +// test `cargo rustc --profile check` +#[test] +fn rustc_check() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { + ::bar::baz(); + } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + + assert_that( + foo.cargo("rustc") + .arg("--profile") + .arg("check") + .arg("--") + .arg("--emit=metadata"), + execs().with_status(0), + ); +} + +#[test] +fn rustc_check_err() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { + ::bar::qux(); + } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + + assert_that( + foo.cargo("rustc") + .arg("--profile") + .arg("check") + .arg("--") + .arg("--emit=metadata"), + execs().with_status(101), + ); +} + +#[test] +fn check_all() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [workspace] + [dependencies] + b = { path = "b" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("examples/a.rs", "fn main() {}") + .file("tests/a.rs", "") + .file("src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#, + ) + .file("b/src/main.rs", "fn main() {}") + .file("b/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("check").arg("--all").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..]") + .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..]") + .with_stderr_contains("[..] --crate-name b b[/]src[/]lib.rs [..]") + .with_stderr_contains("[..] --crate-name b b[/]src[/]main.rs [..]"), + ); +} + +#[test] +fn check_virtual_all_implied() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file( + "foo/src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + assert_that( + p.cargo("check").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[..] --crate-name foo foo[/]src[/]lib.rs [..]") + .with_stderr_contains("[..] 
--crate-name bar bar[/]src[/]lib.rs [..]"), + ); +} + +#[test] +fn targets_selected_default() { + let foo = project("foo") + .file("Cargo.toml", SIMPLE_MANIFEST) + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", "pub fn smth() {}") + .file("examples/example1.rs", "fn main() {}") + .file("tests/test2.rs", "#[test] fn t() {}") + .file("benches/bench3.rs", "") + .build(); + + assert_that( + foo.cargo("check").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..]") + .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..]") + .with_stderr_does_not_contain("[..] --crate-name example1 examples[/]example1.rs [..]") + .with_stderr_does_not_contain("[..] --crate-name test2 tests[/]test2.rs [..]") + .with_stderr_does_not_contain("[..] --crate-name bench3 benches[/]bench3.rs [..]"), + ); +} + +#[test] +fn targets_selected_all() { + let foo = project("foo") + .file("Cargo.toml", SIMPLE_MANIFEST) + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", "pub fn smth() {}") + .file("examples/example1.rs", "fn main() {}") + .file("tests/test2.rs", "#[test] fn t() {}") + .file("benches/bench3.rs", "") + .build(); + + assert_that( + foo.cargo("check").arg("--all-targets").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..]") + .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..]") + .with_stderr_contains("[..] --crate-name example1 examples[/]example1.rs [..]") + .with_stderr_contains("[..] --crate-name test2 tests[/]test2.rs [..]") + .with_stderr_contains("[..] --crate-name bench3 benches[/]bench3.rs [..]"), + ); +} + +#[test] +fn check_unit_test_profile() { + let foo = project("foo") + .file("Cargo.toml", SIMPLE_MANIFEST) + .file( + "src/lib.rs", + r#" + #[cfg(test)] + mod tests { + #[test] + fn it_works() { + badtext + } + } + "#, + ) + .build(); + + assert_that(foo.cargo("check"), execs().with_status(0)); + assert_that( + foo.cargo("check").arg("--profile").arg("test"), + execs() + .with_status(101) + .with_stderr_contains("[..]badtext[..]"), + ); +} + +// Verify what is checked with various command-line filters. +#[test] +fn check_filters() { + let p = project("foo") + .file("Cargo.toml", SIMPLE_MANIFEST) + .file( + "src/lib.rs", + r#" + fn unused_normal_lib() {} + #[cfg(test)] + mod tests { + fn unused_unit_lib() {} + } + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + fn unused_normal_bin() {} + #[cfg(test)] + mod tests { + fn unused_unit_bin() {} + } + "#, + ) + .file( + "tests/t1.rs", + r#" + fn unused_normal_t1() {} + #[cfg(test)] + mod tests { + fn unused_unit_t1() {} + } + "#, + ) + .file( + "examples/ex1.rs", + r#" + fn main() {} + fn unused_normal_ex1() {} + #[cfg(test)] + mod tests { + fn unused_unit_ex1() {} + } + "#, + ) + .file( + "benches/b1.rs", + r#" + fn unused_normal_b1() {} + #[cfg(test)] + mod tests { + fn unused_unit_b1() {} + } + "#, + ) + .build(); + + assert_that( + p.cargo("check"), + execs() + .with_status(0) + .with_stderr_contains("[..]unused_normal_lib[..]") + .with_stderr_contains("[..]unused_normal_bin[..]") + .with_stderr_does_not_contain("unused_normal_t1") + .with_stderr_does_not_contain("unused_normal_ex1") + .with_stderr_does_not_contain("unused_normal_b1") + .with_stderr_does_not_contain("unused_unit_"), + ); + p.root().join("target").rm_rf(); + assert_that( + p.cargo("check").arg("--tests").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..] 
--test [..]") + .with_stderr_contains("[..] --crate-name foo src[/]lib.rs --crate-type lib [..]") + .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..] --test [..]") + .with_stderr_contains("[..] --crate-name foo src[/]main.rs --crate-type bin [..]") + .with_stderr_contains("[..]unused_unit_lib[..]") + .with_stderr_contains("[..]unused_unit_bin[..]") + .with_stderr_contains("[..]unused_normal_lib[..]") + .with_stderr_contains("[..]unused_normal_bin[..]") + .with_stderr_contains("[..]unused_unit_t1[..]") + .with_stderr_contains("[..]unused_normal_ex1[..]") + .with_stderr_contains("[..]unused_unit_ex1[..]") + .with_stderr_does_not_contain("unused_normal_b1") + .with_stderr_does_not_contain("unused_unit_b1"), + ); + p.root().join("target").rm_rf(); + assert_that( + p.cargo("check").arg("--test").arg("t1").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[..]unused_normal_lib[..]") + .with_stderr_contains("[..]unused_normal_bin[..]") + .with_stderr_contains("[..]unused_unit_t1[..]") + .with_stderr_does_not_contain("unused_unit_lib") + .with_stderr_does_not_contain("unused_unit_bin") + .with_stderr_does_not_contain("unused_normal_ex1") + .with_stderr_does_not_contain("unused_normal_b1") + .with_stderr_does_not_contain("unused_unit_ex1") + .with_stderr_does_not_contain("unused_unit_b1"), + ); + p.root().join("target").rm_rf(); + assert_that( + p.cargo("check").arg("--all-targets").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[..]unused_normal_lib[..]") + .with_stderr_contains("[..]unused_normal_bin[..]") + .with_stderr_contains("[..]unused_normal_t1[..]") + .with_stderr_contains("[..]unused_normal_ex1[..]") + .with_stderr_contains("[..]unused_normal_b1[..]") + .with_stderr_contains("[..]unused_unit_b1[..]") + .with_stderr_contains("[..]unused_unit_t1[..]") + .with_stderr_contains("[..]unused_unit_lib[..]") + .with_stderr_contains("[..]unused_unit_bin[..]") + .with_stderr_contains("[..]unused_unit_ex1[..]"), + ); +} + +#[test] +fn check_artifacts() { + // Verify which artifacts are created when running check (#4059). 
+ let p = project("foo") + .file("Cargo.toml", SIMPLE_MANIFEST) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("tests/t1.rs", "") + .file("examples/ex1.rs", "fn main() {}") + .file("benches/b1.rs", "") + .build(); + assert_that(p.cargo("check"), execs().with_status(0)); + assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file()); + assert_that( + &p.root().join("target/debug/libfoo.rlib"), + is_not(existing_file()), + ); + assert_that( + &p.root().join("target/debug").join(exe("foo")), + is_not(existing_file()), + ); + + p.root().join("target").rm_rf(); + assert_that(p.cargo("check").arg("--lib"), execs().with_status(0)); + assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file()); + assert_that( + &p.root().join("target/debug/libfoo.rlib"), + is_not(existing_file()), + ); + assert_that( + &p.root().join("target/debug").join(exe("foo")), + is_not(existing_file()), + ); + + p.root().join("target").rm_rf(); + assert_that( + p.cargo("check").arg("--bin").arg("foo"), + execs().with_status(0), + ); + assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file()); + assert_that( + &p.root().join("target/debug/libfoo.rlib"), + is_not(existing_file()), + ); + assert_that( + &p.root().join("target/debug").join(exe("foo")), + is_not(existing_file()), + ); + + p.root().join("target").rm_rf(); + assert_that( + p.cargo("check").arg("--test").arg("t1"), + execs().with_status(0), + ); + assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file()); + assert_that( + &p.root().join("target/debug/libfoo.rlib"), + is_not(existing_file()), + ); + assert_that( + &p.root().join("target/debug").join(exe("foo")), + is_not(existing_file()), + ); + assert_eq!( + glob(&p.root().join("target/debug/t1-*").to_str().unwrap()) + .unwrap() + .count(), + 0 + ); + + p.root().join("target").rm_rf(); + assert_that( + p.cargo("check").arg("--example").arg("ex1"), + execs().with_status(0), + ); + assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file()); + assert_that( + &p.root().join("target/debug/libfoo.rlib"), + is_not(existing_file()), + ); + assert_that( + &p.root().join("target/debug/examples").join(exe("ex1")), + is_not(existing_file()), + ); + + p.root().join("target").rm_rf(); + assert_that( + p.cargo("check").arg("--bench").arg("b1"), + execs().with_status(0), + ); + assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file()); + assert_that( + &p.root().join("target/debug/libfoo.rlib"), + is_not(existing_file()), + ); + assert_that( + &p.root().join("target/debug").join(exe("foo")), + is_not(existing_file()), + ); + assert_eq!( + glob(&p.root().join("target/debug/b1-*").to_str().unwrap()) + .unwrap() + .count(), + 0 + ); +} diff --git a/tests/testsuite/clean.rs b/tests/testsuite/clean.rs new file mode 100644 index 000000000..f5a409f67 --- /dev/null +++ b/tests/testsuite/clean.rs @@ -0,0 +1,317 @@ +use std::env; + +use cargotest::support::{basic_bin_manifest, execs, git, main_file, project}; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_dir, existing_file, is_not}; + +#[test] +fn cargo_clean_simple() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.build_dir(), existing_dir()); + + assert_that(p.cargo("clean"), execs().with_status(0)); + assert_that(&p.build_dir(), is_not(existing_dir())); +} + +#[test] 
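+// (The interesting bit here: `cargo clean` is run from `src/`, so it has to
+// discover the enclosing project by walking up from the cwd before it can
+// remove the shared `target` directory.)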
+fn different_dir() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .file("src/bar/a.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.build_dir(), existing_dir()); + + assert_that( + p.cargo("clean").cwd(&p.root().join("src")), + execs().with_status(0).with_stdout(""), + ); + assert_that(&p.build_dir(), is_not(existing_dir())); +} + +#[test] +fn clean_multiple_packages() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [[bin]] + name = "foo" + "#, + ) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d1" + "#, + ) + .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d2" + "#, + ) + .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }") + .build(); + + assert_that(p.cargo("build -p d1 -p d2 -p foo"), execs().with_status(0)); + + let d1_path = &p.build_dir() + .join("debug") + .join(format!("d1{}", env::consts::EXE_SUFFIX)); + let d2_path = &p.build_dir() + .join("debug") + .join(format!("d2{}", env::consts::EXE_SUFFIX)); + + assert_that(&p.bin("foo"), existing_file()); + assert_that(d1_path, existing_file()); + assert_that(d2_path, existing_file()); + + assert_that( + p.cargo("clean -p d1 -p d2").cwd(&p.root().join("src")), + execs().with_status(0).with_stdout(""), + ); + assert_that(&p.bin("foo"), existing_file()); + assert_that(d1_path, is_not(existing_file())); + assert_that(d2_path, is_not(existing_file())); +} + +#[test] +fn clean_release() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--release"), execs().with_status(0)); + + assert_that( + p.cargo("clean").arg("-p").arg("foo"), + execs().with_status(0), + ); + assert_that( + p.cargo("build").arg("--release"), + execs().with_status(0).with_stdout(""), + ); + + assert_that( + p.cargo("clean").arg("-p").arg("foo").arg("--release"), + execs().with_status(0), + ); + assert_that( + p.cargo("build").arg("--release"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn build_script() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "build.rs", + r#" + use std::path::PathBuf; + use std::env; + + fn main() { + let out = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + if env::var("FIRST").is_ok() { + std::fs::File::create(out.join("out")).unwrap(); + } else { + assert!(!std::fs::metadata(out.join("out")).is_ok()); + } + } + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").env("FIRST", "1"), execs().with_status(0)); + assert_that( + p.cargo("clean").arg("-p").arg("foo"), + execs().with_status(0), + ); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `[..]build-script-build` +[RUNNING] `rustc [..] src[/]main.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn clean_git() { + let git = git::new("dep", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + name = "dep" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + dep = {{ git = '{}' }} + "#, + git.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that( + p.cargo("clean").arg("-p").arg("dep"), + execs().with_status(0).with_stdout(""), + ); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn registry() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.1.0").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that( + p.cargo("clean").arg("-p").arg("bar"), + execs().with_status(0).with_stdout(""), + ); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn clean_verbose() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.1.0").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that( + p.cargo("clean").arg("-p").arg("bar").arg("--verbose"), + execs().with_status(0).with_stderr( + "\ +[REMOVING] [..] +[REMOVING] [..] 
+", + ), + ); + assert_that(p.cargo("build"), execs().with_status(0)); +} diff --git a/tests/testsuite/concurrent.rs b/tests/testsuite/concurrent.rs new file mode 100644 index 000000000..9c777e2cc --- /dev/null +++ b/tests/testsuite/concurrent.rs @@ -0,0 +1,639 @@ +use std::{env, str}; +use std::fs::{self, File}; +use std::io::Write; +use std::net::TcpListener; +use std::process::Stdio; +use std::thread; +use std::sync::mpsc::channel; +use std::time::Duration; + +use git2; +use cargotest; +use cargotest::install::{cargo_home, has_installed_exe}; +use cargotest::support::git; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project}; +use hamcrest::{assert_that, existing_file}; + +fn pkg(name: &str, vers: &str) { + Package::new(name, vers) + .file("src/main.rs", "fn main() {{}}") + .publish(); +} + +#[test] +fn multiple_installs() { + let p = project("foo") + .file( + "a/Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + "#, + ) + .file("a/src/main.rs", "fn main() {}") + .file( + "b/Cargo.toml", + r#" + [package] + name = "bar" + authors = [] + version = "0.0.0" + "#, + ) + .file("b/src/main.rs", "fn main() {}"); + let p = p.build(); + + let mut a = p.cargo("install").cwd(p.root().join("a")).build_command(); + let mut b = p.cargo("install").cwd(p.root().join("b")).build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); + + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), has_installed_exe("bar")); +} + +#[test] +fn concurrent_installs() { + const LOCKED_BUILD: &'static str = "waiting for file lock on build directory"; + + pkg("foo", "0.0.1"); + pkg("bar", "0.0.1"); + + let mut a = cargotest::cargo_process() + .arg("install") + .arg("foo") + .build_command(); + let mut b = cargotest::cargo_process() + .arg("install") + .arg("bar") + .build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert!(!str::from_utf8(&a.stderr).unwrap().contains(LOCKED_BUILD)); + assert!(!str::from_utf8(&b.stderr).unwrap().contains(LOCKED_BUILD)); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); + + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), has_installed_exe("bar")); +} + +#[test] +fn one_install_should_be_bad() { + let p = project("foo") + .file( + "a/Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + "#, + ) + .file("a/src/main.rs", "fn main() {}") + .file( + "b/Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + "#, + ) + .file("b/src/main.rs", "fn main() {}"); + let p = p.build(); + + let mut a = p.cargo("install").cwd(p.root().join("a")).build_command(); + let mut b = p.cargo("install").cwd(p.root().join("b")).build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = 
b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + let (bad, good) = if a.status.code() == Some(101) { + (a, b) + } else { + (b, a) + }; + assert_that( + bad, + execs().with_status(101).with_stderr_contains( + "[ERROR] binary `foo[..]` already exists in destination as part of `[..]`", + ), + ); + assert_that( + good, + execs() + .with_status(0) + .with_stderr_contains("warning: be sure to add `[..]` to your PATH [..]"), + ); + + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn multiple_registry_fetches() { + let mut pkg = Package::new("bar", "1.0.2"); + for i in 0..10 { + let name = format!("foo{}", i); + Package::new(&name, "1.0.0").publish(); + pkg.dep(&name, "*"); + } + pkg.publish(); + + let p = project("foo") + .file( + "a/Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + bar = "*" + "#, + ) + .file("a/src/main.rs", "fn main() {}") + .file( + "b/Cargo.toml", + r#" + [package] + name = "bar" + authors = [] + version = "0.0.0" + + [dependencies] + bar = "*" + "#, + ) + .file("b/src/main.rs", "fn main() {}"); + let p = p.build(); + + let mut a = p.cargo("build").cwd(p.root().join("a")).build_command(); + let mut b = p.cargo("build").cwd(p.root().join("b")).build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); + + let suffix = env::consts::EXE_SUFFIX; + assert_that( + &p.root() + .join("a/target/debug") + .join(format!("foo{}", suffix)), + existing_file(), + ); + assert_that( + &p.root() + .join("b/target/debug") + .join(format!("bar{}", suffix)), + existing_file(), + ); +} + +#[test] +fn git_same_repo_different_tags() { + let a = git::new("dep", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + name = "dep" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn tag1() {}") + }).unwrap(); + + let repo = git2::Repository::open(&a.root()).unwrap(); + git::tag(&repo, "tag1"); + + File::create(a.root().join("src/lib.rs")) + .unwrap() + .write_all(b"pub fn tag2() {}") + .unwrap(); + git::add(&repo); + git::commit(&repo); + git::tag(&repo, "tag2"); + + let p = project("foo") + .file( + "a/Cargo.toml", + &format!( + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}', tag = 'tag1' }} + "#, + a.url() + ), + ) + .file( + "a/src/main.rs", + "extern crate dep; fn main() { dep::tag1(); }", + ) + .file( + "b/Cargo.toml", + &format!( + r#" + [package] + name = "bar" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}', tag = 'tag2' }} + "#, + a.url() + ), + ) + .file( + "b/src/main.rs", + "extern crate dep; fn main() { dep::tag2(); }", + ); + let p = p.build(); + + let mut a = p.cargo("build") + .arg("-v") + .cwd(p.root().join("a")) + .build_command(); + let mut b = p.cargo("build") + .arg("-v") + .cwd(p.root().join("b")) + .build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + 
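    // (Both clones pull from the same git source; per the concurrency notes
+    // in ARCHITECTURE.md, Cargo's file locks are what make the shared
+    // ~/.cargo/git data safe to touch from two processes at once.)
+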
let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); +} + +#[test] +fn git_same_branch_different_revs() { + let a = git::new("dep", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + name = "dep" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn f1() {}") + }).unwrap(); + + let p = project("foo") + .file( + "a/Cargo.toml", + &format!( + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}' }} + "#, + a.url() + ), + ) + .file( + "a/src/main.rs", + "extern crate dep; fn main() { dep::f1(); }", + ) + .file( + "b/Cargo.toml", + &format!( + r#" + [package] + name = "bar" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}' }} + "#, + a.url() + ), + ) + .file( + "b/src/main.rs", + "extern crate dep; fn main() { dep::f2(); }", + ); + let p = p.build(); + + // Generate a Cargo.lock pointing at the current rev, then clear out the + // target directory + assert_that( + p.cargo("build").cwd(p.root().join("a")), + execs().with_status(0), + ); + fs::remove_dir_all(p.root().join("a/target")).unwrap(); + + // Make a new commit on the master branch + let repo = git2::Repository::open(&a.root()).unwrap(); + File::create(a.root().join("src/lib.rs")) + .unwrap() + .write_all(b"pub fn f2() {}") + .unwrap(); + git::add(&repo); + git::commit(&repo); + + // Now run both builds in parallel. The build of `b` should pick up the + // newest commit while the build of `a` should use the locked old commit. + let mut a = p.cargo("build").cwd(p.root().join("a")).build_command(); + let mut b = p.cargo("build").cwd(p.root().join("b")).build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); +} + +#[test] +fn same_project() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", ""); + let p = p.build(); + + let mut a = p.cargo("build").build_command(); + let mut b = p.cargo("build").build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that(a, execs().with_status(0)); + assert_that(b, execs().with_status(0)); +} + +// Make sure that if Cargo dies while holding a lock that it's released and the +// next Cargo to come in will take over cleanly. 
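+// (The lock at stake here is presumably the file lock on the target
+// directory; the OS drops file locks when the holding process dies, which is
+// what lets the second Cargo proceed without any cleanup by the first.)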
+// older win versions don't support job objects, so skip test there +#[test] +#[cfg_attr(target_os = "windows", ignore)] +fn killing_cargo_releases_the_lock() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + build = "build.rs" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "build.rs", + r#" + use std::net::TcpStream; + + fn main() { + if std::env::var("A").is_ok() { + TcpStream::connect(&std::env::var("ADDR").unwrap()[..]) + .unwrap(); + std::thread::sleep(std::time::Duration::new(10, 0)); + } + } + "#, + ); + let p = p.build(); + + // Our build script will connect to our local TCP socket to inform us that + // it's started and that's how we know that `a` will have the lock + // when we kill it. + let l = TcpListener::bind("127.0.0.1:0").unwrap(); + let mut a = p.cargo("build").build_command(); + let mut b = p.cargo("build").build_command(); + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + a.env("ADDR", l.local_addr().unwrap().to_string()) + .env("A", "a"); + b.env("ADDR", l.local_addr().unwrap().to_string()) + .env_remove("A"); + + // Spawn `a`, wait for it to get to the build script (at which point the + // lock is held), then kill it. + let mut a = a.spawn().unwrap(); + l.accept().unwrap(); + a.kill().unwrap(); + + // Spawn `b`, then just finish the output of a/b the same way the above + // tests does. + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + // We killed `a`, so it shouldn't succeed, but `b` should have succeeded. + assert!(!a.status.success()); + assert_that(b, execs().with_status(0)); +} + +#[test] +fn debug_release_ok() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + fs::remove_dir_all(p.root().join("target")).unwrap(); + + let mut a = p.cargo("build").build_command(); + let mut b = p.cargo("build").arg("--release").build_command(); + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert_that( + a, + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.0 [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + assert_that( + b, + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.0 [..] +[FINISHED] release [optimized] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn no_deadlock_with_git_dependencies() { + let dep1 = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + name = "dep1" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + }).unwrap(); + + let dep2 = git::new("dep2", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + name = "dep2" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + dep1 = {{ git = '{}' }} + dep2 = {{ git = '{}' }} + "#, + dep1.url(), + dep2.url() + ), + ) + .file("src/main.rs", "fn main() { }"); + let p = p.build(); + + let n_concurrent_builds = 5; + + let (tx, rx) = channel(); + for _ in 0..n_concurrent_builds { + let cmd = p.cargo("build") + .build_command() + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn(); + let tx = tx.clone(); + thread::spawn(move || { + let result = cmd.unwrap().wait_with_output().unwrap(); + tx.send(result).unwrap() + }); + } + + //TODO: use `Receiver::recv_timeout` once it is stable. + let recv_timeout = |chan: &::std::sync::mpsc::Receiver<_>| { + for _ in 0..3000 { + if let Ok(x) = chan.try_recv() { + return x; + } + thread::sleep(Duration::from_millis(10)); + } + chan.try_recv().expect("Deadlock!") + }; + + for _ in 0..n_concurrent_builds { + let result = recv_timeout(&rx); + assert_that(result, execs().with_status(0)) + } +} diff --git a/tests/testsuite/config.rs b/tests/testsuite/config.rs new file mode 100644 index 000000000..f76c278e9 --- /dev/null +++ b/tests/testsuite/config.rs @@ -0,0 +1,33 @@ +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn read_env_vars_for_config() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + fn main() { + assert_eq!(env::var("NUM_JOBS").unwrap(), "100"); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build").env("CARGO_BUILD_JOBS", "100"), + execs().with_status(0), + ); +} diff --git a/tests/testsuite/corrupt_git.rs b/tests/testsuite/corrupt_git.rs new file mode 100644 index 000000000..88a4defe1 --- /dev/null +++ b/tests/testsuite/corrupt_git.rs @@ -0,0 +1,187 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +use cargo::util::paths as cargopaths; +use cargotest::support::paths; +use cargotest::support::{execs, git, project}; +use hamcrest::assert_that; + +#[test] +fn deleting_database_files() { + let project = project("foo"); + let git_project = git::new("bar", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + }).unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = {{ git = '{}' }} + "#, + git_project.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that(project.cargo("build"), execs().with_status(0)); + + let mut files = Vec::new(); + find_files(&paths::home().join(".cargo/git/db"), &mut files); + assert!(files.len() > 0); + + let log = "cargo::sources::git=trace"; + for file in files { + if !file.exists() { + continue; + } + println!("deleting {}", file.display()); + cargopaths::remove_file(&file).unwrap(); + assert_that( + 
project.cargo("build").env("RUST_LOG", log).arg("-v"), + execs().with_status(0), + ); + + if !file.exists() { + continue; + } + println!("truncating {}", file.display()); + make_writable(&file); + fs::OpenOptions::new() + .write(true) + .open(&file) + .unwrap() + .set_len(2) + .unwrap(); + assert_that( + project.cargo("build").env("RUST_LOG", log).arg("-v"), + execs().with_status(0), + ); + } +} + +#[test] +fn deleting_checkout_files() { + let project = project("foo"); + let git_project = git::new("bar", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + }).unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = {{ git = '{}' }} + "#, + git_project.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that(project.cargo("build"), execs().with_status(0)); + + let dir = paths::home() + .join(".cargo/git/checkouts") + // get the first entry in the checkouts dir for the package's location + .read_dir() + .unwrap() + .next() + .unwrap() + .unwrap() + .path() + // get the first child of that checkout dir for our checkout + .read_dir() + .unwrap() + .next() + .unwrap() + .unwrap() + .path() + // and throw on .git to corrupt things + .join(".git"); + let mut files = Vec::new(); + find_files(&dir, &mut files); + assert!(files.len() > 0); + + let log = "cargo::sources::git=trace"; + for file in files { + if !file.exists() { + continue; + } + println!("deleting {}", file.display()); + cargopaths::remove_file(&file).unwrap(); + assert_that( + project.cargo("build").env("RUST_LOG", log).arg("-v"), + execs().with_status(0), + ); + + if !file.exists() { + continue; + } + println!("truncating {}", file.display()); + make_writable(&file); + fs::OpenOptions::new() + .write(true) + .open(&file) + .unwrap() + .set_len(2) + .unwrap(); + assert_that( + project.cargo("build").env("RUST_LOG", log).arg("-v"), + execs().with_status(0), + ); + } +} + +fn make_writable(path: &Path) { + let mut p = path.metadata().unwrap().permissions(); + p.set_readonly(false); + fs::set_permissions(path, p).unwrap(); +} + +fn find_files(path: &Path, dst: &mut Vec) { + for e in path.read_dir().unwrap() { + let e = e.unwrap(); + let path = e.path(); + if e.file_type().unwrap().is_dir() { + find_files(&path, dst); + } else { + dst.push(path); + } + } +} diff --git a/tests/testsuite/cross_compile.rs b/tests/testsuite/cross_compile.rs new file mode 100644 index 000000000..c17a6fd71 --- /dev/null +++ b/tests/testsuite/cross_compile.rs @@ -0,0 +1,1407 @@ +use cargo::util::process; +use cargotest::{is_nightly, rustc_host}; +use cargotest::support::{basic_bin_manifest, cross_compile, execs, project}; +use hamcrest::{assert_that, existing_file}; + +#[test] +fn simple_cross() { + if cross_compile::disabled() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + &format!( + r#" + fn main() {{ + assert_eq!(std::env::var("TARGET").unwrap(), "{}"); + }} + "#, + cross_compile::alternate() + ), + ) + .file( + "src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + assert_that( + p.cargo("build").arg("--target").arg(&target).arg("-v"), + 
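    // (Two assumptions are baked into this assert: the alternate triple from
+    // the test harness is installed, and, for the `process(..)` run right
+    // after, the host can actually execute binaries built for it; hence the
+    // early return when cross compilation is disabled.)
+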
execs().with_status(0), + ); + assert_that(&p.target_bin(&target, "foo"), existing_file()); + + assert_that( + process(&p.target_bin(&target, "foo")), + execs().with_status(0), + ); +} + +#[test] +fn simple_cross_config() { + if cross_compile::disabled() { + return; + } + + let p = project("foo") + .file( + ".cargo/config", + &format!( + r#" + [build] + target = "{}" + "#, + cross_compile::alternate() + ), + ) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + &format!( + r#" + fn main() {{ + assert_eq!(std::env::var("TARGET").unwrap(), "{}"); + }} + "#, + cross_compile::alternate() + ), + ) + .file( + "src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + assert_that(&p.target_bin(&target, "foo"), existing_file()); + + assert_that( + process(&p.target_bin(&target, "foo")), + execs().with_status(0), + ); +} + +#[test] +fn simple_deps() { + if cross_compile::disabled() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { bar::bar(); } + "#, + ) + .build(); + let _p2 = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn bar() {}") + .build(); + + let target = cross_compile::alternate(); + assert_that( + p.cargo("build").arg("--target").arg(&target), + execs().with_status(0), + ); + assert_that(&p.target_bin(&target, "foo"), existing_file()); + + assert_that( + process(&p.target_bin(&target, "foo")), + execs().with_status(0), + ); +} + +#[test] +fn plugin_deps() { + if cross_compile::disabled() { + return; + } + if !is_nightly() { + return; + } + + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(plugin)] + #![plugin(bar)] + extern crate baz; + fn main() { + assert_eq!(bar!(), baz::baz()); + } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(plugin_registrar, quote, rustc_private)] + + extern crate rustc_plugin; + extern crate syntax; + + use rustc_plugin::Registry; + use syntax::tokenstream::TokenTree; + use syntax::codemap::Span; + use syntax::ast::*; + use syntax::ext::base::{ExtCtxt, MacEager, MacResult}; + use syntax::ext::build::AstBuilder; + + #[plugin_registrar] + pub fn foo(reg: &mut Registry) { + reg.register_macro("bar", expand_bar); + } + + fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) + -> Box { + MacEager::expr(cx.expr_lit(sp, LitKind::Int(1, LitIntType::Unsuffixed))) + } + "#, + ) + .build(); + let _baz = project("baz") + .file( + "Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") + .build(); + + let target = cross_compile::alternate(); + assert_that( + 
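+        // (A cross-compilation wrinkle worth flagging: compiler plugins load
+        // into the host rustc, so the plugin crate `bar` is built for the
+        // host even though `--target` selects the alternate triple; only
+        // `foo` and `baz` are cross-compiled.)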
foo.cargo("build").arg("--target").arg(&target), + execs().with_status(0), + ); + assert_that(&foo.target_bin(&target, "foo"), existing_file()); + + assert_that( + process(&foo.target_bin(&target, "foo")), + execs().with_status(0), + ); +} + +#[test] +fn plugin_to_the_max() { + if cross_compile::disabled() { + return; + } + if !is_nightly() { + return; + } + + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(plugin)] + #![plugin(bar)] + extern crate baz; + fn main() { + assert_eq!(bar!(), baz::baz()); + } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(plugin_registrar, quote, rustc_private)] + + extern crate rustc_plugin; + extern crate syntax; + extern crate baz; + + use rustc_plugin::Registry; + use syntax::tokenstream::TokenTree; + use syntax::codemap::Span; + use syntax::ast::*; + use syntax::ext::base::{ExtCtxt, MacEager, MacResult}; + use syntax::ext::build::AstBuilder; + use syntax::ptr::P; + + #[plugin_registrar] + pub fn foo(reg: &mut Registry) { + reg.register_macro("bar", expand_bar); + } + + fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) + -> Box { + let bar = Ident::from_str("baz"); + let path = cx.path(sp, vec![bar.clone(), bar]); + MacEager::expr(cx.expr_call(sp, cx.expr_path(path), vec![])) + } + "#, + ) + .build(); + let _baz = project("baz") + .file( + "Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") + .build(); + + let target = cross_compile::alternate(); + assert_that( + foo.cargo("build").arg("--target").arg(&target).arg("-v"), + execs().with_status(0), + ); + println!("second"); + assert_that( + foo.cargo("build").arg("-v").arg("--target").arg(&target), + execs().with_status(0), + ); + assert_that(&foo.target_bin(&target, "foo"), existing_file()); + + assert_that( + process(&foo.target_bin(&target, "foo")), + execs().with_status(0), + ); +} + +#[test] +fn linker_and_ar() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + let p = project("foo") + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + ar = "my-ar-tool" + linker = "my-linker-tool" + "#, + target + ), + ) + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/foo.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + assert_that( + p.cargo("build").arg("--target").arg(&target).arg("-v"), + execs().with_status(101).with_stderr_contains(&format!( + "\ +[COMPILING] foo v0.5.0 ({url}) +[RUNNING] `rustc --crate-name foo src[/]foo.rs --crate-type bin \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] 
\ + --out-dir {dir}[/]target[/]{target}[/]debug[/]deps \ + --target {target} \ + -C ar=my-ar-tool -C linker=my-linker-tool \ + -L dependency={dir}[/]target[/]{target}[/]debug[/]deps \ + -L dependency={dir}[/]target[/]debug[/]deps` +", + dir = p.root().display(), + url = p.url(), + target = target, + )), + ); +} + +#[test] +fn plugin_with_extra_dylib_dep() { + if cross_compile::disabled() { + return; + } + if !is_nightly() { + return; + } + + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(plugin)] + #![plugin(bar)] + + fn main() {} + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(plugin_registrar, rustc_private)] + + extern crate rustc_plugin; + extern crate baz; + + use rustc_plugin::Registry; + + #[plugin_registrar] + pub fn foo(reg: &mut Registry) { + println!("{}", baz::baz()); + } + "#, + ) + .build(); + let _baz = project("baz") + .file( + "Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + + [lib] + name = "baz" + crate_type = ["dylib"] + "#, + ) + .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") + .build(); + + let target = cross_compile::alternate(); + assert_that( + foo.cargo("build").arg("--target").arg(&target), + execs().with_status(0), + ); +} + +#[test] +fn cross_tests() { + if cross_compile::disabled() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "bar" + "#, + ) + .file( + "src/bin/bar.rs", + &format!( + r#" + #[allow(unused_extern_crates)] + extern crate foo; + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + #[test] fn test() {{ main() }} + "#, + cross_compile::alternate_arch() + ), + ) + .file( + "src/lib.rs", + &format!( + r#" + use std::env; + pub fn foo() {{ assert_eq!(env::consts::ARCH, "{}"); }} + #[test] fn test_foo() {{ foo() }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + assert_that( + p.cargo("test").arg("--target").arg(&target), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ({foo}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]{triple}[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]{triple}[/]debug[/]deps[/]bar-[..][EXE]", + foo = p.url(), + triple = target + )) + .with_stdout_contains("test test_foo ... ok") + .with_stdout_contains("test test ... ok"), + ); +} + +#[test] +fn no_cross_doctests() { + if cross_compile::disabled() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + authors = [] + version = "0.0.0" + "#, + ) + .file( + "src/lib.rs", + r#" + //! ``` + //! extern crate foo; + //! assert!(true); + //! ``` + "#, + ) + .build(); + + let host_output = format!( + "\ +[COMPILING] foo v0.0.0 ({foo}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo +", + foo = p.url() + ); + + println!("a"); + assert_that( + p.cargo("test"), + execs().with_status(0).with_stderr(&host_output), + ); + + println!("b"); + let target = cross_compile::host(); + assert_that( + p.cargo("test").arg("--target").arg(&target), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ({foo}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]{triple}[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo +", + foo = p.url(), + triple = target + )), + ); + + println!("c"); + let target = cross_compile::alternate(); + assert_that( + p.cargo("test").arg("--target").arg(&target), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ({foo}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]{triple}[/]debug[/]deps[/]foo-[..][EXE] +", + foo = p.url(), + triple = target + )), + ); +} + +#[test] +fn simple_cargo_run() { + if cross_compile::disabled() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#, + ) + .file( + "src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + assert_that( + p.cargo("run").arg("--target").arg(&target), + execs().with_status(0), + ); +} + +#[test] +fn cross_with_a_build_script() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 'build.rs' + "#, + ) + .file( + "build.rs", + &format!( + r#" + use std::env; + use std::path::PathBuf; + fn main() {{ + assert_eq!(env::var("TARGET").unwrap(), "{0}"); + let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "out"); + path.pop(); + assert!(path.file_name().unwrap().to_str().unwrap() + .starts_with("foo-")); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "build"); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "debug"); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "{0}"); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "target"); + }} + "#, + target + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("--target").arg(&target).arg("-v"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 (file://[..]) +[RUNNING] `rustc [..] build.rs [..] --out-dir {dir}[/]target[/]debug[/]build[/]foo-[..]` +[RUNNING] `{dir}[/]target[/]debug[/]build[/]foo-[..][/]build-script-build` +[RUNNING] `rustc [..] src[/]main.rs [..] --target {target} [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + target = target, + dir = p.root().display() + )), + ); +} + +#[test] +fn build_script_needed_for_host_and_target() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 'build.rs' + + [dependencies.d1] + path = "d1" + [build-dependencies.d2] + path = "d2" + "#, + ) + .file( + "build.rs", + r#" + #[allow(unused_extern_crates)] + extern crate d2; + fn main() { d2::d2(); } + "#, + ) + .file( + "src/main.rs", + " + #[allow(unused_extern_crates)] + extern crate d1; + fn main() { d1::d1(); } + ", + ) + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + build = 'build.rs' + "#, + ) + .file( + "d1/src/lib.rs", + " + pub fn d1() {} + ", + ) + .file( + "d1/build.rs", + r#" + use std::env; + fn main() { + let target = env::var("TARGET").unwrap(); + println!("cargo:rustc-flags=-L /path/to/{}", target); + } + "#, + ) + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + + [dependencies.d1] + path = "../d1" + "#, + ) + .file( + "d2/src/lib.rs", + " + #[allow(unused_extern_crates)] + extern crate d1; + pub fn d2() { d1::d1(); } + ", + ) + .build(); + + assert_that( + p.cargo("build").arg("--target").arg(&target).arg("-v"), + execs() + .with_status(0) + .with_stderr_contains(&format!( + "[COMPILING] d1 v0.0.0 ({url}/d1)", + url = p.url() + )) + .with_stderr_contains(&format!("[RUNNING] `rustc [..] d1[/]build.rs [..] --out-dir {dir}[/]target[/]debug[/]build[/]d1-[..]`", + dir = p.root().display())) + .with_stderr_contains(&format!( + "[RUNNING] `{dir}[/]target[/]debug[/]build[/]d1-[..][/]build-script-build`", + dir = p.root().display() + )) + .with_stderr_contains( + "[RUNNING] `rustc [..] d1[/]src[/]lib.rs [..]`", + ) + .with_stderr_contains(&format!( + "[COMPILING] d2 v0.0.0 ({url}/d2)", + url = p.url() + )) + .with_stderr_contains(&format!( + "\ + [RUNNING] `rustc [..] d2[/]src[/]lib.rs [..] \ + -L /path/to/{host}`", + host = host + )) + .with_stderr_contains(&format!( + "[COMPILING] foo v0.0.0 ({url})", + url = p.url() + )) + .with_stderr_contains(&format!("\ +[RUNNING] `rustc [..] build.rs [..] --out-dir {dir}[/]target[/]debug[/]build[/]foo-[..] \ + -L /path/to/{host}`", dir = p.root().display(), host = host)) + .with_stderr_contains(&format!( + "\ + [RUNNING] `rustc [..] src[/]main.rs [..] --target {target} [..] 
\ + -L /path/to/{target}`", + target = target + )), + ); +} + +#[test] +fn build_deps_for_the_right_arch() { + if cross_compile::disabled() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.d2] + path = "d2" + "#, + ) + .file("src/main.rs", "extern crate d2; fn main() {}") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + "#, + ) + .file( + "d1/src/lib.rs", + " + pub fn d1() {} + ", + ) + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + build = "build.rs" + + [build-dependencies.d1] + path = "../d1" + "#, + ) + .file("d2/build.rs", "extern crate d1; fn main() {}") + .file("d2/src/lib.rs", "") + .build(); + + let target = cross_compile::alternate(); + assert_that( + p.cargo("build").arg("--target").arg(&target).arg("-v"), + execs().with_status(0), + ); +} + +#[test] +fn build_script_only_host() { + if cross_compile::disabled() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + + [build-dependencies.d1] + path = "d1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("build.rs", "extern crate d1; fn main() {}") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "d1/src/lib.rs", + " + pub fn d1() {} + ", + ) + .file( + "d1/build.rs", + r#" + use std::env; + + fn main() { + assert!(env::var("OUT_DIR").unwrap().replace("\\", "/") + .contains("target/debug/build/d1-"), + "bad: {:?}", env::var("OUT_DIR")); + } + "#, + ) + .build(); + + let target = cross_compile::alternate(); + assert_that( + p.cargo("build").arg("--target").arg(&target).arg("-v"), + execs().with_status(0), + ); +} + +#[test] +fn plugin_build_script_right_arch() { + if cross_compile::disabled() { + return; + } + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [lib] + name = "foo" + plugin = true + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build") + .arg("-v") + .arg("--target") + .arg(cross_compile::alternate()), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `[..][/]build-script-build` +[RUNNING] `rustc [..] src[/]lib.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn build_script_with_platform_specific_dependencies() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [build-dependencies.d1] + path = "d1" + "#, + ) + .file( + "build.rs", + " + #[allow(unused_extern_crates)] + extern crate d1; + fn main() {} + ", + ) + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + &format!( + r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + + [target.{}.dependencies] + d2 = {{ path = "../d2" }} + "#, + host + ), + ) + .file( + "d1/src/lib.rs", + " + #[allow(unused_extern_crates)] + extern crate d2; + ", + ) + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + "#, + ) + .file("d2/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v").arg("--target").arg(&target), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] d2 v0.0.0 ([..]) +[RUNNING] `rustc [..] d2[/]src[/]lib.rs [..]` +[COMPILING] d1 v0.0.0 ([..]) +[RUNNING] `rustc [..] d1[/]src[/]lib.rs [..]` +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `{dir}[/]target[/]debug[/]build[/]foo-[..][/]build-script-build` +[RUNNING] `rustc [..] src[/]lib.rs [..] --target {target} [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.root().display(), + target = target + )), + ); +} + +#[test] +fn platform_specific_dependencies_do_not_leak() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [dependencies.d1] + path = "d1" + + [build-dependencies.d1] + path = "d1" + "#, + ) + .file("build.rs", "extern crate d1; fn main() {}") + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + &format!( + r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + + [target.{}.dependencies] + d2 = {{ path = "../d2" }} + "#, + host + ), + ) + .file("d1/src/lib.rs", "extern crate d2;") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + "#, + ) + .file("d2/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v").arg("--target").arg(&target), + execs() + .with_status(101) + .with_stderr_contains("[..] can't find crate for `d2`[..]"), + ); +} + +#[test] +fn platform_specific_variables_reflected_in_build_scripts() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [target.{host}.dependencies] + d1 = {{ path = "d1" }} + + [target.{target}.dependencies] + d2 = {{ path = "d2" }} + "#, + host = host, + target = target + ), + ) + .file( + "build.rs", + &format!( + r#" + use std::env; + + fn main() {{ + let platform = env::var("TARGET").unwrap(); + let (expected, not_expected) = match &platform[..] 
{{ + "{host}" => ("DEP_D1_VAL", "DEP_D2_VAL"), + "{target}" => ("DEP_D2_VAL", "DEP_D1_VAL"), + _ => panic!("unknown platform") + }}; + + env::var(expected).ok() + .expect(&format!("missing {{}}", expected)); + env::var(not_expected).err() + .expect(&format!("found {{}}", not_expected)); + }} + "#, + host = host, + target = target + ), + ) + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + links = "d1" + build = "build.rs" + "#, + ) + .file( + "d1/build.rs", + r#" + fn main() { println!("cargo:val=1") } + "#, + ) + .file("d1/src/lib.rs", "") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + links = "d2" + build = "build.rs" + "#, + ) + .file( + "d2/build.rs", + r#" + fn main() { println!("cargo:val=1") } + "#, + ) + .file("d2/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + assert_that( + p.cargo("build").arg("-v").arg("--target").arg(&target), + execs().with_status(0), + ); +} + +#[test] +fn cross_test_dylib() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar as the_bar; + + pub fn bar() { the_bar::baz(); } + + #[test] + fn foo() { bar(); } + "#, + ) + .file( + "tests/test.rs", + r#" + extern crate foo as the_foo; + + #[test] + fn foo() { the_foo::bar(); } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate_type = ["dylib"] + "#, + ) + .file( + "bar/src/lib.rs", + &format!( + r#" + use std::env; + pub fn baz() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + assert_that( + p.cargo("test").arg("--target").arg(&target), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]{arch}[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]{arch}[/]debug[/]deps[/]test-[..][EXE]", + dir = p.url(), + arch = cross_compile::alternate() + )) + .with_stdout_contains_n("test foo ... 
ok", 2), + ); +} diff --git a/tests/testsuite/cross_publish.rs b/tests/testsuite/cross_publish.rs new file mode 100644 index 000000000..c67444125 --- /dev/null +++ b/tests/testsuite/cross_publish.rs @@ -0,0 +1,136 @@ +use std::fs::File; +use std::path::PathBuf; +use std::io::prelude::*; + +use cargotest::support::{cross_compile, execs, project, publish}; +use hamcrest::{assert_that, contains}; +use flate2::read::GzDecoder; +use tar::Archive; + +#[test] +fn simple_cross_package() { + if cross_compile::disabled() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + "#, + ) + .file( + "src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + + assert_that( + p.cargo("package").arg("--target").arg(&target), + execs().with_status(0).with_status(0).with_stderr(&format!( + " Packaging foo v0.0.0 ({dir}) + Verifying foo v0.0.0 ({dir}) + Compiling foo v0.0.0 ({dir}/target/package/foo-0.0.0) + Finished dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + + // Check that the tarball contains the files + let f = File::open(&p.root().join("target/package/foo-0.0.0.crate")).unwrap(); + let mut rdr = GzDecoder::new(f); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + let entries = ar.entries().unwrap(); + let entry_paths = entries + .map(|entry| entry.unwrap().path().unwrap().into_owned()) + .collect::>(); + assert_that( + &entry_paths, + contains(vec![PathBuf::from("foo-0.0.0/Cargo.toml")]), + ); + assert_that( + &entry_paths, + contains(vec![PathBuf::from("foo-0.0.0/Cargo.toml.orig")]), + ); + assert_that( + &entry_paths, + contains(vec![PathBuf::from("foo-0.0.0/src/main.rs")]), + ); +} + +#[test] +fn publish_with_target() { + if cross_compile::disabled() { + return; + } + + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + "#, + ) + .file( + "src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + + assert_that( + p.cargo("publish") + .arg("--index") + .arg(publish::registry().to_string()) + .arg("--target") + .arg(&target), + execs().with_status(0).with_stderr(&format!( + " Updating registry `{registry}` + Packaging foo v0.0.0 ({dir}) + Verifying foo v0.0.0 ({dir}) + Compiling foo v0.0.0 ({dir}/target/package/foo-0.0.0) + Finished dev [unoptimized + debuginfo] target(s) in [..] 
+ Uploading foo v0.0.0 ({dir}) +", + dir = p.url(), + registry = publish::registry() + )), + ); +} diff --git a/tests/testsuite/custom_target.rs b/tests/testsuite/custom_target.rs new file mode 100644 index 000000000..86f5b9542 --- /dev/null +++ b/tests/testsuite/custom_target.rs @@ -0,0 +1,170 @@ +use cargotest::is_nightly; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn custom_target_minimal() { + if !is_nightly() { + return; + } + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.1" + authors = ["author@example.com"] + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(no_core)] + #![feature(lang_items)] + #![no_core] + + pub fn foo() -> u32 { + 42 + } + + #[lang = "sized"] + pub trait Sized { + // Empty. + } + #[lang = "copy"] + pub trait Copy { + // Empty. + } + "#, + ) + .file( + "custom-target.json", + r#" + { + "llvm-target": "x86_64-unknown-none-gnu", + "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128", + "arch": "x86_64", + "target-endian": "little", + "target-pointer-width": "64", + "target-c-int-width": "32", + "os": "none", + "linker-flavor": "ld.lld" + } + "#, + ) + .build(); + + assert_that( + p.cargo("build") + .arg("--lib") + .arg("--target") + .arg("custom-target.json") + .arg("-v"), + execs().with_status(0), + ); + assert_that( + p.cargo("build") + .arg("--lib") + .arg("--target") + .arg("src/../custom-target.json") + .arg("-v"), + execs().with_status(0), + ); +} + +#[test] +fn custom_target_dependency() { + if !is_nightly() { + return; + } + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.1" + authors = ["author@example.com"] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(no_core)] + #![feature(lang_items)] + #![feature(optin_builtin_traits)] + #![no_core] + + extern crate bar; + + pub fn foo() -> u32 { + bar::bar() + } + + #[lang = "freeze"] + unsafe auto trait Freeze {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + + name = "bar" + version = "0.0.1" + authors = ["author@example.com"] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #![feature(no_core)] + #![feature(lang_items)] + #![no_core] + + pub fn bar() -> u32 { + 42 + } + + #[lang = "sized"] + pub trait Sized { + // Empty. + } + #[lang = "copy"] + pub trait Copy { + // Empty. + } + "#, + ) + .file( + "custom-target.json", + r#" + { + "llvm-target": "x86_64-unknown-none-gnu", + "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128", + "arch": "x86_64", + "target-endian": "little", + "target-pointer-width": "64", + "target-c-int-width": "32", + "os": "none", + "linker-flavor": "ld.lld" + } + "#, + ) + .build(); + + assert_that( + p.cargo("build") + .arg("--lib") + .arg("--target") + .arg("custom-target.json") + .arg("-v"), + execs().with_status(0), + ); +} diff --git a/tests/testsuite/death.rs b/tests/testsuite/death.rs new file mode 100644 index 000000000..c54941991 --- /dev/null +++ b/tests/testsuite/death.rs @@ -0,0 +1,146 @@ +use std::fs; +use std::io::{self, Read}; +use std::net::TcpListener; +use std::process::{Child, Stdio}; +use std::thread; +use std::time::Duration; + +use cargotest::support::project; + +#[cfg(unix)] +fn enabled() -> bool { + true +} + +// On Windows support for these tests is only enabled through the usage of job +// objects. Support for nested job objects, however, was added in recent-ish +// versions of Windows, so this test may not always be able to succeed. 
+
+// As a result, we try to add ourselves to a job object here to see if we
+// can succeed or not.
+#[cfg(windows)]
+fn enabled() -> bool {
+    use winapi::um::{handleapi, jobapi, processthreadsapi, jobapi2};
+
+    unsafe {
+        // If we're not currently in a job, then we can definitely run these
+        // tests.
+        let me = processthreadsapi::GetCurrentProcess();
+        let mut ret = 0;
+        let r = jobapi::IsProcessInJob(me, 0 as *mut _, &mut ret);
+        assert_ne!(r, 0);
+        if ret == ::winapi::shared::minwindef::FALSE {
+            return true;
+        }
+
+        // If we are in a job, then we can run these tests if we can be added
+        // to a nested job (as we're going to create a nested job no matter
+        // what as part of these tests).
+        //
+        // If we can't be added to a nested job, then these tests will
+        // definitely fail, and there's not much we can do about that.
+        let job = jobapi2::CreateJobObjectW(0 as *mut _, 0 as *const _);
+        assert!(!job.is_null());
+        let r = jobapi2::AssignProcessToJobObject(job, me);
+        handleapi::CloseHandle(job);
+        r != 0
+    }
+}
+
+#[test]
+fn ctrl_c_kills_everyone() {
+    if !enabled() {
+        return;
+    }
+
+    let listener = TcpListener::bind("127.0.0.1:0").unwrap();
+    let addr = listener.local_addr().unwrap();
+
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            &format!(
+                r#"
+            use std::net::TcpStream;
+            use std::io::Read;
+
+            fn main() {{
+                let mut socket = TcpStream::connect("{}").unwrap();
+                let _ = socket.read(&mut [0; 10]);
+                panic!("that read should never return");
+            }}
+        "#,
+                addr
+            ),
+        )
+        .build();
+
+    let mut cargo = p.cargo("build").build_command();
+    cargo
+        .stdin(Stdio::piped())
+        .stdout(Stdio::piped())
+        .stderr(Stdio::piped())
+        .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1");
+    let mut child = cargo.spawn().unwrap();
+
+    let mut sock = listener.accept().unwrap().0;
+    ctrl_c(&mut child);
+
+    assert!(!child.wait().unwrap().success());
+    match sock.read(&mut [0; 10]) {
+        Ok(n) => assert_eq!(n, 0),
+        Err(e) => assert_eq!(e.kind(), io::ErrorKind::ConnectionReset),
+    }
+
+    // Ok so what we just did was spawn cargo that spawned a build script, then
+    // we killed cargo in hopes of it killing the build script as well. If all
+    // went well the build script is now dead. On Windows, however, this is
+    // enforced with job objects which means that it may actually be in the
+    // *process* of being torn down at this point.
+    //
+    // Now on Windows we can't completely remove a file until all handles to it
+    // have been closed. Including those that represent running processes. So if
+    // we were to return here then there may still be an open reference to some
+    // file in the build directory. What we want to actually do is wait for the
+    // build script to *completely* exit. Take care of that by blowing away the
+    // build directory here, and panicking if we eventually spin too long
+    // without being able to.
+    for i in 0..10 {
+        match fs::remove_dir_all(&p.root().join("target")) {
+            Ok(()) => return,
+            Err(e) => println!("attempt {}: {}", i, e),
+        }
+        thread::sleep(Duration::from_millis(100));
+    }
+
+    panic!(
+        "couldn't remove build directory after a few tries, seems like \
+         we won't be able to!"
+ ); +} + +#[cfg(unix)] +fn ctrl_c(child: &mut Child) { + use libc; + + let r = unsafe { libc::kill(-(child.id() as i32), libc::SIGINT) }; + if r < 0 { + panic!("failed to kill: {}", io::Error::last_os_error()); + } +} + +#[cfg(windows)] +fn ctrl_c(child: &mut Child) { + child.kill().unwrap(); +} diff --git a/tests/testsuite/dep_info.rs b/tests/testsuite/dep_info.rs new file mode 100644 index 000000000..f4bd01a28 --- /dev/null +++ b/tests/testsuite/dep_info.rs @@ -0,0 +1,126 @@ +use cargotest::support::{basic_bin_manifest, execs, main_file, project}; +use filetime::FileTime; +use hamcrest::{assert_that, existing_file}; + +#[test] +fn build_dep_info() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let depinfo_bin_path = &p.bin("foo").with_extension("d"); + + assert_that(depinfo_bin_path, existing_file()); +} + +#[test] +fn build_dep_info_lib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["lib"] + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0)); + assert_that( + &p.example_lib("ex", "lib").with_extension("d"), + existing_file(), + ); +} + +#[test] +fn build_dep_info_rlib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["rlib"] + "#, + ) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0)); + assert_that( + &p.example_lib("ex", "rlib").with_extension("d"), + existing_file(), + ); +} + +#[test] +fn build_dep_info_dylib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["dylib"] + "#, + ) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0)); + assert_that( + &p.example_lib("ex", "dylib").with_extension("d"), + existing_file(), + ); +} + +#[test] +fn no_rewrite_if_no_change() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + let dep_info = p.root().join("target/debug/libfoo.d"); + let metadata1 = dep_info.metadata().unwrap(); + assert_that(p.cargo("build"), execs().with_status(0)); + let metadata2 = dep_info.metadata().unwrap(); + + assert_eq!( + FileTime::from_last_modification_time(&metadata1), + FileTime::from_last_modification_time(&metadata2), + ); +} diff --git a/tests/testsuite/directory.rs b/tests/testsuite/directory.rs new file mode 100644 index 000000000..e91efa470 --- /dev/null +++ b/tests/testsuite/directory.rs @@ -0,0 +1,815 @@ +use serde_json; +use std::collections::HashMap; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::str; + +use cargotest::cargo_process; +use cargotest::support::git; +use cargotest::support::paths; +use cargotest::support::registry::{cksum, Package}; +use cargotest::support::{execs, project, ProjectBuilder}; +use hamcrest::assert_that; + 
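+// The tests in this file exercise directory sources: `setup()` below writes
+// a `.cargo/config` that replaces crates.io with a local `index` directory,
+// so dependencies are resolved from vendored packages created with the
+// `VendorPackage` helper (one subdirectory per package, each carrying the
+// `.cargo-checksum.json` emitted by `VendorPackage::build`) rather than
+// fetched from the network.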
+fn setup() {
+    let root = paths::root();
+    t!(fs::create_dir(&root.join(".cargo")));
+    t!(t!(File::create(root.join(".cargo/config"))).write_all(
+        br#"
+        [source.crates-io]
+        replace-with = 'my-awesome-local-registry'
+
+        [source.my-awesome-local-registry]
+        directory = 'index'
+    "#
+    ));
+}
+
+struct VendorPackage {
+    p: Option<ProjectBuilder>,
+    cksum: Checksum,
+}
+
+#[derive(Serialize)]
+struct Checksum {
+    package: Option<String>,
+    files: HashMap<String, String>,
+}
+
+impl VendorPackage {
+    fn new(name: &str) -> VendorPackage {
+        VendorPackage {
+            p: Some(project(&format!("index/{}", name))),
+            cksum: Checksum {
+                package: Some(String::new()),
+                files: HashMap::new(),
+            },
+        }
+    }
+
+    fn file(&mut self, name: &str, contents: &str) -> &mut VendorPackage {
+        self.p = Some(self.p.take().unwrap().file(name, contents));
+        self.cksum
+            .files
+            .insert(name.to_string(), cksum(contents.as_bytes()));
+        self
+    }
+
+    fn disable_checksum(&mut self) -> &mut VendorPackage {
+        self.cksum.package = None;
+        self
+    }
+
+    fn build(&mut self) {
+        let p = self.p.take().unwrap();
+        let json = serde_json::to_string(&self.cksum).unwrap();
+        let p = p.file(".cargo-checksum.json", &json);
+        let _ = p.build();
+    }
+}
+
+#[test]
+fn simple() {
+    setup();
+
+    VendorPackage::new("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    let p = project("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+        "#,
+        )
+        .file(
+            "src/lib.rs",
+            r#"
+            extern crate foo;
+
+            pub fn bar() {
+                foo::foo();
+            }
+        "#,
+        )
+        .build();
+
+    assert_that(
+        p.cargo("build"),
+        execs().with_status(0).with_stderr(
+            "\
+[COMPILING] foo v0.1.0
+[COMPILING] bar v0.1.0 ([..]bar)
+[FINISHED] [..]
+",
+        ),
+    );
+}
+
+#[test]
+fn simple_install() {
+    setup();
+
+    VendorPackage::new("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    VendorPackage::new("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+        "#,
+        )
+        .file(
+            "src/main.rs",
+            r#"
+            extern crate foo;
+
+            pub fn main() {
+                foo::foo();
+            }
+        "#,
+        )
+        .build();
+
+    assert_that(
+        cargo_process().arg("install").arg("bar"),
+        execs().with_status(0).with_stderr(
+            "  Installing bar v0.1.0
+   Compiling foo v0.1.0
+   Compiling bar v0.1.0
+    Finished release [optimized] target(s) in [..] secs
+  Installing [..]bar[..]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ), + ); +} + +#[test] +fn simple_install_fail() { + setup(); + + VendorPackage::new("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + VendorPackage::new("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + baz = "9.8.7" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate foo; + + pub fn main() { + foo::foo(); + } + "#, + ) + .build(); + + assert_that( + cargo_process().arg("install").arg("bar"), + execs().with_status(101).with_stderr( + " Installing bar v0.1.0 +error: failed to compile `bar v0.1.0`, intermediate artifacts can be found at `[..]` + +Caused by: + no matching package named `baz` found +location searched: registry `https://github.com/rust-lang/crates.io-index` +required by package `bar v0.1.0` +", + ), + ); +} + +#[test] +fn install_without_feature_dep() { + setup(); + + VendorPackage::new("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + VendorPackage::new("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + baz = { version = "9.8.7", optional = true } + + [features] + wantbaz = ["baz"] + "#, + ) + .file( + "src/main.rs", + r#" + extern crate foo; + + pub fn main() { + foo::foo(); + } + "#, + ) + .build(); + + assert_that( + cargo_process().arg("install").arg("bar"), + execs().with_status(0).with_stderr( + " Installing bar v0.1.0 + Compiling foo v0.1.0 + Compiling bar v0.1.0 + Finished release [optimized] target(s) in [..] secs + Installing [..]bar[..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ), + ); +} + +#[test] +fn not_there() { + setup(); + + let _ = project("index").build(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate foo; + + pub fn bar() { + foo::foo(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: no matching package named `foo` found +location searched: [..] +required by package `bar v0.1.0 ([..])` +", + ), + ); +} + +#[test] +fn multiple() { + setup(); + + VendorPackage::new("foo-0.1.0") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file(".cargo-checksum", "") + .build(); + + VendorPackage::new("foo-0.2.0") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file(".cargo-checksum", "") + .build(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate foo; + + pub fn bar() { + foo::foo(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 ([..]bar) +[FINISHED] [..] 
+", + ), + ); +} + +#[test] +fn crates_io_then_directory() { + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate foo; + + pub fn bar() { + foo::foo(); + } + "#, + ) + .build(); + + let cksum = Package::new("foo", "0.1.0") + .file("src/lib.rs", "pub fn foo() -> u32 { 0 }") + .publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] foo v0.1.0 ([..]) +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 ([..]bar) +[FINISHED] [..] +", + ), + ); + + setup(); + + let mut v = VendorPackage::new("foo"); + v.file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ); + v.file("src/lib.rs", "pub fn foo() -> u32 { 1 }"); + v.cksum.package = Some(cksum); + v.build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 ([..]bar) +[FINISHED] [..] +", + ), + ); +} + +#[test] +fn crates_io_then_bad_checksum() { + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + Package::new("foo", "0.1.0").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); + setup(); + + VendorPackage::new("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: checksum for `foo v0.1.0` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `foo v0.1.0` is the same as when the lockfile was generated + +", + ), + ); +} + +#[test] +fn bad_file_checksum() { + setup(); + + VendorPackage::new("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + let mut f = t!(File::create(paths::root().join("index/foo/src/lib.rs"))); + t!(f.write_all(b"fn foo() -> u32 { 0 }")); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: the listed checksum of `[..]lib.rs` has changed: +expected: [..] +actual: [..] 
+ +directory sources are not intended to be edited, if modifications are \ +required then it is recommended that [replace] is used with a forked copy of \ +the source +", + ), + ); +} + +#[test] +fn only_dot_files_ok() { + setup(); + + VendorPackage::new("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + VendorPackage::new("bar").file(".foo", "").build(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn random_files_ok() { + setup(); + + VendorPackage::new("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + VendorPackage::new("bar") + .file("foo", "") + .file("../test", "") + .build(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn git_lock_file_doesnt_change() { + let git = git::new("git", |p| { + p.file( + "Cargo.toml", + r#" + [project] + name = "git" + version = "0.5.0" + authors = [] + "#, + ).file("src/lib.rs", "") + }).unwrap(); + + VendorPackage::new("git") + .file( + "Cargo.toml", + r#" + [package] + name = "git" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .disable_checksum() + .build(); + + let p = project("bar") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + git = {{ git = '{0}' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let mut lock1 = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lock1)); + + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!( + t!(File::create(root.join(".cargo/config"))).write_all(&format!( + r#" + [source.my-git-repo] + git = '{}' + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + directory = 'index' + "#, + git.url() + ).as_bytes()) + ); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] 
+", + ), + ); + + let mut lock2 = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lock2)); + assert_eq!(lock1, lock2, "lock files changed"); +} + +#[test] +fn git_override_requires_lockfile() { + VendorPackage::new("git") + .file( + "Cargo.toml", + r#" + [package] + name = "git" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .disable_checksum() + .build(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + git = { git = 'https://example.com/' } + "#, + ) + .file("src/lib.rs", "") + .build(); + + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all( + br#" + [source.my-git-repo] + git = 'https://example.com/' + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + directory = 'index' + "# + )); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to load source for a dependency on `git` + +Caused by: + Unable to update [..] + +Caused by: + the source my-git-repo requires a lock file to be present first before it can be +used against vendored source code + +remove the source replacement configuration, generate a lock file, and then +restore the source replacement configuration to continue the build + +", + ), + ); +} diff --git a/tests/testsuite/doc.rs b/tests/testsuite/doc.rs new file mode 100644 index 000000000..7b6ea41e3 --- /dev/null +++ b/tests/testsuite/doc.rs @@ -0,0 +1,1466 @@ +use cargotest; +use std::str; +use std::fs::{self, File}; +use std::io::Read; + +use cargotest::rustc_host; +use cargotest::support::{execs, project, path2url}; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_dir, existing_file, is_not}; +use cargo::util::ProcessError; + +#[test] +fn simple() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("build.rs", "fn main() {}") + .file( + "src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .build(); + + assert_that( + p.cargo("doc"), + execs().with_status(0).with_stderr(&format!( + "\ +[..] foo v0.0.1 ({dir}) +[..] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = path2url(p.root()) + )), + ); + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); +} + +#[test] +fn doc_no_libs() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "foo" + doc = false + "#, + ) + .file( + "src/main.rs", + r#" + bad code + "#, + ) + .build(); + + assert_that(p.cargo("doc"), execs().with_status(0)); +} + +#[test] +fn doc_twice() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .build(); + + assert_that( + p.cargo("doc"), + execs().with_status(0).with_stderr(&format!( + "\ +[DOCUMENTING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = path2url(p.root()) + )), + ); + + assert_that(p.cargo("doc"), execs().with_status(0).with_stdout("")) +} + +#[test] +fn doc_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + assert_that( + p.cargo("doc"), + execs().with_status(0).with_stderr(&format!( + "\ +[..] bar v0.0.1 ({dir}/bar) +[..] bar v0.0.1 ({dir}/bar) +[DOCUMENTING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = path2url(p.root()) + )), + ); + + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); + assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); + + assert_that( + p.cargo("doc") + .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint"), + execs().with_status(0).with_stdout(""), + ); + + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); + assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); +} + +#[test] +fn doc_no_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + assert_that( + p.cargo("doc").arg("--no-deps"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[DOCUMENTING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = path2url(p.root()) + )), + ); + + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); + assert_that( + &p.root().join("target/doc/bar/index.html"), + is_not(existing_file()), + ); +} + +#[test] +fn doc_only_bin() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + assert_that(p.cargo("doc").arg("-v"), execs().with_status(0)); + + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); +} + +#[test] +fn doc_multiple_targets_same_name_lib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [lib] + name = "foo_lib" + "#, + ) + .file("foo/src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + [lib] + name = "foo_lib" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("doc").arg("--all"), + execs() + .with_status(101) + .with_stderr_contains("[..] library `foo_lib` is specified [..]") + .with_stderr_contains("[..] `foo v0.1.0[..]` [..]") + .with_stderr_contains("[..] `bar v0.1.0[..]` [..]"), + ); +} + +#[test] +fn doc_multiple_targets_same_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [[bin]] + name = "foo_lib" + path = "src/foo_lib.rs" + "#, + ) + .file("foo/src/foo_lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + [lib] + name = "foo_lib" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + let root = path2url(p.root()); + + assert_that( + p.cargo("doc").arg("--all"), + execs() + .with_status(0) + .with_stderr_contains(&format!("[DOCUMENTING] foo v0.1.0 ({}/foo)", root)) + .with_stderr_contains(&format!("[DOCUMENTING] bar v0.1.0 ({}/bar)", root)) + .with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"), + ); + assert_that(&p.root().join("target/doc"), existing_dir()); + let doc_file = p.root().join("target/doc/foo_lib/index.html"); + assert_that(&doc_file, existing_file()); +} + +#[test] +fn doc_multiple_targets_same_name_bin() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [[bin]] + name = "foo-cli" + "#, + ) + .file("foo/src/foo-cli.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + [[bin]] + name = "foo-cli" + "#, + ) + .file("bar/src/foo-cli.rs", "") + .build(); + + assert_that( + p.cargo("doc").arg("--all"), + execs() + .with_status(101) + .with_stderr_contains("[..] binary `foo_cli` is specified [..]") + .with_stderr_contains("[..] `foo v0.1.0[..]` [..]") + .with_stderr_contains("[..] 
`bar v0.1.0[..]` [..]"), + ); +} + +#[test] +fn doc_multiple_targets_same_name_undoced() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [[bin]] + name = "foo-cli" + "#, + ) + .file("foo/src/foo-cli.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + [[bin]] + name = "foo-cli" + doc = false + "#, + ) + .file("bar/src/foo-cli.rs", "") + .build(); + + assert_that(p.cargo("doc").arg("--all"), execs().with_status(0)); +} + +#[test] +fn doc_lib_bin_same_name_documents_lib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + //! Binary documentation + extern crate foo; + fn main() { + foo::foo(); + } + "#, + ) + .file( + "src/lib.rs", + r#" + //! Library documentation + pub fn foo() {} + "#, + ) + .build(); + + assert_that( + p.cargo("doc"), + execs().with_status(0).with_stderr(&format!( + "\ +[DOCUMENTING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = path2url(p.root()) + )), + ); + assert_that(&p.root().join("target/doc"), existing_dir()); + let doc_file = p.root().join("target/doc/foo/index.html"); + assert_that(&doc_file, existing_file()); + let mut doc_html = String::new(); + File::open(&doc_file) + .unwrap() + .read_to_string(&mut doc_html) + .unwrap(); + assert!(doc_html.contains("Library")); + assert!(!doc_html.contains("Binary")); +} + +#[test] +fn doc_lib_bin_same_name_documents_lib_when_requested() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + //! Binary documentation + extern crate foo; + fn main() { + foo::foo(); + } + "#, + ) + .file( + "src/lib.rs", + r#" + //! Library documentation + pub fn foo() {} + "#, + ) + .build(); + + assert_that( + p.cargo("doc").arg("--lib"), + execs().with_status(0).with_stderr(&format!( + "\ +[DOCUMENTING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = path2url(p.root()) + )), + ); + assert_that(&p.root().join("target/doc"), existing_dir()); + let doc_file = p.root().join("target/doc/foo/index.html"); + assert_that(&doc_file, existing_file()); + let mut doc_html = String::new(); + File::open(&doc_file) + .unwrap() + .read_to_string(&mut doc_html) + .unwrap(); + assert!(doc_html.contains("Library")); + assert!(!doc_html.contains("Binary")); +} + +#[test] +fn doc_lib_bin_same_name_documents_named_bin_when_requested() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + //! Binary documentation + extern crate foo; + fn main() { + foo::foo(); + } + "#, + ) + .file( + "src/lib.rs", + r#" + //! Library documentation + pub fn foo() {} + "#, + ) + .build(); + + assert_that( + p.cargo("doc").arg("--bin").arg("foo"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[DOCUMENTING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = path2url(p.root()) + )), + ); + assert_that(&p.root().join("target/doc"), existing_dir()); + let doc_file = p.root().join("target/doc/foo/index.html"); + assert_that(&doc_file, existing_file()); + let mut doc_html = String::new(); + File::open(&doc_file) + .unwrap() + .read_to_string(&mut doc_html) + .unwrap(); + assert!(!doc_html.contains("Library")); + assert!(doc_html.contains("Binary")); +} + +#[test] +fn doc_lib_bin_same_name_documents_bins_when_requested() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + //! Binary documentation + extern crate foo; + fn main() { + foo::foo(); + } + "#, + ) + .file( + "src/lib.rs", + r#" + //! Library documentation + pub fn foo() {} + "#, + ) + .build(); + + assert_that( + p.cargo("doc").arg("--bins"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[DOCUMENTING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = path2url(p.root()) + )), + ); + assert_that(&p.root().join("target/doc"), existing_dir()); + let doc_file = p.root().join("target/doc/foo/index.html"); + assert_that(&doc_file, existing_file()); + let mut doc_html = String::new(); + File::open(&doc_file) + .unwrap() + .read_to_string(&mut doc_html) + .unwrap(); + assert!(!doc_html.contains("Library")); + assert!(doc_html.contains("Binary")); +} + +#[test] +fn doc_dash_p() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "extern crate a;") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies.b] + path = "../b" + "#, + ) + .file("a/src/lib.rs", "extern crate b;") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("doc").arg("-p").arg("a"), + execs().with_status(0).with_stderr( + "\ +[..] b v0.0.1 (file://[..]) +[..] b v0.0.1 (file://[..]) +[DOCUMENTING] a v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn doc_same_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/main.rs", "fn main() {}") + .file("examples/main.rs", "fn main() {}") + .file("tests/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("doc"), execs().with_status(0)); +} + +#[test] +fn doc_target() { + const TARGET: &'static str = "arm-unknown-linux-gnueabihf"; + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(no_core)] + #![no_core] + + extern { + pub static A: u32; + } + "#, + ) + .build(); + + assert_that( + p.cargo("doc").arg("--target").arg(TARGET).arg("--verbose"), + execs().with_status(0), + ); + assert_that( + &p.root().join(&format!("target/{}/doc", TARGET)), + existing_dir(), + ); + assert_that( + &p.root() + .join(&format!("target/{}/doc/foo/index.html", TARGET)), + existing_file(), + ); +} + +#[test] +fn target_specific_not_documented() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.foo.dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", "not rust") + .build(); + + assert_that(p.cargo("doc"), execs().with_status(0)); +} + +#[test] +fn output_not_captured() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "a/src/lib.rs", + " + /// ``` + /// ☃ + /// ``` + pub fn foo() {} + ", + ) + .build(); + + let error = p.cargo("doc").exec_with_output().err().unwrap(); + if let Ok(perr) = error.downcast::() { + let output = perr.output.unwrap(); + let stderr = str::from_utf8(&output.stderr).unwrap(); + + assert!(stderr.contains("☃"), "no snowman\n{}", stderr); + assert!( + stderr.contains("unknown start of token"), + "no message{}", + stderr + ); + } else { + assert!( + false, + "an error kind other than ProcessErrorKind was encountered" + ); + } +} + +#[test] +fn target_specific_documented() { + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.foo.dependencies] + a = {{ path = "a" }} + [target.{}.dependencies] + a = {{ path = "a" }} + "#, + rustc_host() + ), + ) + .file( + "src/lib.rs", + " + extern crate a; + + /// test + pub fn foo() {} + ", + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "a/src/lib.rs", + " + /// test + pub fn foo() {} + ", + ) + .build(); + + assert_that(p.cargo("doc"), execs().with_status(0)); +} + +#[test] +fn no_document_build_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [build-dependencies] + a = { path = "a" } + "#, + ) + .file( + "src/lib.rs", + " + pub fn foo() {} + ", + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "a/src/lib.rs", + " + /// ``` + /// ☃ + /// ``` + pub fn foo() {} + ", + ) + .build(); + + 
assert_that(p.cargo("doc"), execs().with_status(0)); +} + +#[test] +fn doc_release() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--release"), execs().with_status(0)); + assert_that( + p.cargo("doc").arg("--release").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[DOCUMENTING] foo v0.0.1 ([..]) +[RUNNING] `rustdoc [..] src[/]lib.rs [..]` +[FINISHED] release [optimized] target(s) in [..] +", + ), + ); +} + +#[test] +fn doc_multiple_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + + [dependencies.baz] + path = "baz" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "baz/src/lib.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + + assert_that( + p.cargo("doc") + .arg("-p") + .arg("bar") + .arg("-p") + .arg("baz") + .arg("-v"), + execs().with_status(0), + ); + + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); + assert_that(&p.root().join("target/doc/baz/index.html"), existing_file()); +} + +#[test] +fn features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + + [features] + foo = ["bar/bar"] + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "foo")] + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + bar = [] + "#, + ) + .file( + "bar/build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=bar"); + } + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[cfg(feature = "bar")] + pub fn bar() {} + "#, + ) + .build(); + assert_that( + p.cargo("doc").arg("--features").arg("foo"), + execs().with_status(0), + ); + assert_that(&p.root().join("target/doc"), existing_dir()); + assert_that( + &p.root().join("target/doc/foo/fn.foo.html"), + existing_file(), + ); + assert_that( + &p.root().join("target/doc/bar/fn.bar.html"), + existing_file(), + ); +} + +#[test] +fn rerun_when_dir_removed() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + /// dox + pub fn foo() {} + "#, + ) + .build(); + + assert_that(p.cargo("doc"), execs().with_status(0)); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); + + fs::remove_dir_all(p.root().join("target/doc/foo")).unwrap(); + + assert_that(p.cargo("doc"), execs().with_status(0)); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); +} + +#[test] +fn document_only_lib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + /// dox + pub fn foo() {} + "#, + ) + .file( + "src/bin/bar.rs", + r#" + /// ``` + /// ☃ + /// ``` + pub fn foo() {} + fn main() { foo(); } + "#, + ) + .build(); + 
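+    // src/bin/bar.rs contains the same broken `☃` doc block; `--lib`
+    // restricts documentation to the library target, so the binary's
+    // invalid docs are never processed.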
assert_that(p.cargo("doc").arg("--lib"), execs().with_status(0)); + assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); +} + +#[test] +fn plugins_no_use_target() { + if !cargotest::is_nightly() { + return; + } + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("doc") + .arg("--target=x86_64-unknown-openbsd") + .arg("-v"), + execs().with_status(0), + ); +} + +#[test] +fn doc_all_workspace() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + // The order in which bar is compiled or documented is not deterministic + assert_that( + p.cargo("doc").arg("--all"), + execs() + .with_status(0) + .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])"), + ); +} + +#[test] +fn doc_all_virtual_manifest() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file( + "foo/src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + // The order in which foo and bar are documented is not guaranteed + assert_that( + p.cargo("doc").arg("--all"), + execs() + .with_status(0) + .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") + .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])"), + ); +} + +#[test] +fn doc_virtual_manifest_all_implied() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file( + "foo/src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + // The order in which foo and bar are documented is not guaranteed + assert_that( + p.cargo("doc"), + execs() + .with_status(0) + .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") + .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])"), + ); +} + +#[test] +fn doc_all_member_dependency_same_name() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + + [dependencies] + a = "0.1.0" + "#, + ) + .file( + "a/src/lib.rs", + r#" + pub fn a() {} + "#, + ) + .build(); + + Package::new("a", "0.1.0").publish(); + + assert_that( + p.cargo("doc").arg("--all"), + execs() + .with_status(0) + .with_stderr_contains("[..] Updating registry `[..]`") + .with_stderr_contains("[..] 
Documenting a v0.1.0 ([..])"),
+ );
+}
+
+#[test]
+fn doc_workspace_open_help_message() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [workspace]
+ members = ["foo", "bar"]
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ "#,
+ )
+ .file("foo/src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ // The order in which bar is compiled or documented is not deterministic
+ assert_that(
+ p.cargo("doc").arg("--all").arg("--open"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+ .with_stderr_contains(
+ "error: Passing multiple packages and `open` \
+ is not supported.",
+ )
+ .with_stderr_contains(
+ "Please re-run this command with `-p <spec>` \
+ where `<spec>` is one of the following:",
+ )
+ .with_stderr_contains(" foo")
+ .with_stderr_contains(" bar"),
+ );
+}
diff --git a/tests/testsuite/features.rs b/tests/testsuite/features.rs
new file mode 100644
index 000000000..f877d3ea5
--- /dev/null
+++ b/tests/testsuite/features.rs
@@ -0,0 +1,1631 @@
+use std::fs::File;
+use std::io::prelude::*;
+
+use cargotest::support::paths::CargoPathExt;
+use cargotest::support::{execs, project};
+use hamcrest::assert_that;
+
+#[test]
+fn invalid1() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ bar = ["baz"]
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ Feature `bar` includes `baz` which is neither a dependency nor another feature
+",
+ ),
+ );
+}
+
+#[test]
+fn invalid2() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ bar = ["baz"]
+
+ [dependencies.bar]
+ path = "foo"
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ Features and dependencies cannot have the same name: `bar`
+",
+ ),
+ );
+}
+
+#[test]
+fn invalid3() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [features]
+ bar = ["baz"]
+
+ [dependencies.baz]
+ path = "foo"
+ "#,
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+ Feature `bar` depends on `baz` which is not an optional dependency.
+Consider adding `optional = true` to the dependency
+",
+ ),
+ );
+}
+
+#[test]
+fn invalid4() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ features = ["bar"]
+ "#,
+ )
+ .file("src/main.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [project]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
+error: failed to select a version for `bar`.
+ ... 
required by package `foo v0.0.1 ([..])` +versions that meet the requirements `*` are: 0.0.1 + +the package `foo` depends on `bar`, with features: `bar` but `bar` does not have these features. + + +failed to select a version for `bar` which could resolve this conflict", + ), + ); + + p.change_file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ); + + assert_that( + p.cargo("build").arg("--features").arg("test"), + execs() + .with_status(101) + .with_stderr("error: Package `foo v0.0.1 ([..])` does not have these features: `test`"), + ); +} + +#[test] +fn invalid5() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies.bar] + path = "bar" + optional = true + "#, + ) + .file("src/main.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Dev-dependencies are not allowed to be optional: `bar` +", + ), + ); +} + +#[test] +fn invalid6() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar/baz"] + "#, + ) + .file("src/main.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("--features").arg("foo"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `foo` requires a feature of `bar` which is not a dependency +", + ), + ); +} + +#[test] +fn invalid7() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar/baz"] + bar = [] + "#, + ) + .file("src/main.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("--features").arg("foo"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `foo` requires a feature of `bar` which is not a dependency +", + ), + ); +} + +#[test] +fn invalid8() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + features = ["foo/bar"] + "#, + ) + .file("src/main.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("--features").arg("foo"), + execs() + .with_status(101) + .with_stderr("[ERROR] feature names may not contain slashes: `foo/bar`"), + ); +} + +#[test] +fn invalid9() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("--features").arg("bar"), + execs().with_status(0).with_stderr("\ +warning: Package `foo v0.0.1 ([..])` does not have feature `bar`. It has a required dependency with \ +that name, but only optional dependencies can be used as features. [..] + Compiling bar v0.0.1 ([..]) + Compiling foo v0.0.1 ([..]) + Finished dev [unoptimized + debuginfo] target(s) in [..] 
secs +")); +} + +#[test] +fn invalid10() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + features = ["baz"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies.baz] + path = "baz" + "#, + ) + .file("bar/src/lib.rs", "") + .file( + "bar/baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/baz/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +warning: Package `bar v0.0.1 ([..])` does not have feature `baz`. It has a required dependency with \ +that name, but only optional dependencies can be used as features. [..] + Compiling baz v0.0.1 ([..]) + Compiling bar v0.0.1 ([..]) + Compiling foo v0.0.1 ([..]) + Finished dev [unoptimized + debuginfo] target(s) in [..] secs +")); +} + +#[test] +fn no_transitive_dep_feature_requirement() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.derived] + path = "derived" + + [features] + default = ["derived/bar/qux"] + "#, + ) + .file( + "src/main.rs", + r#" + extern crate derived; + fn main() { derived::test(); } + "#, + ) + .file( + "derived/Cargo.toml", + r#" + [package] + name = "derived" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "derived/src/lib.rs", + r#" + extern crate bar; + pub use bar::test; + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + qux = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[cfg(feature = "qux")] + pub fn test() { print!("test"); } + "#, + ) + .build(); + assert_that( + p.cargo("build"), + execs() + .with_status(101) + .with_stderr("[ERROR] feature names may not contain slashes: `bar/qux`"), + ); +} + +#[test] +fn no_feature_doesnt_build() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(feature = "bar")] + extern crate bar; + #[cfg(feature = "bar")] + fn main() { bar::bar(); println!("bar") } + #[cfg(not(feature = "bar"))] + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + assert_that( + p.process(&p.bin("foo")), + execs().with_status(0).with_stdout(""), + ); + + assert_that( + p.cargo("build").arg("--features").arg("bar"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = p.url() + )), + ); + assert_that( + p.process(&p.bin("foo")), + execs().with_status(0).with_stdout("bar\n"), + ); +} + +#[test] +fn default_feature_pulled_in() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["bar"] + + [dependencies.bar] + path = "bar" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(feature = "bar")] + extern crate bar; + #[cfg(feature = "bar")] + fn main() { bar::bar(); println!("bar") } + #[cfg(not(feature = "bar"))] + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + assert_that( + p.process(&p.bin("foo")), + execs().with_status(0).with_stdout("bar\n"), + ); + + assert_that( + p.cargo("build").arg("--no-default-features"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + assert_that( + p.process(&p.bin("foo")), + execs().with_status(0).with_stdout(""), + ); +} + +#[test] +fn cyclic_feature() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["default"] + "#, + ) + .file("src/main.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs() + .with_status(101) + .with_stderr("[ERROR] Cyclic feature dependency: feature `default` depends on itself"), + ); +} + +#[test] +fn cyclic_feature2() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar"] + bar = ["foo"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn groups_on_groups_on_groups() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["f1"] + f1 = ["f2", "bar"] + f2 = ["f3", "f4"] + f3 = ["f5", "f6", "baz"] + f4 = ["f5", "f7"] + f5 = ["f6"] + f6 = ["f7"] + f7 = ["bar"] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#, + ) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = p.url() + )), + ); +} + +#[test] +fn many_cli_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#, + ) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + assert_that( + p.cargo("build").arg("--features").arg("bar baz"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); +} + +#[test] +fn union_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + features = ["f1"] + [dependencies.d2] + path = "d2" + features = ["f2"] + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate d1; + extern crate d2; + fn main() { + d2::f1(); + d2::f2(); + } + "#, + ) + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [features] + f1 = ["d2"] + + [dependencies.d2] + path = "../d2" + features = ["f1"] + optional = true + "#, + ) + .file("d1/src/lib.rs", "") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [features] + f1 = [] + f2 = [] + "#, + ) + .file( + "d2/src/lib.rs", + r#" + #[cfg(feature = "f1")] pub fn f1() {} + #[cfg(feature = "f2")] pub fn f2() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] d2 v0.0.1 ({dir}/d2) +[COMPILING] d1 v0.0.1 ({dir}/d1) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); +} + +#[test] +fn many_features_no_rebuilds() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "b" + version = "0.1.0" + authors = [] + + [dependencies.a] + path = "a" + features = ["fall"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.1.0" + authors = [] + + [features] + ftest = [] + ftest2 = [] + fall = ["ftest", "ftest2"] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] a v0.1.0 ({dir}/a) +[COMPILING] b v0.1.0 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + p.root().move_into_the_past(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[FRESH] a v0.1.0 ([..]/a) +[FRESH] b v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +// Tests that all cmd lines work with `--features ""` +#[test] +fn empty_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("--features").arg(""), + execs().with_status(0), + ); +} + +// Tests that all cmd lines work with `--features ""` +#[test] +fn transitive_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar/baz"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + " + extern crate bar; + fn main() { bar::baz(); } + ", + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + baz = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[cfg(feature = "baz")] + pub fn baz() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("--features").arg("foo"), + execs().with_status(0), + ); +} + +#[test] +fn everything_in_the_lockfile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + f1 = ["d1/f1"] + f2 = ["d2"] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + optional = true + [dependencies.d3] + path = "d3" + optional = true + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [features] + f1 = [] + "#, + ) + .file("d1/src/lib.rs", "") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.2" + authors = [] + "#, + ) + .file("d2/src/lib.rs", "") + .file( + "d3/Cargo.toml", + r#" + [package] + name = "d3" + version = "0.0.3" + authors = [] + + [features] + f3 = [] + "#, + ) + .file("d3/src/lib.rs", "") + .build(); + + assert_that(p.cargo("fetch"), execs().with_status(0)); + let loc = p.root().join("Cargo.lock"); + let mut lockfile = String::new(); + t!(t!(File::open(&loc)).read_to_string(&mut lockfile)); + assert!( + lockfile.contains(r#"name = "d1""#), + "d1 not found\n{}", + lockfile + ); + assert!( + lockfile.contains(r#"name = "d2""#), + "d2 not found\n{}", + lockfile + ); + assert!( + lockfile.contains(r#"name = "d3""#), + "d3 not found\n{}", + lockfile + ); +} + +#[test] +fn no_rebuild_when_frobbing_default_feature() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = "a" } + b = { path = "b" } + "#, + ) + .file("src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = "../a", features = ["f1"], default-features = false } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.1.0" + authors = [] + + [features] + default = ["f1"] + f1 = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn unions_work_with_no_default_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = "a" } + b = { path = "b" } + "#, + 
) + .file( + "src/lib.rs", + r#" + extern crate a; + pub fn foo() { a::a(); } + "#, + ) + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = "../a", features = [], default-features = false } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.1.0" + authors = [] + + [features] + default = ["f1"] + f1 = [] + "#, + ) + .file( + "a/src/lib.rs", + r#" + #[cfg(feature = "f1")] + pub fn a() {} + "#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn optional_and_dev_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "foo", optional = true } + [dev-dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] test v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn activating_feature_activates_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "foo", optional = true } + + [features] + a = ["foo/a"] + "#, + ) + .file( + "src/lib.rs", + " + extern crate foo; + pub fn bar() { + foo::bar(); + } + ", + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [features] + a = [] + "#, + ) + .file( + "foo/src/lib.rs", + r#" + #[cfg(feature = "a")] + pub fn bar() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("--features").arg("a").arg("-v"), + execs().with_status(0), + ); +} + +#[test] +fn dep_feature_in_cmd_line() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.derived] + path = "derived" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate derived; + fn main() { derived::test(); } + "#, + ) + .file( + "derived/Cargo.toml", + r#" + [package] + name = "derived" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + + [features] + default = [] + derived-feat = ["bar/some-feat"] + "#, + ) + .file( + "derived/src/lib.rs", + r#" + extern crate bar; + pub use bar::test; + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + some-feat = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[cfg(feature = "some-feat")] + pub fn test() { print!("test"); } + "#, + ) + .build(); + + // The foo project requires that feature "some-feat" in "bar" is enabled. + // Building without any features enabled should fail: + assert_that(p.cargo("build"), execs().with_status(101)); + + // We should be able to enable the feature "derived-feat", which enables "some-feat", + // on the command line. 
The feature is enabled, thus building should be successful: + assert_that( + p.cargo("build") + .arg("--features") + .arg("derived/derived-feat"), + execs().with_status(0), + ); + + // Trying to enable features of transitive dependencies is an error + assert_that( + p.cargo("build").arg("--features").arg("bar/some-feat"), + execs() + .with_status(101) + .with_stderr("error: Package `foo v0.0.1 ([..])` does not have these features: `bar`"), + ); + + // Hierarchical feature specification should still be disallowed + assert_that( + p.cargo("build") + .arg("--features") + .arg("derived/bar/some-feat"), + execs() + .with_status(101) + .with_stderr("[ERROR] feature names may not contain slashes: `bar/some-feat`"), + ); +} + +#[test] +fn all_features_flag_enables_all_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = [] + bar = [] + + [dependencies.baz] + path = "baz" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(feature = "foo")] + pub fn foo() {} + + #[cfg(feature = "bar")] + pub fn bar() { + extern crate baz; + baz::baz(); + } + + fn main() { + foo(); + bar(); + } + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#, + ) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + assert_that( + p.cargo("build").arg("--all-features"), + execs().with_status(0), + ); +} + +#[test] +fn many_cli_features_comma_delimited() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#, + ) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + assert_that( + p.cargo("build").arg("--features").arg("bar,baz"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = p.url() + )), + ); +} + +#[test] +fn many_cli_features_comma_and_space_delimited() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + + [dependencies.bam] + path = "bam" + optional = true + + [dependencies.bap] + path = "bap" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + #[allow(unused_extern_crates)] + extern crate bam; + #[allow(unused_extern_crates)] + extern crate bap; + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + "#, + ) + .file("baz/src/lib.rs", "pub fn baz() {}") + .file( + "bam/Cargo.toml", + r#" + [package] + name = "bam" + version = "0.0.1" + authors = [] + "#, + ) + .file("bam/src/lib.rs", "pub fn bam() {}") + .file( + "bap/Cargo.toml", + r#" + [package] + name = "bap" + version = "0.0.1" + authors = [] + "#, + ) + .file("bap/src/lib.rs", "pub fn bap() {}") + .build(); + + assert_that( + p.cargo("build").arg("--features").arg("bar,baz bam bap"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] ba[..] v0.0.1 ({dir}/ba[..]) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); +} diff --git a/tests/testsuite/fetch.rs b/tests/testsuite/fetch.rs new file mode 100644 index 000000000..5ddd8802d --- /dev/null +++ b/tests/testsuite/fetch.rs @@ -0,0 +1,26 @@ +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn no_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#, + ) + .file( + "src/main.rs", + r#" + mod a; fn main() {} + "#, + ) + .file("src/a.rs", "") + .build(); + + assert_that(p.cargo("fetch"), execs().with_status(0).with_stdout("")); +} diff --git a/tests/testsuite/freshness.rs b/tests/testsuite/freshness.rs new file mode 100644 index 000000000..3b94f5f05 --- /dev/null +++ b/tests/testsuite/freshness.rs @@ -0,0 +1,1124 @@ +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargotest::sleep_ms; +use cargotest::support::{execs, project, path2url}; +use cargotest::support::paths::CargoPathExt; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_file}; + +#[test] +fn modifying_and_moving() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#, + ) + .file( + "src/main.rs", + r#" + mod a; fn main() {} + "#, + ) + .file("src/a.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = path2url(p.root()) + )), + ); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + p.root().move_into_the_past(); + p.root().join("target").move_into_the_past(); + + File::create(&p.root().join("src/a.rs")) + .unwrap() + .write_all(b"#[allow(unused)]fn main() {}") + .unwrap(); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = path2url(p.root()) + )), + ); + + fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap(); + assert_that(p.cargo("build"), execs().with_status(101)); +} + +#[test] +fn modify_only_some_files() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "mod a;") + .file("src/a.rs", "") + .file( + "src/main.rs", + r#" + mod b; + fn main() {} + "#, + ) + .file("src/b.rs", "") + .file("tests/test.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = path2url(p.root()) + )), + ); + assert_that(p.cargo("test"), execs().with_status(0)); + sleep_ms(1000); + + assert_that(&p.bin("foo"), existing_file()); + + let lib = p.root().join("src/lib.rs"); + let bin = p.root().join("src/b.rs"); + + File::create(&lib) + .unwrap() + .write_all(b"invalid rust code") + .unwrap(); + File::create(&bin) + .unwrap() + .write_all(b"#[allow(unused)]fn foo() {}") + .unwrap(); + lib.move_into_the_past(); + + // Make sure the binary is rebuilt, not the lib + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = path2url(p.root()) + )), + ); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn rebuild_sub_package_then_while_package() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [dependencies.a] + path = "a" + [dependencies.b] + path = "b" + "#, + ) + .file("src/lib.rs", "extern crate a; extern crate b;") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + authors = [] + version = "0.0.1" + [dependencies.b] + path = "../b" + "#, + ) + .file("a/src/lib.rs", "extern crate b;") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + authors = [] + version = "0.0.1" + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + File::create(&p.root().join("b/src/lib.rs")) + .unwrap() + .write_all( + br#" + pub fn b() {} + "#, + ) + .unwrap(); + + assert_that(p.cargo("build").arg("-pb"), execs().with_status(0)); + + File::create(&p.root().join("src/lib.rs")) + .unwrap() + .write_all( + br#" + extern crate a; + extern crate b; + pub fn toplevel() {} + "#, + ) + .unwrap(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn changing_lib_features_caches_targets() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [features] + foo = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); + + assert_that( + p.cargo("build").arg("--features").arg("foo"), + execs().with_status(0).with_stderr( + "\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + /* Targets should be cached from the first build */ + + assert_that( + p.cargo("build"), + execs() + .with_status(0) + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"), + ); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + + assert_that( + p.cargo("build").arg("--features").arg("foo"), + execs() + .with_status(0) + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"), + ); +} + +#[test] +fn changing_profiles_caches_targets() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [profile.dev] + panic = "abort" + + [profile.test] + panic = "unwind" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("test"), + execs().with_status(0).with_stderr( + "\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[..]debug[..]deps[..]foo-[..][EXE] +[DOCTEST] foo +", + ), + ); + + /* Targets should be cached from the first build */ + + assert_that( + p.cargo("build"), + execs() + .with_status(0) + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"), + ); + + assert_that( + p.cargo("test").arg("foo"), + execs().with_status(0).with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[..]debug[..]deps[..]foo-[..][EXE] +[DOCTEST] foo +", + ), + ); +} + +#[test] +fn changing_bin_paths_common_target_features_caches_targets() { + // Make sure dep_cache crate is built once per feature + let p = project("foo") + .file( + ".cargo/config", + r#" + [build] + target-dir = "./target" + "#, + ) + .file( + "dep_crate/Cargo.toml", + r#" + [package] + name = "dep_crate" + version = "0.0.1" + authors = [] + + [features] + ftest = [] + "#, + ) + .file( + "dep_crate/src/lib.rs", + r#" + #[cfg(feature = "ftest")] + pub fn yo() { + println!("ftest on") + } + #[cfg(not(feature = "ftest"))] + pub fn yo() { + println!("ftest off") + } + "#, + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + dep_crate = {path = "../dep_crate", features = []} + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/src/main.rs", + r#" + extern crate dep_crate; + use dep_crate::yo; + fn main() { + yo(); + } + "#, + ) + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + dep_crate = {path = "../dep_crate", features = ["ftest"]} + "#, + ) + .file("b/src/lib.rs", "") + .file( + "b/src/main.rs", + r#" + extern crate dep_crate; + use dep_crate::yo; + fn main() { + yo(); + } + "#, + ) + .build(); + + /* Build and rebuild a/. Ensure dep_crate only builds once */ + assert_that( + p.cargo("run").cwd(p.root().join("a")), + execs().with_status(0).with_stdout("ftest off").with_stderr( + "\ +[..]Compiling dep_crate v0.0.1 ([..]) +[..]Compiling a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `[..]target[/]debug[/]a[EXE]` +", + ), + ); + assert_that( + p.cargo("clean").arg("-p").arg("a").cwd(p.root().join("a")), + execs().with_status(0), + ); + assert_that( + p.cargo("run").cwd(p.root().join("a")), + execs().with_status(0).with_stdout("ftest off").with_stderr( + "\ +[..]Compiling a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]a[EXE]` +", + ), + ); + + /* Build and rebuild b/. Ensure dep_crate only builds once */ + assert_that( + p.cargo("run").cwd(p.root().join("b")), + execs().with_status(0).with_stdout("ftest on").with_stderr( + "\ +[..]Compiling dep_crate v0.0.1 ([..]) +[..]Compiling b v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]b[EXE]` +", + ), + ); + assert_that( + p.cargo("clean").arg("-p").arg("b").cwd(p.root().join("b")), + execs().with_status(0), + ); + assert_that( + p.cargo("run").cwd(p.root().join("b")), + execs().with_status(0).with_stdout("ftest on").with_stderr( + "\ +[..]Compiling b v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]b[EXE]` +", + ), + ); + + /* Build a/ package again. If we cache different feature dep builds correctly, + * this should not cause a rebuild of dep_crate */ + assert_that( + p.cargo("clean").arg("-p").arg("a").cwd(p.root().join("a")), + execs().with_status(0), + ); + assert_that( + p.cargo("run").cwd(p.root().join("a")), + execs().with_status(0).with_stdout("ftest off").with_stderr( + "\ +[..]Compiling a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]a[EXE]` +", + ), + ); + + /* Build b/ package again. If we cache different feature dep builds correctly, + * this should not cause a rebuild */ + assert_that( + p.cargo("clean").arg("-p").arg("b").cwd(p.root().join("b")), + execs().with_status(0), + ); + assert_that( + p.cargo("run").cwd(p.root().join("b")), + execs().with_status(0).with_stdout("ftest on").with_stderr( + "\ +[..]Compiling b v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]b[EXE]` +", + ), + ); +} + +#[test] +fn changing_bin_features_caches_targets() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [features] + foo = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + let msg = if cfg!(feature = "foo") { "feature on" } else { "feature off" }; + println!("{}", msg); + } + "#, + ) + .build(); + + assert_that( + p.cargo("run"), + execs() + .with_status(0) + .with_stdout("feature off") + .with_stderr( + "\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]foo[EXE]` +", + ), + ); + + assert_that( + p.cargo("run").arg("--features").arg("foo"), + execs() + .with_status(0) + .with_stdout("feature on") + .with_stderr( + "\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]foo[EXE]` +", + ), + ); + + /* Targets should be cached from the first build */ + + assert_that( + p.cargo("run"), + execs() + .with_status(0) + .with_stdout("feature off") + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[/]debug[/]foo[EXE]` +", + ), + ); + + assert_that( + p.cargo("run").arg("--features").arg("foo"), + execs() + .with_status(0) + .with_stdout("feature on") + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]foo[EXE]` +", + ), + ); +} + +#[test] +fn rebuild_tests_if_lib_changes() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file( + "tests/foo.rs", + r#" + extern crate foo; + #[test] + fn test() { foo::foo(); } + "#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(p.cargo("test"), execs().with_status(0)); + + sleep_ms(1000); + File::create(&p.root().join("src/lib.rs")).unwrap(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + assert_that(p.cargo("test").arg("-v"), execs().with_status(101)); +} + +#[test] +fn no_rebuild_transitive_target_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + [dev-dependencies] + b = { path = "b" } + "#, + ) + .file("src/lib.rs", "") + .file("tests/foo.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.foo.dependencies] + c = { path = "../c" } + "#, + ) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + c = { path = "../c" } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "c/Cargo.toml", + r#" + [package] + name = "c" + version = "0.0.1" + authors = [] + "#, + ) + .file("c/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that( + p.cargo("test").arg("--no-run"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] c v0.0.1 ([..]) +[COMPILING] b v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn rerun_if_changed_in_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "a/build.rs", + r#" + fn main() { + println!("cargo:rerun-if-changed=build.rs"); + } + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn same_build_dir_cached_packages() { + let p = project("foo") + .file( + "a1/Cargo.toml", + r#" + [package] + name = "a1" + version = "0.0.1" + authors = [] + [dependencies] + b = { path = "../b" } + "#, + ) + .file("a1/src/lib.rs", "") + .file( + "a2/Cargo.toml", + r#" + [package] + name = "a2" + version = "0.0.1" + authors = [] + [dependencies] + b = { path = "../b" } + "#, + ) + .file("a2/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + [dependencies] + c = { path = "../c" } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "c/Cargo.toml", + r#" + [package] + name = "c" + version = "0.0.1" + authors = [] + [dependencies] + d = { path = "../d" } + "#, + ) + .file("c/src/lib.rs", "") + .file( + "d/Cargo.toml", + r#" + [package] + name = "d" + version = "0.0.1" + authors = [] + "#, + ) + .file("d/src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + target-dir = "./target" + "#, + ) + .build(); + + assert_that( + p.cargo("build").cwd(p.root().join("a1")), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] d v0.0.1 ({dir}/d) +[COMPILING] c v0.0.1 ({dir}/c) +[COMPILING] b v0.0.1 ({dir}/b) +[COMPILING] a1 v0.0.1 ({dir}/a1) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + assert_that( + p.cargo("build").cwd(p.root().join("a2")), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] a2 v0.0.1 ({dir}/a2) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = p.url() + )), + ); +} + +#[test] +fn no_rebuild_if_build_artifacts_move_backwards_in_time() { + let p = project("backwards_in_time") + .file( + "Cargo.toml", + r#" + [package] + name = "backwards_in_time" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + p.root().move_into_the_past(); + + assert_that( + p.cargo("build"), + execs() + .with_status(0) + .with_stdout("") + .with_stderr("[FINISHED] [..]"), + ); +} + +#[test] +fn rebuild_if_build_artifacts_move_forward_in_time() { + let p = project("forwards_in_time") + .file( + "Cargo.toml", + r#" + [package] + name = "forwards_in_time" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + p.root().move_into_the_future(); + + assert_that( + p.cargo("build").env("RUST_LOG", ""), + execs().with_status(0).with_stdout("").with_stderr( + "\ +[COMPILING] a v0.0.1 ([..]) +[COMPILING] forwards_in_time v0.0.1 ([..]) +[FINISHED] [..] +", + ), + ); +} + +#[test] +fn rebuild_if_environment_changes() { + let p = project("env_change") + .file( + "Cargo.toml", + r#" + [package] + name = "env_change" + description = "old desc" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + println!("{}", env!("CARGO_PKG_DESCRIPTION")); + } + "#, + ) + .build(); + + assert_that( + p.cargo("run"), + execs() + .with_status(0) + .with_stdout("old desc") + .with_stderr(&format!( + "\ +[COMPILING] env_change v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]env_change[EXE]` +", + dir = p.url() + )), + ); + + File::create(&p.root().join("Cargo.toml")) + .unwrap() + .write_all( + br#" + [package] + name = "env_change" + description = "new desc" + version = "0.0.1" + authors = [] + "#, + ) + .unwrap(); + + assert_that( + p.cargo("run"), + execs() + .with_status(0) + .with_stdout("new desc") + .with_stderr(&format!( + "\ +[COMPILING] env_change v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[/]debug[/]env_change[EXE]` +", + dir = p.url() + )), + ); +} + +#[test] +fn no_rebuild_when_rename_dir() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + let mut new = p.root(); + new.pop(); + new.push("bar"); + fs::rename(p.root(), &new).unwrap(); + + assert_that( + p.cargo("build").cwd(&new), + execs() + .with_status(0) + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"), + ); +} + +#[test] +fn unused_optional_dep() { + Package::new("registry1", "0.1.0").publish(); + Package::new("registry2", "0.1.0").publish(); + Package::new("registry3", "0.1.0").publish(); + + let p = project("p") + .file( + "Cargo.toml", + r#" + [package] + name = "p" + authors = [] + version = "0.1.0" + + [dependencies] + foo = { path = "foo" } + bar = { path = "bar" } + registry1 = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.1" + authors = [] + + [dev-dependencies] + registry2 = "*" + "#, + ) + .file("foo/src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.1" + authors = [] + + [dependencies] + registry3 = { version = "*", optional = true } + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} + +#[test] +fn path_dev_dep_registry_updates() { + Package::new("registry1", "0.1.0").publish(); + Package::new("registry2", "0.1.0").publish(); + + let p = project("p") + .file( + "Cargo.toml", + r#" + [package] + name = "p" + authors = [] + version = "0.1.0" + + [dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.1" + authors = [] + + [dependencies] + registry1 = "*" + + [dev-dependencies] + bar = { path = "../bar"} + "#, + ) + .file("foo/src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.1" + authors = [] + + [dependencies] + registry2 = "*" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} diff --git a/tests/testsuite/generate_lockfile.rs b/tests/testsuite/generate_lockfile.rs new file mode 100644 index 000000000..6018904f0 --- /dev/null +++ b/tests/testsuite/generate_lockfile.rs @@ -0,0 +1,252 @@ +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargotest::support::{execs, project}; +use cargotest::support::registry::Package; +use cargotest::ChannelChanger; +use hamcrest::{assert_that, existing_file, is_not}; + +#[test] +fn adding_and_removing_packages() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + authors = [] + version = "0.0.1" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + + 
let toml = p.root().join("Cargo.toml"); + let lock1 = p.read_lockfile(); + + // add a dep + File::create(&toml) + .unwrap() + .write_all( + br#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [dependencies.bar] + path = "bar" + "#, + ) + .unwrap(); + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + let lock2 = p.read_lockfile(); + assert_ne!(lock1, lock2); + + // change the dep + File::create(&p.root().join("bar/Cargo.toml")) + .unwrap() + .write_all( + br#" + [package] + name = "bar" + authors = [] + version = "0.0.2" + "#, + ) + .unwrap(); + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + let lock3 = p.read_lockfile(); + assert_ne!(lock1, lock3); + assert_ne!(lock2, lock3); + + // remove the dep + println!("lock4"); + File::create(&toml) + .unwrap() + .write_all( + br#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#, + ) + .unwrap(); + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + let lock4 = p.read_lockfile(); + assert_eq!(lock1, lock4); +} + +#[test] +fn no_index_update() { + Package::new("serde", "1.0.0").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [dependencies] + serde = "1.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("generate-lockfile"), + execs().with_stderr("[UPDATING] registry `[..]`"), + ); + + assert_that( + p.cargo("generate-lockfile") + .masquerade_as_nightly_cargo() + .arg("-Zno-index-update"), + execs().with_status(0).with_stdout("").with_stderr(""), + ); +} + +#[test] +fn preserve_metadata() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + authors = [] + version = "0.0.1" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + + let metadata = r#" +[metadata] +bar = "baz" +foo = "bar" +"#; + let lockfile = p.root().join("Cargo.lock"); + let lock = p.read_lockfile(); + let data = lock + metadata; + File::create(&lockfile) + .unwrap() + .write_all(data.as_bytes()) + .unwrap(); + + // Build and make sure the metadata is still there + assert_that(p.cargo("build"), execs().with_status(0)); + let lock = p.read_lockfile(); + assert!(lock.contains(metadata.trim()), "{}", lock); + + // Update and make sure the metadata is still there + assert_that(p.cargo("update"), execs().with_status(0)); + let lock = p.read_lockfile(); + assert!(lock.contains(metadata.trim()), "{}", lock); +} + +#[test] +fn preserve_line_endings_issue_2076() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + authors = [] + version = "0.0.1" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + let lockfile = p.root().join("Cargo.lock"); + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + assert_that(&lockfile, existing_file()); + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + + let lock0 = p.read_lockfile(); + + assert!(lock0.starts_with("[[package]]\n")); + + let lock1 = lock0.replace("\n", "\r\n"); + { + File::create(&lockfile) + .unwrap() + .write_all(lock1.as_bytes()) + .unwrap(); + } + + 
assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + + let lock2 = p.read_lockfile(); + + assert!(lock2.starts_with("[[package]]\r\n")); + assert_eq!(lock1, lock2); +} + +#[test] +fn cargo_update_generate_lockfile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + let lockfile = p.root().join("Cargo.lock"); + assert_that(&lockfile, is_not(existing_file())); + assert_that(p.cargo("update"), execs().with_status(0).with_stdout("")); + assert_that(&lockfile, existing_file()); + + fs::remove_file(p.root().join("Cargo.lock")).unwrap(); + + assert_that(&lockfile, is_not(existing_file())); + assert_that(p.cargo("update"), execs().with_status(0).with_stdout("")); + assert_that(&lockfile, existing_file()); +} diff --git a/tests/testsuite/git.rs b/tests/testsuite/git.rs new file mode 100644 index 000000000..d764cc2a3 --- /dev/null +++ b/tests/testsuite/git.rs @@ -0,0 +1,3202 @@ +use git2; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::net::{TcpListener, TcpStream}; +use std::path::Path; +use std::sync::Arc; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::thread; + +use cargo::util::process; +use cargotest::sleep_ms; +use cargotest::support::paths::{self, CargoPathExt}; +use cargotest::support::{execs, git, main_file, project, path2url}; +use cargotest::ChannelChanger; +use hamcrest::{assert_that, existing_file}; + +#[test] +fn cargo_compile_simple_git_dep() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep1" + "#, + ) + .file( + "src/dep1.rs", + r#" + pub fn hello() -> &'static str { + "hello world" + } + "#, + ) + }).unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/main.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + let root = project.root(); + let git_root = git_project.root(); + + assert_that( + project.cargo("build"), + execs().with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [COMPILING] dep1 v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + path2url(git_root.clone()), + path2url(git_root), + path2url(root) + )), + ); + + assert_that(&project.bin("foo"), existing_file()); + + assert_that( + process(&project.bin("foo")), + execs().with_stdout("hello world\n"), + ); +} + +#[test] +fn cargo_compile_forbird_git_httpsrepo_offline() { + let p = project("need_remote_repo") + .file( + "Cargo.toml", + r#" + + [project] + name = "need_remote_repo" + version = "0.5.0" + authors = ["chabapok@example.com"] + + [dependencies.dep1] + git = 'https://github.com/some_user/dep1.git' + "#, + ) + .file("src/main.rs", "") + .build(); + + assert_that(p.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"), + execs().with_status(101). 
+            with_stderr("\
+error: failed to load source for a dependency on `dep1`
+
+Caused by:
+  Unable to update https://github.com/some_user/dep1.git
+
+Caused by:
+  can't checkout from 'https://github.com/some_user/dep1.git': you are in the offline mode (-Z offline)"));
+}
+
+#[test]
+fn cargo_compile_offline_with_cached_git_dep() {
+    let git_project = git::new("dep1", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+            [project]
+            name = "dep1"
+            version = "0.5.0"
+            authors = ["chabapok@example.com"]
+
+            [lib]
+            name = "dep1""#,
+            )
+            .file(
+                "src/lib.rs",
+                r#"
+            pub static COOL_STR: &str = "cached git repo rev1";
+        "#,
+            )
+    }).unwrap();
+
+    let repo = git2::Repository::open(&git_project.root()).unwrap();
+    let rev1 = repo.revparse_single("HEAD").unwrap().id();
+
+    // Commit the changes and make sure we trigger a recompile
+    File::create(&git_project.root().join("src/lib.rs"))
+        .unwrap()
+        .write_all(
+            br#"
+            pub static COOL_STR: &str = "cached git repo rev2";
+        "#,
+        )
+        .unwrap();
+    git::add(&repo);
+    let rev2 = git::commit(&repo);
+
+    {
+        // cache rev1 and rev2 to the registry
+        let prj = project("cache_git_dep")
+            .file(
+                "Cargo.toml",
+                &format!(
+                    r#"
+                [project]
+                name = "cache_git_dep"
+                version = "0.5.0"
+
+                [dependencies.dep1]
+                git = '{}'
+                rev = "{}"
+            "#,
+                    git_project.url(),
+                    rev1.clone()
+                ),
+            )
+            .file("src/main.rs", "fn main(){}")
+            .build();
+        assert_that(prj.cargo("build"), execs().with_status(0));
+
+        File::create(&prj.root().join("Cargo.toml"))
+            .unwrap()
+            .write_all(&format!(
+                r#"
+            [project]
+            name = "cache_git_dep"
+            version = "0.5.0"
+
+            [dependencies.dep1]
+            git = '{}'
+            rev = "{}"
+        "#,
+                git_project.url(),
+                rev2.clone()
+            ).as_bytes())
+            .unwrap();
+        assert_that(prj.cargo("build"), execs().with_status(0));
+    }
+
+    let project = project("foo")
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+
+            [dependencies.dep1]
+            git = '{}'
+        "#,
+                git_project.url()
+            ),
+        )
+        .file(
+            "src/main.rs",
+            &main_file(r#""hello from {}", dep1::COOL_STR"#, &["dep1"]),
+        )
+        .build();
+
+    let root = project.root();
+    let git_root = git_project.root();
+
+    assert_that(
+        project
+            .cargo("build")
+            .masquerade_as_nightly_cargo()
+            .arg("-Zoffline"),
+        execs().with_stderr(format!(
+            "\
+[COMPILING] dep1 v0.5.0 ({}#[..])
+[COMPILING] foo v0.5.0 ({})
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+            path2url(git_root),
+            path2url(root)
+        )),
+    );
+
+    assert_that(&project.bin("foo"), existing_file());
+
+    assert_that(
+        process(&project.bin("foo")),
+        execs().with_stdout("hello from cached git repo rev2\n"),
+    );
+
+    drop(
+        File::create(&project.root().join("Cargo.toml"))
+            .unwrap()
+            .write_all(&format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+
+            [dependencies.dep1]
+            git = '{}'
+            rev = "{}"
+        "#,
+                git_project.url(),
+                rev1
+            ).as_bytes())
+            .unwrap(),
+    );
+
+    let _out = project
+        .cargo("build")
+        .masquerade_as_nightly_cargo()
+        .arg("-Zoffline")
+        .exec_with_output();
+    assert_that(
+        process(&project.bin("foo")),
+        execs().with_stdout("hello from cached git repo rev1\n"),
+    );
+}
+
+#[test]
+fn cargo_compile_git_dep_branch() {
+    let project = project("foo");
+    let git_project = git::new("dep1", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+            [project]
+
+            name = "dep1"
+            version = "0.5.0"
+            authors = ["carlhuda@example.com"]
+
+            [lib]
+
+            name = "dep1"
+        "#,
+            )
+            .file(
+                "src/dep1.rs",
+                r#"
+            pub fn hello() -> &'static str {
+                "hello world"
+            }
+        "#,
+            )
+    }).unwrap();
+
+    // Make a new branch
based on the current HEAD commit + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let head = repo.head().unwrap().target().unwrap(); + let head = repo.find_commit(head).unwrap(); + repo.branch("branchy", &head, true).unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + branch = "branchy" + + "#, + git_project.url() + ), + ) + .file( + "src/main.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + let root = project.root(); + let git_root = git_project.root(); + + assert_that( + project.cargo("build"), + execs().with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [COMPILING] dep1 v0.5.0 ({}?branch=branchy#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + path2url(git_root.clone()), + path2url(git_root), + path2url(root) + )), + ); + + assert_that(&project.bin("foo"), existing_file()); + + assert_that( + process(&project.bin("foo")), + execs().with_stdout("hello world\n"), + ); +} + +#[test] +fn cargo_compile_git_dep_tag() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep1" + "#, + ) + .file( + "src/dep1.rs", + r#" + pub fn hello() -> &'static str { + "hello world" + } + "#, + ) + }).unwrap(); + + // Make a tag corresponding to the current HEAD + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let head = repo.head().unwrap().target().unwrap(); + repo.tag( + "v0.1.0", + &repo.find_object(head, None).unwrap(), + &repo.signature().unwrap(), + "make a new tag", + false, + ).unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + tag = "v0.1.0" + "#, + git_project.url() + ), + ) + .file( + "src/main.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + let root = project.root(); + let git_root = git_project.root(); + + assert_that( + project.cargo("build"), + execs().with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [COMPILING] dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + path2url(git_root.clone()), + path2url(git_root), + path2url(root) + )), + ); + + assert_that(&project.bin("foo"), existing_file()); + + assert_that( + process(&project.bin("foo")), + execs().with_stdout("hello world\n"), + ); + + assert_that(project.cargo("build"), execs().with_status(0)); +} + +#[test] +fn cargo_compile_with_nested_paths() { + let git_project = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [dependencies.dep2] + + version = "0.5.0" + path = "vendor/dep2" + + [lib] + + name = "dep1" + "#, + ) + .file( + "src/dep1.rs", + r#" + extern crate dep2; + + pub fn hello() -> &'static str { + dep2::hello() + } + "#, + ) + .file( + "vendor/dep2/Cargo.toml", + r#" + [project] + + name = "dep2" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep2" + "#, + ) + .file( + "vendor/dep2/src/dep2.rs", + r#" + pub fn hello() -> &'static str { + "hello world" + } + 
"#, + ) + }).unwrap(); + + let p = project("parent") + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "parent" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + version = "0.5.0" + git = '{}' + + [[bin]] + + name = "parent" + "#, + git_project.url() + ), + ) + .file( + "src/parent.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + p.cargo("build").exec_with_output().unwrap(); + + assert_that(&p.bin("parent"), existing_file()); + + assert_that( + process(&p.bin("parent")), + execs().with_stdout("hello world\n"), + ); +} + +#[test] +fn cargo_compile_with_malformed_nested_paths() { + let git_project = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep1" + "#, + ) + .file( + "src/dep1.rs", + r#" + pub fn hello() -> &'static str { + "hello world" + } + "#, + ) + .file( + "vendor/dep2/Cargo.toml", + r#" + !INVALID! + "#, + ) + }).unwrap(); + + let p = project("parent") + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "parent" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + version = "0.5.0" + git = '{}' + + [[bin]] + + name = "parent" + "#, + git_project.url() + ), + ) + .file( + "src/parent.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + p.cargo("build").exec_with_output().unwrap(); + + assert_that(&p.bin("parent"), existing_file()); + + assert_that( + process(&p.bin("parent")), + execs().with_stdout("hello world\n"), + ); +} + +#[test] +fn cargo_compile_with_meta_package() { + let git_project = git::new("meta-dep", |project| { + project + .file( + "dep1/Cargo.toml", + r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep1" + "#, + ) + .file( + "dep1/src/dep1.rs", + r#" + pub fn hello() -> &'static str { + "this is dep1" + } + "#, + ) + .file( + "dep2/Cargo.toml", + r#" + [project] + + name = "dep2" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + + name = "dep2" + "#, + ) + .file( + "dep2/src/dep2.rs", + r#" + pub fn hello() -> &'static str { + "this is dep2" + } + "#, + ) + }).unwrap(); + + let p = project("parent") + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "parent" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + version = "0.5.0" + git = '{}' + + [dependencies.dep2] + + version = "0.5.0" + git = '{}' + + [[bin]] + + name = "parent" + "#, + git_project.url(), + git_project.url() + ), + ) + .file( + "src/parent.rs", + &main_file( + r#""{} {}", dep1::hello(), dep2::hello()"#, + &["dep1", "dep2"], + ), + ) + .build(); + + p.cargo("build").exec_with_output().unwrap(); + + assert_that(&p.bin("parent"), existing_file()); + + assert_that( + process(&p.bin("parent")), + execs().with_stdout("this is dep1 this is dep2\n"), + ); +} + +#[test] +fn cargo_compile_with_short_ssh_git() { + let url = "git@github.com:a/dep"; + + let project = project("project") + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep] + + git = "{}" + + [[bin]] + + name = "foo" + "#, + url + ), + ) + .file( + "src/foo.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + assert_that( + project.cargo("build"), + execs().with_stdout("").with_stderr(&format!( + "\ +[ERROR] failed to parse manifest 
at `[..]` + +Caused by: + invalid url `{}`: relative URL without a base +", + url + )), + ); +} + +#[test] +fn two_revs_same_deps() { + let bar = git::new("meta-dep", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }).unwrap(); + + let repo = git2::Repository::open(&bar.root()).unwrap(); + let rev1 = repo.revparse_single("HEAD").unwrap().id(); + + // Commit the changes and make sure we trigger a recompile + File::create(&bar.root().join("src/lib.rs")) + .unwrap() + .write_all( + br#" + pub fn bar() -> i32 { 2 } + "#, + ) + .unwrap(); + git::add(&repo); + let rev2 = git::commit(&repo); + + let foo = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = '{}' + rev = "{}" + + [dependencies.baz] + path = "../baz" + "#, + bar.url(), + rev1 + ), + ) + .file( + "src/main.rs", + r#" + extern crate bar; + extern crate baz; + + fn main() { + assert_eq!(bar::bar(), 1); + assert_eq!(baz::baz(), 2); + } + "#, + ) + .build(); + + let _baz = project("baz") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "baz" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = '{}' + rev = "{}" + "#, + bar.url(), + rev2 + ), + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + pub fn baz() -> i32 { bar::bar() } + "#, + ) + .build(); + + assert_that(foo.cargo("build").arg("-v"), execs().with_status(0)); + assert_that(&foo.bin("foo"), existing_file()); + assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); +} + +#[test] +fn recompilation() { + let git_project = git::new("bar", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + name = "bar" + "#, + ) + .file( + "src/bar.rs", + r#" + pub fn bar() {} + "#, + ) + }).unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + git = '{}' + "#, + git_project.url() + ), + ) + .file("src/main.rs", &main_file(r#""{:?}", bar::bar()"#, &["bar"])) + .build(); + + // First time around we should compile both foo and bar + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [COMPILING] bar v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + git_project.url(), + git_project.url(), + p.url() + )), + ); + + // Don't recompile the second time + assert_that(p.cargo("build"), execs().with_stdout("")); + + // Modify a file manually, shouldn't trigger a recompile + File::create(&git_project.root().join("src/bar.rs")) + .unwrap() + .write_all( + br#" + pub fn bar() { println!("hello!"); } + "#, + ) + .unwrap(); + + assert_that(p.cargo("build"), execs().with_stdout("")); + + assert_that( + p.cargo("update"), + execs().with_stderr(&format!( + "[UPDATING] git repository `{}`", + git_project.url() + )), + ); + + assert_that(p.cargo("build"), execs().with_stdout("")); + + // Commit the changes and make sure we don't trigger a recompile because the + // lockfile says not to change + let repo = git2::Repository::open(&git_project.root()).unwrap(); + git::add(&repo); + git::commit(&repo); + + println!("compile after commit"); + assert_that(p.cargo("build"), 
execs().with_stdout("")); + p.root().move_into_the_past(); + + // Update the dependency and carry on! + assert_that( + p.cargo("update"), + execs().with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\ + ", + git_project.url() + )), + ); + println!("going for the last compile"); + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] bar v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + git_project.url(), + p.url() + )), + ); + + // Make sure clean only cleans one dep + assert_that( + p.cargo("clean").arg("-p").arg("foo"), + execs().with_stdout(""), + ); + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url() + )), + ); +} + +#[test] +fn update_with_shared_deps() { + let git_project = git::new("bar", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [lib] + name = "bar" + "#, + ) + .file( + "src/bar.rs", + r#" + pub fn bar() {} + "#, + ) + }).unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + path = "dep1" + [dependencies.dep2] + path = "dep2" + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate dep1; + #[allow(unused_extern_crates)] + extern crate dep2; + fn main() {} + "#, + ) + .file( + "dep1/Cargo.toml", + &format!( + r#" + [package] + name = "dep1" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + version = "0.5.0" + git = '{}' + "#, + git_project.url() + ), + ) + .file("dep1/src/lib.rs", "") + .file( + "dep2/Cargo.toml", + &format!( + r#" + [package] + name = "dep2" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + version = "0.5.0" + git = '{}' + "#, + git_project.url() + ), + ) + .file("dep2/src/lib.rs", "") + .build(); + + // First time around we should compile both foo and bar + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "\ +[UPDATING] git repository `{git}` +[COMPILING] bar v0.5.0 ({git}#[..]) +[COMPILING] [..] v0.5.0 ([..]) +[COMPILING] [..] v0.5.0 ([..]) +[COMPILING] foo v0.5.0 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + git = git_project.url(), + dir = p.url() + )), + ); + + // Modify a file manually, and commit it + File::create(&git_project.root().join("src/bar.rs")) + .unwrap() + .write_all( + br#" + pub fn bar() { println!("hello!"); } + "#, + ) + .unwrap(); + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let old_head = repo.head().unwrap().target().unwrap(); + git::add(&repo); + git::commit(&repo); + + sleep_ms(1000); + + // By default, not transitive updates + println!("dep1 update"); + assert_that( + p.cargo("update").arg("-p").arg("dep1"), + execs().with_stdout(""), + ); + + // Don't do anything bad on a weird --precise argument + println!("bar bad precise update"); + assert_that( + p.cargo("update") + .arg("-p") + .arg("bar") + .arg("--precise") + .arg("0.1.2"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] git repository [..] +[ERROR] Unable to update [..] + +Caused by: + revspec '0.1.2' not found; [..] 
+", + ), + ); + + // Specifying a precise rev to the old rev shouldn't actually update + // anything because we already have the rev in the db. + println!("bar precise update"); + assert_that( + p.cargo("update") + .arg("-p") + .arg("bar") + .arg("--precise") + .arg(&old_head.to_string()), + execs().with_stdout(""), + ); + + // Updating aggressively should, however, update the repo. + println!("dep1 aggressive update"); + assert_that( + p.cargo("update").arg("-p").arg("dep1").arg("--aggressive"), + execs().with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\ + ", + git_project.url() + )), + ); + + // Make sure we still only compile one version of the git repo + println!("build"); + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "\ +[COMPILING] bar v0.5.0 ({git}#[..]) +[COMPILING] [..] v0.5.0 ({dir}[..]dep[..]) +[COMPILING] [..] v0.5.0 ({dir}[..]dep[..]) +[COMPILING] foo v0.5.0 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + git = git_project.url(), + dir = p.url() + )), + ); + + // We should be able to update transitive deps + assert_that( + p.cargo("update").arg("-p").arg("bar"), + execs().with_stderr(&format!( + "[UPDATING] git repository `{}`", + git_project.url() + )), + ); +} + +#[test] +fn dep_with_submodule() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project.file( + "Cargo.toml", + r#" + [package] + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + "#, + ) + }).unwrap(); + let git_project2 = + git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")).unwrap(); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let url = path2url(git_project2.root()).to_string(); + git::add_submodule(&repo, &url, Path::new("src")); + git::commit(&repo); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/lib.rs", + " + extern crate dep1; + pub fn foo() { dep1::dep() } + ", + ) + .build(); + + assert_that( + project.cargo("build"), + execs() + .with_stderr( + "\ +[UPDATING] git repository [..] +[COMPILING] dep1 [..] +[COMPILING] foo [..] 
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .with_status(0), + ); +} + +#[test] +fn dep_with_bad_submodule() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project.file( + "Cargo.toml", + r#" + [package] + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + "#, + ) + }).unwrap(); + let git_project2 = + git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")).unwrap(); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let url = path2url(git_project2.root()).to_string(); + git::add_submodule(&repo, &url, Path::new("src")); + git::commit(&repo); + + // now amend the first commit on git_project2 to make submodule ref point to not-found + // commit + let repo = git2::Repository::open(&git_project2.root()).unwrap(); + let original_submodule_ref = repo.refname_to_id("refs/heads/master").unwrap(); + let commit = repo.find_commit(original_submodule_ref).unwrap(); + commit + .amend( + Some("refs/heads/master"), + None, + None, + None, + Some("something something"), + None, + ) + .unwrap(); + + let p = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/lib.rs", + " + extern crate dep1; + pub fn foo() { dep1::dep() } + ", + ) + .build(); + + let expected = format!( + "\ +[UPDATING] git repository [..] +[ERROR] failed to load source for a dependency on `dep1` + +Caused by: + Unable to update {} + +Caused by: + failed to update submodule `src` + +Caused by: + object not found - no match for id [..] +", + path2url(git_project.root()) + ); + + assert_that( + p.cargo("build"), + execs().with_stderr(expected).with_status(101), + ); +} + +#[test] +fn two_deps_only_update_one() { + let project = project("foo"); + let git1 = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + "#, + ) + .file("src/lib.rs", "") + }).unwrap(); + let git2 = git::new("dep2", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "dep2" + version = "0.5.0" + authors = ["carlhuda@example.com"] + "#, + ) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + git = '{}' + [dependencies.dep2] + git = '{}' + "#, + git1.url(), + git2.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[UPDATING] git repository `[..]`\n\ + [UPDATING] git repository `[..]`\n\ + [COMPILING] [..] v0.5.0 ([..])\n\ + [COMPILING] [..] 
v0.5.0 ([..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + p.url() + )), + ); + + File::create(&git1.root().join("src/lib.rs")) + .unwrap() + .write_all( + br#" + pub fn foo() {} + "#, + ) + .unwrap(); + let repo = git2::Repository::open(&git1.root()).unwrap(); + git::add(&repo); + git::commit(&repo); + + assert_that( + p.cargo("update").arg("-p").arg("dep1"), + execs().with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\ + ", + git1.url() + )), + ); +} + +#[test] +fn stale_cached_version() { + let bar = git::new("meta-dep", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }).unwrap(); + + // Update the git database in the cache with the current state of the git + // repo + let foo = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = '{}' + "#, + bar.url() + ), + ) + .file( + "src/main.rs", + r#" + extern crate bar; + + fn main() { assert_eq!(bar::bar(), 1) } + "#, + ) + .build(); + + assert_that(foo.cargo("build"), execs().with_status(0)); + assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); + + // Update the repo, and simulate someone else updating the lockfile and then + // us pulling it down. + File::create(&bar.root().join("src/lib.rs")) + .unwrap() + .write_all( + br#" + pub fn bar() -> i32 { 1 + 0 } + "#, + ) + .unwrap(); + let repo = git2::Repository::open(&bar.root()).unwrap(); + git::add(&repo); + git::commit(&repo); + + sleep_ms(1000); + + let rev = repo.revparse_single("HEAD").unwrap().id(); + + File::create(&foo.root().join("Cargo.lock")) + .unwrap() + .write_all( + format!( + r#" + [[package]] + name = "foo" + version = "0.0.0" + dependencies = [ + 'bar 0.0.0 (git+{url}#{hash})' + ] + + [[package]] + name = "bar" + version = "0.0.0" + source = 'git+{url}#{hash}' + "#, + url = bar.url(), + hash = rev + ).as_bytes(), + ) + .unwrap(); + + // Now build! + assert_that( + foo.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] git repository `{bar}` +[COMPILING] bar v0.0.0 ({bar}#[..]) +[COMPILING] foo v0.0.0 ({foo}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + bar = bar.url(), + foo = foo.url() + )), + ); + assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); +} + +#[test] +fn dep_with_changed_submodule() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project.file( + "Cargo.toml", + r#" + [package] + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + "#, + ) + }).unwrap(); + + let git_project2 = git::new("dep2", |project| { + project.file("lib.rs", "pub fn dep() -> &'static str { \"project2\" }") + }).unwrap(); + + let git_project3 = git::new("dep3", |project| { + project.file("lib.rs", "pub fn dep() -> &'static str { \"project3\" }") + }).unwrap(); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), Path::new("src")); + git::commit(&repo); + + let p = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + [dependencies.dep1] + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/main.rs", + " + extern crate dep1; + pub fn main() { println!(\"{}\", dep1::dep()) } + ", + ) + .build(); + + println!("first run"); + assert_that( + p.cargo("run"), + execs() + .with_stderr( + "[UPDATING] git repository `[..]`\n\ + [COMPILING] dep1 v0.5.0 ([..])\n\ + [COMPILING] foo v0.5.0 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n\ + [RUNNING] `target[/]debug[/]foo[EXE]`\n", + ) + .with_stdout("project2\n") + .with_status(0), + ); + + File::create(&git_project.root().join(".gitmodules")) + .unwrap() + .write_all( + format!( + "[submodule \"src\"]\n\tpath = src\n\turl={}", + git_project3.url() + ).as_bytes(), + ) + .unwrap(); + + // Sync the submodule and reset it to the new remote. + sub.sync().unwrap(); + { + let subrepo = sub.open().unwrap(); + subrepo + .remote_add_fetch("origin", "refs/heads/*:refs/heads/*") + .unwrap(); + subrepo + .remote_set_url("origin", &git_project3.url().to_string()) + .unwrap(); + let mut origin = subrepo.find_remote("origin").unwrap(); + origin.fetch(&[], None, None).unwrap(); + let id = subrepo.refname_to_id("refs/remotes/origin/master").unwrap(); + let obj = subrepo.find_object(id, None).unwrap(); + subrepo.reset(&obj, git2::ResetType::Hard, None).unwrap(); + } + sub.add_to_index(true).unwrap(); + git::add(&repo); + git::commit(&repo); + + sleep_ms(1000); + // Update the dependency and carry on! 
+ println!("update"); + assert_that( + p.cargo("update").arg("-v"), + execs().with_stderr("").with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\ + ", + git_project.url() + )), + ); + + println!("last run"); + assert_that( + p.cargo("run"), + execs() + .with_stderr( + "[COMPILING] dep1 v0.5.0 ([..])\n\ + [COMPILING] foo v0.5.0 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n\ + [RUNNING] `target[/]debug[/]foo[EXE]`\n", + ) + .with_stdout("project3\n") + .with_status(0), + ); +} + +#[test] +fn dev_deps_with_testing() { + let p2 = git::new("bar", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn gimme() -> &'static str { "zoidberg" } + "#, + ) + }).unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.bar] + version = "0.5.0" + git = '{}' + "#, + p2.url() + ), + ) + .file( + "src/main.rs", + r#" + fn main() {} + + #[cfg(test)] + mod tests { + extern crate bar; + #[test] fn foo() { bar::gimme(); } + } + "#, + ) + .build(); + + // Generate a lockfile which did not use `bar` to compile, but had to update + // `bar` to generate the lockfile + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "\ +[UPDATING] git repository `{bar}` +[COMPILING] foo v0.5.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + url = p.url(), + bar = p2.url() + )), + ); + + // Make sure we use the previous resolution of `bar` instead of updating it + // a second time. + assert_that( + p.cargo("test"), + execs() + .with_stderr( + "\ +[COMPILING] [..] v0.5.0 ([..]) +[COMPILING] [..] v0.5.0 ([..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", + ) + .with_stdout_contains("test tests::foo ... ok"), + ); +} + +#[test] +fn git_build_cmd_freshness() { + let foo = git::new("foo", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .file( + ".gitignore", + " + src/bar.rs + ", + ) + }).unwrap(); + foo.root().move_into_the_past(); + + sleep_ms(1000); + + assert_that( + foo.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + url = foo.url() + )), + ); + + // Smoke test to make sure it doesn't compile again + println!("first pass"); + assert_that(foo.cargo("build"), execs().with_status(0).with_stdout("")); + + // Modify an ignored file and make sure we don't rebuild + println!("second pass"); + File::create(&foo.root().join("src/bar.rs")).unwrap(); + assert_that(foo.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn git_name_not_always_needed() { + let p2 = git::new("bar", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn gimme() -> &'static str { "zoidberg" } + "#, + ) + }).unwrap(); + + let repo = git2::Repository::open(&p2.root()).unwrap(); + let mut cfg = repo.config().unwrap(); + let _ = cfg.remove("user.name"); + let _ = cfg.remove("user.email"); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dev-dependencies.bar] + git = '{}' + "#, + p2.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + // Generate a lockfile which did not use `bar` to compile, but had to update + // `bar` to generate the lockfile + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "\ +[UPDATING] git repository `{bar}` +[COMPILING] foo v0.5.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + url = p.url(), + bar = p2.url() + )), + ); +} + +#[test] +fn git_repo_changing_no_rebuild() { + let bar = git::new("bar", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }).unwrap(); + + // Lock p1 to the first rev in the git repo + let p1 = project("p1") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "p1" + version = "0.5.0" + authors = [] + build = 'build.rs' + [dependencies.bar] + git = '{}' + "#, + bar.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .file("build.rs", "fn main() {}") + .build(); + p1.root().move_into_the_past(); + assert_that( + p1.cargo("build"), + execs().with_stderr(&format!( + "\ +[UPDATING] git repository `{bar}` +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + bar = bar.url() + )), + ); + + // Make a commit to lock p2 to a different rev + File::create(&bar.root().join("src/lib.rs")) + .unwrap() + .write_all( + br#" + pub fn bar() -> i32 { 2 } + "#, + ) + .unwrap(); + let repo = git2::Repository::open(&bar.root()).unwrap(); + git::add(&repo); + git::commit(&repo); + + // Lock p2 to the second rev + let p2 = project("p2") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "p2" + version = "0.5.0" + authors = [] + [dependencies.bar] + git = '{}' + "#, + bar.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that( + p2.cargo("build"), + execs().with_stderr(&format!( + "\ +[UPDATING] git repository `{bar}` +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + bar = bar.url() + )), + ); + + // And now for the real test! Make sure that p1 doesn't get rebuilt + // even though the git repo has changed. 
+ assert_that(p1.cargo("build"), execs().with_stdout("")); +} + +#[test] +fn git_dep_build_cmd() { + let p = git::new("foo", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + + [[bin]] + + name = "foo" + "#, + ) + .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [lib] + name = "bar" + path = "src/bar.rs" + "#, + ) + .file( + "bar/src/bar.rs.in", + r#" + pub fn gimme() -> i32 { 0 } + "#, + ) + .file( + "bar/build.rs", + r#" + use std::fs; + fn main() { + fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); + } + "#, + ) + }).unwrap(); + + p.root().join("bar").move_into_the_past(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + assert_that(process(&p.bin("foo")), execs().with_stdout("0\n")); + + // Touching bar.rs.in should cause the `build` command to run again. + fs::File::create(&p.root().join("bar/src/bar.rs.in")) + .unwrap() + .write_all(b"pub fn gimme() -> i32 { 1 }") + .unwrap(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + assert_that(process(&p.bin("foo")), execs().with_stdout("1\n")); +} + +#[test] +fn fetch_downloads() { + let bar = git::new("bar", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }).unwrap(); + + let p = project("p1") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "p1" + version = "0.5.0" + authors = [] + [dependencies.bar] + git = '{}' + "#, + bar.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that( + p.cargo("fetch"), + execs().with_status(0).with_stderr(&format!( + "[UPDATING] git repository `{url}`", + url = bar.url() + )), + ); + + assert_that(p.cargo("fetch"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn warnings_in_git_dep() { + let bar = git::new("bar", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file("src/lib.rs", "fn unused() {}") + }).unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.bar] + git = '{}' + "#, + bar.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [COMPILING] bar v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + bar.url(), + bar.url(), + p.url() + )), + ); +} + +#[test] +fn update_ambiguous() { + let foo1 = git::new("foo1", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file("src/lib.rs", "") + }).unwrap(); + let foo2 = git::new("foo2", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.6.0" + authors = ["wycats@example.com"] + "#, + ) + .file("src/lib.rs", "") + }).unwrap(); + let bar = git::new("bar", |project| { + project + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + 
[dependencies.foo]
+                git = '{}'
+            "#,
+                    foo2.url()
+                ),
+            )
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let p = project("project")
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "project"
+            version = "0.5.0"
+            authors = []
+            [dependencies.foo]
+            git = '{}'
+            [dependencies.bar]
+            git = '{}'
+        "#,
+                foo1.url(),
+                bar.url()
+            ),
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
+    assert_that(
+        p.cargo("update").arg("-p").arg("foo"),
+        execs().with_status(101).with_stderr(
+            "\
+[ERROR] There are multiple `foo` packages in your project, and the specification `foo` \
+is ambiguous.
+Please re-run this command with `-p <spec>` where `<spec>` is one of the \
+following:
+  foo:0.[..].0
+  foo:0.[..].0
+",
+        ),
+    );
+}
+
+#[test]
+fn update_one_dep_in_repo_with_many_deps() {
+    let foo = git::new("foo", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+            [package]
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+        "#,
+            )
+            .file("src/lib.rs", "")
+            .file(
+                "a/Cargo.toml",
+                r#"
+            [package]
+            name = "a"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+        "#,
+            )
+            .file("a/src/lib.rs", "")
+    }).unwrap();
+
+    let p = project("project")
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "project"
+            version = "0.5.0"
+            authors = []
+            [dependencies.foo]
+            git = '{}'
+            [dependencies.a]
+            git = '{}'
+        "#,
+                foo.url(),
+                foo.url()
+            ),
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
+    assert_that(
+        p.cargo("update").arg("-p").arg("foo"),
+        execs()
+            .with_status(0)
+            .with_stderr(&format!("[UPDATING] git repository `{}`", foo.url())),
+    );
+}
+
+#[test]
+fn switch_deps_does_not_update_transitive() {
+    let transitive = git::new("transitive", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+            [package]
+            name = "transitive"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+        "#,
+            )
+            .file("src/lib.rs", "")
+    }).unwrap();
+    let dep1 = git::new("dep1", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                &format!(
+                    r#"
+                [package]
+                name = "dep"
+                version = "0.5.0"
+                authors = ["wycats@example.com"]
+
+                [dependencies.transitive]
+                git = '{}'
+            "#,
+                    transitive.url()
+                ),
+            )
+            .file("src/lib.rs", "")
+    }).unwrap();
+    let dep2 = git::new("dep2", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                &format!(
+                    r#"
+                [package]
+                name = "dep"
+                version = "0.5.0"
+                authors = ["wycats@example.com"]
+
+                [dependencies.transitive]
+                git = '{}'
+            "#,
+                    transitive.url()
+                ),
+            )
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let p = project("project")
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "project"
+            version = "0.5.0"
+            authors = []
+            [dependencies.dep]
+            git = '{}'
+        "#,
+                dep1.url()
+            ),
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    assert_that(
+        p.cargo("build"),
+        execs().with_status(0).with_stderr(&format!(
+            "\
+[UPDATING] git repository `{}`
+[UPDATING] git repository `{}`
+[COMPILING] transitive [..]
+[COMPILING] dep [..]
+[COMPILING] project [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            dep1.url(),
+            transitive.url()
+        )),
+    );
+
+    // Update the dependency to point to the second repository, but this
+    // shouldn't update the transitive dependency which is the same.
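+    // Only dep2's repository is fetched in the build below; `transitive`
+    // stays at the locked revision and is not recompiled, which is why the
+    // expected output lists a single [UPDATING] line.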
+ File::create(&p.root().join("Cargo.toml")) + .unwrap() + .write_all( + format!( + r#" + [project] + name = "project" + version = "0.5.0" + authors = [] + [dependencies.dep] + git = '{}' + "#, + dep2.url() + ).as_bytes(), + ) + .unwrap(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] git repository `{}` +[COMPILING] dep [..] +[COMPILING] project [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dep2.url() + )), + ); +} + +#[test] +fn update_one_source_updates_all_packages_in_that_git_source() { + let dep = git::new("dep", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "dep" + version = "0.5.0" + authors = [] + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.5.0" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + }).unwrap(); + + let p = project("project") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "project" + version = "0.5.0" + authors = [] + [dependencies.dep] + git = '{}' + "#, + dep.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let repo = git2::Repository::open(&dep.root()).unwrap(); + let rev1 = repo.revparse_single("HEAD").unwrap().id(); + + // Just be sure to change a file + File::create(&dep.root().join("src/lib.rs")) + .unwrap() + .write_all( + br#" + pub fn bar() -> i32 { 2 } + "#, + ) + .unwrap(); + git::add(&repo); + git::commit(&repo); + + assert_that( + p.cargo("update").arg("-p").arg("dep"), + execs().with_status(0), + ); + let mut lockfile = String::new(); + File::open(&p.root().join("Cargo.lock")) + .unwrap() + .read_to_string(&mut lockfile) + .unwrap(); + assert!( + !lockfile.contains(&rev1.to_string()), + "{} in {}", + rev1, + lockfile + ); +} + +#[test] +fn switch_sources() { + let a1 = git::new("a1", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + }).unwrap(); + let a2 = git::new("a2", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.5.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project("project") + .file( + "Cargo.toml", + r#" + [project] + name = "project" + version = "0.5.0" + authors = [] + [dependencies.b] + path = "b" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "b/Cargo.toml", + &format!( + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies.a] + git = '{}' + "#, + a1.url() + ), + ) + .file("b/src/lib.rs", "pub fn main() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] git repository `file://[..]a1` +[COMPILING] a v0.5.0 ([..]a1#[..] +[COMPILING] b v0.5.0 ([..]) +[COMPILING] project v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + File::create(&p.root().join("b/Cargo.toml")) + .unwrap() + .write_all( + format!( + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies.a] + git = '{}' + "#, + a2.url() + ).as_bytes(), + ) + .unwrap(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] git repository `file://[..]a2` +[COMPILING] a v0.5.1 ([..]a2#[..] +[COMPILING] b v0.5.0 ([..]) +[COMPILING] project v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn dont_require_submodules_are_checked_out() { + let p = project("foo").build(); + let git1 = git::new("dep1", |p| { + p.file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ).file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .file("a/foo", "") + }).unwrap(); + let git2 = git::new("dep2", |p| p).unwrap(); + + let repo = git2::Repository::open(&git1.root()).unwrap(); + let url = path2url(git2.root()).to_string(); + git::add_submodule(&repo, &url, Path::new("a/submodule")); + git::commit(&repo); + + git2::Repository::init(&p.root()).unwrap(); + let url = path2url(git1.root()).to_string(); + let dst = paths::home().join("foo"); + git2::Repository::clone(&url, &dst).unwrap(); + + assert_that( + git1.cargo("build").arg("-v").cwd(&dst), + execs().with_status(0), + ); +} + +#[test] +fn doctest_same_name() { + let a2 = git::new("a2", |p| { + p.file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#, + ).file("src/lib.rs", "pub fn a2() {}") + }).unwrap(); + + let a1 = git::new("a1", |p| { + p.file( + "Cargo.toml", + &format!( + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + [dependencies] + a = {{ git = '{}' }} + "#, + a2.url() + ), + ).file("src/lib.rs", "extern crate a; pub fn a1() {}") + }).unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ git = '{}' }} + "#, + a1.url() + ), + ) + .file( + "src/lib.rs", + r#" + #[macro_use] + extern crate a; + "#, + ) + .build(); + + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); +} + +#[test] +fn lints_are_suppressed() { + let a = git::new("a", |p| { + p.file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#, + ).file( + "src/lib.rs", + " + use std::option; + ", + ) + }).unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ git = '{}' }} + "#, + a.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] git repository `[..]` +[COMPILING] a v0.5.0 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn denied_lints_are_allowed() { + let a = git::new("a", |p| { + p.file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#, + ).file( + "src/lib.rs", + " + #![deny(warnings)] + use std::option; + ", + ) + }).unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ git = '{}' }} + "#, + a.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] git repository `[..]` +[COMPILING] a v0.5.0 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn add_a_git_dep() { + let git = git::new("git", |p| { + p.file( + "Cargo.toml", + r#" + [project] + name = "git" + version = "0.5.0" + authors = [] + "#, + ).file("src/lib.rs", "") + }).unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ path = 'a' }} + git = {{ git = '{}' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + File::create(p.root().join("a/Cargo.toml")) + .unwrap() + .write_all( + format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + git = {{ git = '{}' }} + "#, + git.url() + ).as_bytes(), + ) + .unwrap(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn two_at_rev_instead_of_tag() { + let git = git::new("git", |p| { + p.file( + "Cargo.toml", + r#" + [project] + name = "git1" + version = "0.5.0" + authors = [] + "#, + ).file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "git2" + version = "0.5.0" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + }).unwrap(); + + // Make a tag corresponding to the current HEAD + let repo = git2::Repository::open(&git.root()).unwrap(); + let head = repo.head().unwrap().target().unwrap(); + repo.tag( + "v0.1.0", + &repo.find_object(head, None).unwrap(), + &repo.signature().unwrap(), + "make a new tag", + false, + ).unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + git1 = {{ git = '{0}', rev = 'v0.1.0' }} + git2 = {{ git = '{0}', rev = 'v0.1.0' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +#[ignore] // accesses crates.io +fn include_overrides_gitignore() { + let p = git::new("reduction", |repo| { + repo.file( + "Cargo.toml", + r#" + [package] + name = "reduction" + version = "0.5.0" + authors = ["pnkfelix"] + build = "tango-build.rs" + include = ["src/lib.rs", "src/incl.rs", "src/mod.md", "tango-build.rs", "Cargo.toml"] + + [build-dependencies] + filetime = "0.1" + "#, + ).file( + ".gitignore", + r#" + target + Cargo.lock + # Below files represent generated code, thus not managed by `git` + src/incl.rs + src/not_incl.rs + "#, + ) + .file( + "tango-build.rs", + r#" + extern crate filetime; + use filetime::FileTime; + use std::fs::{self, File}; + + fn main() { + // generate files, or bring their timestamps into sync. + let source = "src/mod.md"; + + let metadata = fs::metadata(source).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + let atime = FileTime::from_last_access_time(&metadata); + + // sync time stamps for generated files with time stamp of source file. + + let files = ["src/not_incl.rs", "src/incl.rs"]; + for file in files.iter() { + File::create(file).unwrap(); + filetime::set_file_times(file, atime, mtime).unwrap(); + } + } + "#, + ) + .file( + "src/lib.rs", + r#" + mod not_incl; + mod incl; + "#, + ) + .file( + "src/mod.md", + r#" + (The content of this file does not matter since we are not doing real codegen.) 
+ "#, + ) + }).unwrap(); + + println!("build 1: all is new"); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] filetime [..] +[DOWNLOADING] libc [..] +[COMPILING] libc [..] +[RUNNING] `rustc --crate-name libc [..]` +[COMPILING] filetime [..] +[RUNNING] `rustc --crate-name filetime [..]` +[COMPILING] reduction [..] +[RUNNING] `rustc --crate-name build_script_tango_build tango-build.rs --crate-type bin [..]` +[RUNNING] `[..][/]build-script-tango-build` +[RUNNING] `rustc --crate-name reduction src[/]lib.rs --crate-type lib [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + println!("build 2: nothing changed; file timestamps reset by build script"); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[FRESH] libc [..] +[FRESH] filetime [..] +[FRESH] reduction [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + println!("build 3: touch `src/not_incl.rs`; expect build script *not* re-run"); + sleep_ms(1000); + File::create(p.root().join("src").join("not_incl.rs")).unwrap(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[FRESH] libc [..] +[FRESH] filetime [..] +[COMPILING] reduction [..] +[RUNNING] `rustc --crate-name reduction src[/]lib.rs --crate-type lib [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + // This final case models the bug from rust-lang/cargo#4135: an + // explicitly included file should cause a build-script re-run, + // even if that same file is matched by `.gitignore`. + println!("build 4: touch `src/incl.rs`; expect build script re-run"); + sleep_ms(1000); + File::create(p.root().join("src").join("incl.rs")).unwrap(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[FRESH] libc [..] +[FRESH] filetime [..] +[COMPILING] reduction [..] +[RUNNING] `[..][/]build-script-tango-build` +[RUNNING] `rustc --crate-name reduction src[/]lib.rs --crate-type lib [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn invalid_git_dependency_manifest() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + categories = ["algorithms"] + categories = ["algorithms"] + + [lib] + + name = "dep1" + "#, + ) + .file( + "src/dep1.rs", + r#" + pub fn hello() -> &'static str { + "hello world" + } + "#, + ) + }).unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/main.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + let git_root = git_project.root(); + + assert_that( + project.cargo("build"), + execs().with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + error: failed to load source for a dependency on `dep1`\n\ + \n\ + Caused by:\n \ + Unable to update {}\n\ + \n\ + Caused by:\n \ + failed to parse manifest at `[..]`\n\ + \n\ + Caused by:\n \ + could not parse input as TOML\n\ + \n\ + Caused by:\n \ + duplicate key: `categories` for key `project`", + path2url(git_root.clone()), + path2url(git_root), + )), + ); +} + +#[test] +fn failed_submodule_checkout() { + let project = project("foo"); + let git_project = git::new("dep1", |project| { + project.file( + "Cargo.toml", + r#" + [package] + name = "dep1" + version = "0.5.0" + authors = [""] + "#, + ) + }).unwrap(); + + let git_project2 = git::new("dep2", |project| project.file("lib.rs", "")).unwrap(); + + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = listener.local_addr().unwrap(); + let done = Arc::new(AtomicBool::new(false)); + let done2 = done.clone(); + + let t = thread::spawn(move || { + while !done2.load(Ordering::SeqCst) { + if let Ok((mut socket, _)) = listener.accept() { + drop(socket.write_all(b"foo\r\n")); + } + } + }); + + let repo = git2::Repository::open(&git_project2.root()).unwrap(); + let url = format!("http://{}:{}/", addr.ip(), addr.port()); + { + let mut s = repo.submodule(&url, Path::new("bar"), false).unwrap(); + let subrepo = s.open().unwrap(); + let mut cfg = subrepo.config().unwrap(); + cfg.set_str("user.email", "foo@bar.com").unwrap(); + cfg.set_str("user.name", "Foo Bar").unwrap(); + git::commit(&subrepo); + s.add_finalize().unwrap(); + } + git::commit(&repo); + drop((repo, url)); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let url = path2url(git_project2.root()).to_string(); + git::add_submodule(&repo, &url, Path::new("src")); + git::commit(&repo); + drop(repo); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + dep1 = {{ git = '{}' }} + "#, + git_project.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + project.cargo("build"), + execs() + .with_status(101) + .with_stderr_contains(" failed to update submodule `src`") + .with_stderr_contains(" failed to update submodule `bar`"), + ); + assert_that( + project.cargo("build"), + execs() + .with_status(101) + .with_stderr_contains(" failed to update submodule `src`") + .with_stderr_contains(" failed to update submodule `bar`"), + ); + + done.store(true, Ordering::SeqCst); + drop(TcpStream::connect(&addr)); + t.join().unwrap(); +} diff --git a/tests/testsuite/hamcrest.rs b/tests/testsuite/hamcrest.rs new 
file mode 100644 index 000000000..53d8642f9 --- /dev/null +++ b/tests/testsuite/hamcrest.rs @@ -0,0 +1,101 @@
+use std::fmt;
+use std::marker;
+use std::path::Path;
+
+pub type MatchResult = Result<(), String>;
+
+pub trait Matcher<T>: fmt::Debug {
+    fn matches(&self, actual: T) -> Result<(), String>;
+}
+
+pub fn assert_that<T, U: Matcher<T>>(actual: T, matcher: U) {
+    if let Err(e) = matcher.matches(actual) {
+        panic!("\nExpected: {:?}\n    but: {}", matcher, e)
+    }
+}
+
+pub fn existing_file() -> ExistingFile {
+    ExistingFile
+}
+
+#[derive(Debug)]
+pub struct ExistingFile;
+
+impl<P> Matcher<P> for ExistingFile
+where
+    P: AsRef<Path>,
+{
+    fn matches(&self, actual: P) -> Result<(), String> {
+        if actual.as_ref().is_file() {
+            Ok(())
+        } else {
+            Err(format!("{} was not a file", actual.as_ref().display()))
+        }
+    }
+}
+
+pub fn existing_dir() -> ExistingDir {
+    ExistingDir
+}
+
+#[derive(Debug)]
+pub struct ExistingDir;
+
+impl<P> Matcher<P> for ExistingDir
+where
+    P: AsRef<Path>,
+{
+    fn matches(&self, actual: P) -> Result<(), String> {
+        if actual.as_ref().is_dir() {
+            Ok(())
+        } else {
+            Err(format!("{} was not a dir", actual.as_ref().display()))
+        }
+    }
+}
+
+pub fn is_not<T, M: Matcher<T>>(matcher: M) -> IsNot<T, M> {
+    IsNot {
+        matcher,
+        _marker: marker::PhantomData,
+    }
+}
+
+#[derive(Debug)]
+pub struct IsNot<T, M> {
+    matcher: M,
+    _marker: marker::PhantomData<T>,
+}
+
+impl<T, M: Matcher<T>> Matcher<T> for IsNot<T, M>
+where
+    T: fmt::Debug,
+{
+    fn matches(&self, actual: T) -> Result<(), String> {
+        match self.matcher.matches(actual) {
+            Ok(_) => Err("matched".to_string()),
+            Err(_) => Ok(()),
+        }
+    }
+}
+
+pub fn contains<T>(item: Vec<T>) -> Contains<T> {
+    Contains(item)
+}
+
+#[derive(Debug)]
+pub struct Contains<T>(Vec<T>);
+
+impl<'a, T> Matcher<&'a Vec<T>> for Contains<T>
+where
+    T: fmt::Debug + PartialEq,
+{
+    fn matches(&self, actual: &'a Vec<T>) -> Result<(), String> {
+        for item in self.0.iter() {
+            if !actual.contains(item) {
+                return Err(format!("failed to find {:?}", item));
+            }
+        }
+        Ok(())
+    }
+} diff --git a/tests/testsuite/init.rs b/tests/testsuite/init.rs new file mode 100644 index 000000000..667bc6202 --- /dev/null +++ b/tests/testsuite/init.rs @@ -0,0 +1,620 @@
+use cargotest;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::env;
+
+use cargo::util::ProcessBuilder;
+use cargotest::support::{cargo_exe, execs, paths};
+use hamcrest::{assert_that, existing_dir, existing_file, is_not};
+use tempfile;
+
+fn cargo_process(s: &str) -> ProcessBuilder {
+    let mut p = cargotest::process(&cargo_exe());
+    p.arg(s).cwd(&paths::root()).env("HOME", &paths::home());
+    p
+}
+
+#[test]
+fn simple_lib() {
+    assert_that(
+        cargo_process("init")
+            .arg("--lib")
+            .arg("--vcs")
+            .arg("none")
+            .env("USER", "foo"),
+        execs()
+            .with_status(0)
+            .with_stderr("[CREATED] library project"),
+    );
+
+    assert_that(&paths::root().join("Cargo.toml"), existing_file());
+    assert_that(&paths::root().join("src/lib.rs"), existing_file());
+    assert_that(&paths::root().join(".gitignore"), is_not(existing_file()));
+
+    assert_that(cargo_process("build"), execs().with_status(0));
+}
+
+#[test]
+fn simple_bin() {
+    let path = paths::root().join("foo");
+    fs::create_dir(&path).unwrap();
+    assert_that(
+        cargo_process("init")
+            .arg("--bin")
+            .arg("--vcs")
+            .arg("none")
+            .env("USER", "foo")
+            .cwd(&path),
+        execs()
+            .with_status(0)
+            .with_stderr("[CREATED] binary (application) project"),
+    );
+
+    assert_that(&paths::root().join("foo/Cargo.toml"), existing_file());
+    assert_that(&paths::root().join("foo/src/main.rs"), existing_file());
+
+    assert_that(cargo_process("build").cwd(&path), execs().with_status(0));
+    assert_that(
+        &paths::root().join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX)),
+        existing_file(),
+    );
+}
+
+#[test]
+fn both_lib_and_bin() {
+    let td = tempfile::Builder::new().prefix("cargo").tempdir().unwrap();
+    assert_that(
+        cargo_process("init")
+            .arg("--lib")
+            .arg("--bin")
+            .cwd(td.path())
+            .env("USER", "foo"),
+        execs()
+            .with_status(101)
+            .with_stderr("[ERROR] can't specify both lib and binary outputs"),
+    );
+}
+
+fn bin_already_exists(explicit: bool, rellocation: &str) {
+    let path = paths::root().join("foo");
+    fs::create_dir_all(&path.join("src")).unwrap();
+
+    let sourcefile_path = path.join(rellocation);
+
+    let content = br#"
+        fn main() {
+            println!("Hello, world 2!");
+        }
+    "#;
+
+    File::create(&sourcefile_path)
+        .unwrap()
+        .write_all(content)
+        .unwrap();
+
+    if explicit {
+        assert_that(
+            cargo_process("init")
+                .arg("--bin")
+                .arg("--vcs")
+
.arg("none") + .env("USER", "foo") + .cwd(&path), + execs().with_status(0), + ); + } else { + assert_that( + cargo_process("init") + .arg("--vcs") + .arg("none") + .env("USER", "foo") + .cwd(&path), + execs().with_status(0), + ); + } + + assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); + assert_that( + &paths::root().join("foo/src/lib.rs"), + is_not(existing_file()), + ); + + // Check that our file is not overwritten + let mut new_content = Vec::new(); + File::open(&sourcefile_path) + .unwrap() + .read_to_end(&mut new_content) + .unwrap(); + assert_eq!(Vec::from(content as &[u8]), new_content); +} + +#[test] +fn bin_already_exists_explicit() { + bin_already_exists(true, "src/main.rs") +} + +#[test] +fn bin_already_exists_implicit() { + bin_already_exists(false, "src/main.rs") +} + +#[test] +fn bin_already_exists_explicit_nosrc() { + bin_already_exists(true, "main.rs") +} + +#[test] +fn bin_already_exists_implicit_nosrc() { + bin_already_exists(false, "main.rs") +} + +#[test] +fn bin_already_exists_implicit_namenosrc() { + bin_already_exists(false, "foo.rs") +} + +#[test] +fn bin_already_exists_implicit_namesrc() { + bin_already_exists(false, "src/foo.rs") +} + +#[test] +fn confused_by_multiple_lib_files() { + let path = paths::root().join("foo"); + fs::create_dir_all(&path.join("src")).unwrap(); + + let sourcefile_path1 = path.join("src/lib.rs"); + + File::create(&sourcefile_path1) + .unwrap() + .write_all( + br#" + fn qqq () { + println!("Hello, world 2!"); + } + "#, + ) + .unwrap(); + + let sourcefile_path2 = path.join("lib.rs"); + + File::create(&sourcefile_path2) + .unwrap() + .write_all( + br#" + fn qqq () { + println!("Hello, world 3!"); + } + "#, + ) + .unwrap(); + + assert_that( + cargo_process("init") + .arg("--vcs") + .arg("none") + .env("USER", "foo") + .cwd(&path), + execs().with_status(101).with_stderr( + "[ERROR] cannot have a project with multiple libraries, found both `src/lib.rs` and `lib.rs`", + ), + ); + + assert_that( + &paths::root().join("foo/Cargo.toml"), + is_not(existing_file()), + ); +} + +#[test] +fn multibin_project_name_clash() { + let path = paths::root().join("foo"); + fs::create_dir(&path).unwrap(); + + let sourcefile_path1 = path.join("foo.rs"); + + File::create(&sourcefile_path1) + .unwrap() + .write_all( + br#" + fn main () { + println!("Hello, world 2!"); + } + "#, + ) + .unwrap(); + + let sourcefile_path2 = path.join("main.rs"); + + File::create(&sourcefile_path2) + .unwrap() + .write_all( + br#" + fn main () { + println!("Hello, world 3!"); + } + "#, + ) + .unwrap(); + + assert_that( + cargo_process("init") + .arg("--lib") + .arg("--vcs") + .arg("none") + .env("USER", "foo") + .cwd(&path), + execs().with_status(101).with_stderr( + "\ +[ERROR] multiple possible binary sources found: + main.rs + foo.rs +cannot automatically generate Cargo.toml as the main target would be ambiguous +", + ), + ); + + assert_that( + &paths::root().join("foo/Cargo.toml"), + is_not(existing_file()), + ); +} + +fn lib_already_exists(rellocation: &str) { + let path = paths::root().join("foo"); + fs::create_dir_all(&path.join("src")).unwrap(); + + let sourcefile_path = path.join(rellocation); + + let content = br#" + pub fn qqq() {} + "#; + + File::create(&sourcefile_path) + .unwrap() + .write_all(content) + .unwrap(); + + assert_that( + cargo_process("init") + .arg("--vcs") + .arg("none") + .env("USER", "foo") + .cwd(&path), + execs().with_status(0), + ); + + assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); + assert_that( + 
&paths::root().join("foo/src/main.rs"), + is_not(existing_file()), + ); + + // Check that our file is not overwritten + let mut new_content = Vec::new(); + File::open(&sourcefile_path) + .unwrap() + .read_to_end(&mut new_content) + .unwrap(); + assert_eq!(Vec::from(content as &[u8]), new_content); +} + +#[test] +fn lib_already_exists_src() { + lib_already_exists("src/lib.rs") +} + +#[test] +fn lib_already_exists_nosrc() { + lib_already_exists("lib.rs") +} + +#[test] +fn simple_git() { + assert_that( + cargo_process("init") + .arg("--lib") + .arg("--vcs") + .arg("git") + .env("USER", "foo"), + execs().with_status(0), + ); + + assert_that(&paths::root().join("Cargo.toml"), existing_file()); + assert_that(&paths::root().join("src/lib.rs"), existing_file()); + assert_that(&paths::root().join(".git"), existing_dir()); + assert_that(&paths::root().join(".gitignore"), existing_file()); +} + +#[test] +fn auto_git() { + let td = tempfile::Builder::new().prefix("cargo").tempdir().unwrap(); + let foo = &td.path().join("foo"); + fs::create_dir_all(&foo).unwrap(); + assert_that( + cargo_process("init") + .arg("--lib") + .cwd(foo.clone()) + .env("USER", "foo"), + execs().with_status(0), + ); + + assert_that(&foo.join("Cargo.toml"), existing_file()); + assert_that(&foo.join("src/lib.rs"), existing_file()); + assert_that(&foo.join(".git"), existing_dir()); + assert_that(&foo.join(".gitignore"), existing_file()); +} + +#[test] +fn invalid_dir_name() { + let foo = &paths::root().join("foo.bar"); + fs::create_dir_all(&foo).unwrap(); + assert_that( + cargo_process("init").cwd(foo.clone()).env("USER", "foo"), + execs().with_status(101).with_stderr( + "\ +[ERROR] Invalid character `.` in crate name: `foo.bar` +use --name to override crate name +", + ), + ); + + assert_that(&foo.join("Cargo.toml"), is_not(existing_file())); +} + +#[test] +fn reserved_name() { + let test = &paths::root().join("test"); + fs::create_dir_all(&test).unwrap(); + assert_that( + cargo_process("init").cwd(test.clone()).env("USER", "foo"), + execs().with_status(101).with_stderr( + "\ +[ERROR] The name `test` cannot be used as a crate name\n\ +use --name to override crate name +", + ), + ); + + assert_that(&test.join("Cargo.toml"), is_not(existing_file())); +} + +#[test] +fn git_autodetect() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + assert_that( + cargo_process("init").arg("--lib").env("USER", "foo"), + execs().with_status(0), + ); + + assert_that(&paths::root().join("Cargo.toml"), existing_file()); + assert_that(&paths::root().join("src/lib.rs"), existing_file()); + assert_that(&paths::root().join(".git"), existing_dir()); + assert_that(&paths::root().join(".gitignore"), existing_file()); +} + +#[test] +fn mercurial_autodetect() { + fs::create_dir(&paths::root().join(".hg")).unwrap(); + + assert_that( + cargo_process("init").arg("--lib").env("USER", "foo"), + execs().with_status(0), + ); + + assert_that(&paths::root().join("Cargo.toml"), existing_file()); + assert_that(&paths::root().join("src/lib.rs"), existing_file()); + assert_that(&paths::root().join(".git"), is_not(existing_dir())); + assert_that(&paths::root().join(".hgignore"), existing_file()); +} + +#[test] +fn gitignore_appended_not_replaced() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + File::create(&paths::root().join(".gitignore")) + .unwrap() + .write_all(b"qqqqqq\n") + .unwrap(); + + assert_that( + cargo_process("init").arg("--lib").env("USER", "foo"), + execs().with_status(0), + ); + + assert_that(&paths::root().join("Cargo.toml"), 
existing_file()); + assert_that(&paths::root().join("src/lib.rs"), existing_file()); + assert_that(&paths::root().join(".git"), existing_dir()); + assert_that(&paths::root().join(".gitignore"), existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.contains(r#"qqqqqq"#)); +} + +#[test] +fn gitignore_added_newline_if_required() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + File::create(&paths::root().join(".gitignore")) + .unwrap() + .write_all(b"first") + .unwrap(); + + assert_that( + cargo_process("init").arg("--lib").env("USER", "foo"), + execs().with_status(0), + ); + + assert_that(&paths::root().join(".gitignore"), existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.starts_with("first\n")); +} + +#[test] +fn mercurial_added_newline_if_required() { + fs::create_dir(&paths::root().join(".hg")).unwrap(); + + File::create(&paths::root().join(".hgignore")) + .unwrap() + .write_all(b"first") + .unwrap(); + + assert_that( + cargo_process("init").arg("--lib").env("USER", "foo"), + execs().with_status(0), + ); + + assert_that(&paths::root().join(".hgignore"), existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".hgignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.starts_with("first\n")); +} + +#[test] +fn cargo_lock_gitignored_if_lib1() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + assert_that( + cargo_process("init") + .arg("--lib") + .arg("--vcs") + .arg("git") + .env("USER", "foo"), + execs().with_status(0), + ); + + assert_that(&paths::root().join(".gitignore"), existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.contains(r#"Cargo.lock"#)); +} + +#[test] +fn cargo_lock_gitignored_if_lib2() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + File::create(&paths::root().join("lib.rs")) + .unwrap() + .write_all(br#""#) + .unwrap(); + + assert_that( + cargo_process("init") + .arg("--vcs") + .arg("git") + .env("USER", "foo"), + execs().with_status(0), + ); + + assert_that(&paths::root().join(".gitignore"), existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.contains(r#"Cargo.lock"#)); +} + +#[test] +fn cargo_lock_not_gitignored_if_bin1() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + assert_that( + cargo_process("init") + .arg("--vcs") + .arg("git") + .arg("--bin") + .env("USER", "foo"), + execs().with_status(0), + ); + + assert_that(&paths::root().join(".gitignore"), existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(!contents.contains(r#"Cargo.lock"#)); +} + +#[test] +fn cargo_lock_not_gitignored_if_bin2() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + File::create(&paths::root().join("main.rs")) + .unwrap() + .write_all(br#""#) + .unwrap(); + + assert_that( + cargo_process("init") + .arg("--vcs") + .arg("git") + .env("USER", "foo"), + execs().with_status(0), + ); + + assert_that(&paths::root().join(".gitignore"), 
existing_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(!contents.contains(r#"Cargo.lock"#)); +} + +#[test] +fn with_argument() { + assert_that( + cargo_process("init") + .arg("foo") + .arg("--vcs") + .arg("none") + .env("USER", "foo"), + execs().with_status(0), + ); + assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); +} + +#[test] +fn unknown_flags() { + assert_that( + cargo_process("init").arg("foo").arg("--flag"), + execs().with_status(1).with_stderr_contains( + "error: Found argument '--flag' which wasn't expected, or isn't valid in this context", + ), + ); +} + +#[cfg(not(windows))] +#[test] +fn no_filename() { + assert_that( + cargo_process("init").arg("/"), + execs().with_status(101).with_stderr( + "[ERROR] cannot auto-detect project name from path \"/\" ; use --name to override" + .to_string(), + ), + ); +} diff --git a/tests/testsuite/install.rs b/tests/testsuite/install.rs new file mode 100644 index 000000000..250429e16 --- /dev/null +++ b/tests/testsuite/install.rs @@ -0,0 +1,1489 @@ +use cargotest; +use std::fs::{self, File, OpenOptions}; +use std::io::prelude::*; + +use cargo::util::ProcessBuilder; +use cargotest::install::{cargo_home, has_installed_exe}; +use cargotest::support::git; +use cargotest::support::paths; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project}; +use hamcrest::{assert_that, existing_dir, is_not}; + +fn cargo_process(s: &str) -> ProcessBuilder { + let mut p = cargotest::cargo_process(); + p.arg(s); + p +} + +fn pkg(name: &str, vers: &str) { + Package::new(name, vers) + .file("src/lib.rs", "") + .file( + "src/main.rs", + &format!( + " + extern crate {}; + fn main() {{}} + ", + name + ), + ) + .publish(); +} + +#[test] +fn simple() { + pkg("foo", "0.0.1"); + + assert_that( + cargo_process("install").arg("foo"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] foo v0.0.1 (registry [..]) +[INSTALLING] foo v0.0.1 +[COMPILING] foo v0.0.1 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]foo[..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display() + )), + ); + assert_that(cargo_home(), has_installed_exe("foo")); + + assert_that( + cargo_process("uninstall").arg("foo"), + execs().with_status(0).with_stderr(&format!( + "[REMOVING] {home}[..]bin[..]foo[..]", + home = cargo_home().display() + )), + ); + assert_that(cargo_home(), is_not(has_installed_exe("foo"))); +} + +#[test] +fn multiple_pkgs() { + pkg("foo", "0.0.1"); + pkg("bar", "0.0.2"); + + assert_that( + cargo_process("install").args(&["foo", "bar", "baz"]), + execs().with_status(101).with_stderr(&format!( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] foo v0.0.1 (registry `file://[..]`) +[INSTALLING] foo v0.0.1 +[COMPILING] foo v0.0.1 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]foo[..] +[DOWNLOADING] bar v0.0.2 (registry `file://[..]`) +[INSTALLING] bar v0.0.2 +[COMPILING] bar v0.0.2 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]bar[..] +error: could not find `baz` in registry `[..]` +[SUMMARY] Successfully installed foo, bar! Failed to install baz (see error(s) above). 
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +error: some crates failed to install +", + home = cargo_home().display() + )), + ); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), has_installed_exe("bar")); + + assert_that( + cargo_process("uninstall").args(&["foo", "bar"]), + execs().with_status(0).with_stderr(&format!( + "\ +[REMOVING] {home}[..]bin[..]foo[..] +[REMOVING] {home}[..]bin[..]bar[..] +[SUMMARY] Successfully uninstalled foo, bar! +", + home = cargo_home().display() + )), + ); + + assert_that(cargo_home(), is_not(has_installed_exe("foo"))); + assert_that(cargo_home(), is_not(has_installed_exe("bar"))); +} + +#[test] +fn pick_max_version() { + pkg("foo", "0.0.1"); + pkg("foo", "0.0.2"); + + assert_that( + cargo_process("install").arg("foo"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] foo v0.0.2 (registry [..]) +[INSTALLING] foo v0.0.2 +[COMPILING] foo v0.0.2 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]foo[..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display() + )), + ); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn missing() { + pkg("foo", "0.0.1"); + assert_that( + cargo_process("install").arg("bar"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] registry [..] +[ERROR] could not find `bar` in registry `[..]` +", + ), + ); +} + +#[test] +fn bad_version() { + pkg("foo", "0.0.1"); + assert_that( + cargo_process("install").arg("foo").arg("--vers=0.2.0"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] registry [..] +[ERROR] could not find `foo` in registry `[..]` with version `=0.2.0` +", + ), + ); +} + +#[test] +fn no_crate() { + assert_that( + cargo_process("install"), + execs().with_status(101).with_stderr( + "\ +[ERROR] `[..]` is not a crate root; specify a crate to install [..] + +Caused by: + failed to read `[..]Cargo.toml` + +Caused by: + [..] 
(os error [..]) +", + ), + ); +} + +#[test] +fn install_location_precedence() { + pkg("foo", "0.0.1"); + + let root = paths::root(); + let t1 = root.join("t1"); + let t2 = root.join("t2"); + let t3 = root.join("t3"); + let t4 = cargo_home(); + + fs::create_dir(root.join(".cargo")).unwrap(); + File::create(root.join(".cargo/config")) + .unwrap() + .write_all( + format!( + "\ + [install] + root = '{}' + ", + t3.display() + ).as_bytes(), + ) + .unwrap(); + + println!("install --root"); + + assert_that( + cargo_process("install") + .arg("foo") + .arg("--root") + .arg(&t1) + .env("CARGO_INSTALL_ROOT", &t2), + execs().with_status(0), + ); + assert_that(&t1, has_installed_exe("foo")); + assert_that(&t2, is_not(has_installed_exe("foo"))); + + println!("install CARGO_INSTALL_ROOT"); + + assert_that( + cargo_process("install") + .arg("foo") + .env("CARGO_INSTALL_ROOT", &t2), + execs().with_status(0), + ); + assert_that(&t2, has_installed_exe("foo")); + assert_that(&t3, is_not(has_installed_exe("foo"))); + + println!("install install.root"); + + assert_that(cargo_process("install").arg("foo"), execs().with_status(0)); + assert_that(&t3, has_installed_exe("foo")); + assert_that(&t4, is_not(has_installed_exe("foo"))); + + fs::remove_file(root.join(".cargo/config")).unwrap(); + + println!("install cargo home"); + + assert_that(cargo_process("install").arg("foo"), execs().with_status(0)); + assert_that(&t4, has_installed_exe("foo")); +} + +#[test] +fn install_path() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that( + cargo_process("install") + .arg("--path") + .arg(".") + .cwd(p.root()), + execs().with_status(101).with_stderr( + "\ +[INSTALLING] foo v0.1.0 [..] +[ERROR] binary `foo[..]` already exists in destination as part of `foo v0.1.0 [..]` +Add --force to overwrite +", + ), + ); +} + +#[test] +fn multiple_crates_error() { + let p = git::repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("a/src/main.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install") + .arg("--git") + .arg(p.url().to_string()), + execs().with_status(101).with_stderr( + "\ +[UPDATING] git repository [..] 
+[ERROR] multiple packages with binaries found: bar, foo +", + ), + ); +} + +#[test] +fn multiple_crates_select() { + let p = git::repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("a/src/main.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install") + .arg("--git") + .arg(p.url().to_string()) + .arg("foo"), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), is_not(has_installed_exe("bar"))); + + assert_that( + cargo_process("install") + .arg("--git") + .arg(p.url().to_string()) + .arg("bar"), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("bar")); +} + +#[test] +fn multiple_crates_auto_binaries() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "a" } + "#, + ) + .file("src/main.rs", "extern crate bar; fn main() {}") + .file( + "a/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that( + cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn multiple_crates_auto_examples() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "a" } + "#, + ) + .file("src/lib.rs", "extern crate bar;") + .file( + "examples/foo.rs", + " + extern crate bar; + extern crate foo; + fn main() {} + ", + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that( + cargo_process("install") + .arg("--path") + .arg(p.root()) + .arg("--example=foo"), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn no_binaries_or_examples() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that( + cargo_process("install").arg("--path").arg(p.root()), + execs() + .with_status(101) + .with_stderr("[ERROR] no packages found with binaries or examples"), + ); +} + +#[test] +fn no_binaries() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file("examples/foo.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install") + .arg("--path") + .arg(p.root()) + .arg("foo"), + execs().with_status(101).with_stderr( + "\ +[INSTALLING] foo [..] 
+[ERROR] specified package has no binaries +", + ), + ); +} + +#[test] +fn examples() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file("examples/foo.rs", "extern crate foo; fn main() {}") + .build(); + + assert_that( + cargo_process("install") + .arg("--path") + .arg(p.root()) + .arg("--example=foo"), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn install_twice() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0), + ); + assert_that( + cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(101).with_stderr( + "\ +[INSTALLING] foo v0.1.0 [..] +[ERROR] binary `foo-bin1[..]` already exists in destination as part of `foo v0.1.0 ([..])` +binary `foo-bin2[..]` already exists in destination as part of `foo v0.1.0 ([..])` +Add --force to overwrite +", + ), + ); +} + +#[test] +fn install_force() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0), + ); + + let p = project("foo2") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install") + .arg("--force") + .arg("--path") + .arg(p.root()), + execs().with_status(0).with_stderr(&format!( + "\ +[INSTALLING] foo v0.2.0 ([..]) +[COMPILING] foo v0.2.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[REPLACING] {home}[..]bin[..]foo[..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display() + )), + ); + + assert_that( + cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout( + "\ +foo v0.2.0 ([..]): + foo[..] +", + ), + ); +} + +#[test] +fn install_force_partial_overlap() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0), + ); + + let p = project("foo2") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#, + ) + .file("src/bin/foo-bin2.rs", "fn main() {}") + .file("src/bin/foo-bin3.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install") + .arg("--force") + .arg("--path") + .arg(p.root()), + execs().with_status(0).with_stderr(&format!( + "\ +[INSTALLING] foo v0.2.0 ([..]) +[COMPILING] foo v0.2.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]foo-bin3[..] +[REPLACING] {home}[..]bin[..]foo-bin2[..] 
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display() + )), + ); + + assert_that( + cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout( + "\ +foo v0.1.0 ([..]): + foo-bin1[..] +foo v0.2.0 ([..]): + foo-bin2[..] + foo-bin3[..] +", + ), + ); +} + +#[test] +fn install_force_bin() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0), + ); + + let p = project("foo2") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#, + ) + .file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install") + .arg("--force") + .arg("--bin") + .arg("foo-bin2") + .arg("--path") + .arg(p.root()), + execs().with_status(0).with_stderr(&format!( + "\ +[INSTALLING] foo v0.2.0 ([..]) +[COMPILING] foo v0.2.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[REPLACING] {home}[..]bin[..]foo-bin2[..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display() + )), + ); + + assert_that( + cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout( + "\ +foo v0.1.0 ([..]): + foo-bin1[..] +foo v0.2.0 ([..]): + foo-bin2[..] +", + ), + ); +} + +#[test] +fn compile_failure() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "") + .build(); + + assert_that( + cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(101).with_stderr_contains( + "\ +[ERROR] failed to compile `foo v0.1.0 ([..])`, intermediate artifacts can be \ + found at `[..]target` + +Caused by: + Could not compile `foo`. + +To learn more, run the command again with --verbose. +", + ), + ); +} + +#[test] +fn git_repo() { + let p = git::repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + // use `--locked` to test that we don't even try to write a lockfile + assert_that( + cargo_process("install") + .arg("--locked") + .arg("--git") + .arg(p.url().to_string()), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] git repository `[..]` +[INSTALLING] foo v0.1.0 ([..]) +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] {home}[..]bin[..]foo[..] 
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + home = cargo_home().display() + )), + ); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn list() { + pkg("foo", "0.0.1"); + pkg("bar", "0.2.1"); + pkg("bar", "0.2.2"); + + assert_that( + cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout(""), + ); + + assert_that( + cargo_process("install") + .arg("bar") + .arg("--vers") + .arg("=0.2.1"), + execs().with_status(0), + ); + assert_that(cargo_process("install").arg("foo"), execs().with_status(0)); + assert_that( + cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout( + "\ +bar v0.2.1: + bar[..] +foo v0.0.1: + foo[..] +", + ), + ); +} + +#[test] +fn list_error() { + pkg("foo", "0.0.1"); + assert_that(cargo_process("install").arg("foo"), execs().with_status(0)); + assert_that( + cargo_process("install").arg("--list"), + execs().with_status(0).with_stdout( + "\ +foo v0.0.1: + foo[..] +", + ), + ); + let mut worldfile_path = cargo_home(); + worldfile_path.push(".crates.toml"); + let mut worldfile = OpenOptions::new() + .write(true) + .open(worldfile_path) + .expect(".crates.toml should be there"); + worldfile.write_all(b"\x00").unwrap(); + drop(worldfile); + assert_that( + cargo_process("install").arg("--list").arg("--verbose"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse crate metadata at `[..]` + +Caused by: + invalid TOML found for metadata + +Caused by: + unexpected character[..] +", + ), + ); +} + +#[test] +fn uninstall_pkg_does_not_exist() { + assert_that( + cargo_process("uninstall").arg("foo"), + execs() + .with_status(101) + .with_stderr("[ERROR] package id specification `foo` matched no packages"), + ); +} + +#[test] +fn uninstall_bin_does_not_exist() { + pkg("foo", "0.0.1"); + + assert_that(cargo_process("install").arg("foo"), execs().with_status(0)); + assert_that( + cargo_process("uninstall").arg("foo").arg("--bin=bar"), + execs() + .with_status(101) + .with_stderr("[ERROR] binary `bar[..]` not installed as part of `foo v0.0.1`"), + ); +} + +#[test] +fn uninstall_piecemeal() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/bin/foo.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), has_installed_exe("bar")); + + assert_that( + cargo_process("uninstall").arg("foo").arg("--bin=bar"), + execs().with_status(0).with_stderr("[REMOVING] [..]bar[..]"), + ); + + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(cargo_home(), is_not(has_installed_exe("bar"))); + + assert_that( + cargo_process("uninstall").arg("foo").arg("--bin=foo"), + execs().with_status(0).with_stderr("[REMOVING] [..]foo[..]"), + ); + assert_that(cargo_home(), is_not(has_installed_exe("foo"))); + + assert_that( + cargo_process("uninstall").arg("foo"), + execs() + .with_status(101) + .with_stderr("[ERROR] package id specification `foo` matched no packages"), + ); +} + +#[test] +fn subcommand_works_out_of_the_box() { + Package::new("cargo-foo", "1.0.0") + .file( + "src/main.rs", + r#" + fn main() { + println!("bar"); + } + "#, + ) + .publish(); + assert_that( + cargo_process("install").arg("cargo-foo"), + 
execs().with_status(0), + ); + assert_that( + cargo_process("foo"), + execs().with_status(0).with_stdout("bar\n"), + ); + assert_that( + cargo_process("--list"), + execs().with_status(0).with_stdout_contains(" foo\n"), + ); +} + +#[test] +fn installs_from_cwd_by_default() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install").cwd(p.root()), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn do_not_rebuilds_on_local_install() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("--release"), execs().with_status(0)); + assert_that( + cargo_process("install").arg("--path").arg(p.root()), + execs().with_status(0).with_stderr( + "[INSTALLING] [..] +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [..] +warning: be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ), + ); + + assert!(p.build_dir().exists()); + assert!(p.release_bin("foo").exists()); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn reports_unsuccessful_subcommand_result() { + Package::new("cargo-fail", "1.0.0") + .file( + "src/main.rs", + r#" + fn main() { + panic!(); + } + "#, + ) + .publish(); + assert_that( + cargo_process("install").arg("cargo-fail"), + execs().with_status(0), + ); + assert_that( + cargo_process("--list"), + execs().with_status(0).with_stdout_contains(" fail\n"), + ); + assert_that( + cargo_process("fail"), + execs() + .with_status(101) + .with_stderr_contains("thread '[..]' panicked at 'explicit panic', [..]"), + ); +} + +#[test] +fn git_with_lockfile() { + let p = git::repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "fn main() {}") + .file( + "Cargo.lock", + r#" + [[package]] + name = "foo" + version = "0.1.0" + dependencies = [ "bar 0.1.0" ] + + [[package]] + name = "bar" + version = "0.1.0" + "#, + ) + .build(); + + assert_that( + cargo_process("install") + .arg("--git") + .arg(p.url().to_string()), + execs().with_status(0), + ); +} + +#[test] +fn q_silences_warnings() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + cargo_process("install") + .arg("-q") + .arg("--path") + .arg(p.root()), + execs().with_status(0).with_stderr(""), + ); +} + +#[test] +fn readonly_dir() { + pkg("foo", "0.0.1"); + + let root = paths::root(); + let dir = &root.join("readonly"); + fs::create_dir(root.join("readonly")).unwrap(); + let mut perms = fs::metadata(dir).unwrap().permissions(); + perms.set_readonly(true); + fs::set_permissions(dir, perms).unwrap(); + + assert_that( + cargo_process("install").arg("foo").cwd(dir), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo")); +} + +#[test] +fn use_path_workspace() { + Package::new("foo", "1.0.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", 
+ r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [workspace] + members = ["baz"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "1" + "#, + ) + .file("baz/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + let lock = p.read_lockfile(); + assert_that(p.cargo("install"), execs().with_status(0)); + let lock2 = p.read_lockfile(); + assert_eq!(lock, lock2, "different lockfiles"); +} + +#[test] +fn dev_dependencies_no_check() { + Package::new("foo", "1.0.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dev-dependencies] + baz = "1.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(101)); + assert_that(p.cargo("install"), execs().with_status(0)); +} + +#[test] +fn dev_dependencies_lock_file_untouched() { + Package::new("foo", "1.0.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dev-dependencies] + bar = { path = "a" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + let lock = p.read_lockfile(); + assert_that(p.cargo("install"), execs().with_status(0)); + let lock2 = p.read_lockfile(); + assert!(lock == lock2, "different lockfiles"); +} + +#[test] +fn vers_precise() { + pkg("foo", "0.1.1"); + pkg("foo", "0.1.2"); + + assert_that( + cargo_process("install") + .arg("foo") + .arg("--vers") + .arg("0.1.1"), + execs() + .with_status(0) + .with_stderr_contains("[DOWNLOADING] foo v0.1.1 (registry [..])"), + ); +} + +#[test] +fn version_too() { + pkg("foo", "0.1.1"); + pkg("foo", "0.1.2"); + + assert_that( + cargo_process("install") + .arg("foo") + .arg("--version") + .arg("0.1.1"), + execs() + .with_status(0) + .with_stderr_contains("[DOWNLOADING] foo v0.1.1 (registry [..])"), + ); +} + +#[test] +fn not_both_vers_and_version() { + pkg("foo", "0.1.1"); + pkg("foo", "0.1.2"); + + assert_that( + cargo_process("install") + .arg("foo") + .arg("--version") + .arg("0.1.1") + .arg("--vers") + .arg("0.1.2"), + execs().with_status(1).with_stderr_contains( + "\ +error: The argument '--version ' was provided more than once, \ +but cannot be used multiple times +", + ), + ); +} + +#[test] +fn legacy_version_requirement() { + pkg("foo", "0.1.1"); + + assert_that( + cargo_process("install").arg("foo").arg("--vers").arg("0.1"), + execs().with_status(0).with_stderr_contains( + "\ +warning: the `--vers` provided, `0.1`, is not a valid semver version + +historically Cargo treated this as a semver version requirement accidentally +and will continue to do so, but this behavior will be removed eventually +", + ), + ); +} + +#[test] +fn test_install_git_cannot_be_a_base_url() { + assert_that(cargo_process("install").arg("--git").arg("github.com:rust-lang-nursery/rustfmt.git"), + execs().with_status(101).with_stderr("error: invalid url `github.com:rust-lang-nursery/rustfmt.git`: cannot-be-a-base-URLs are not supported")); +} + +#[test] +fn uninstall_multiple_and_specifying_bin() { + assert_that(cargo_process("uninstall").args(&["foo", "bar"]).arg("--bin").arg("baz"), + 
+
+#[test]
+fn uninstall_multiple_and_specifying_bin() {
+    assert_that(cargo_process("uninstall").args(&["foo", "bar"]).arg("--bin").arg("baz"),
+                execs().with_status(101).with_stderr("error: A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant."));
+}
+
+#[test]
+fn uninstall_multiple_and_some_pkg_does_not_exist() {
+    pkg("foo", "0.0.1");
+
+    assert_that(cargo_process("install").arg("foo"), execs().with_status(0));
+
+    assert_that(
+        cargo_process("uninstall").args(&["foo", "bar"]),
+        execs().with_status(101).with_stderr(&format!(
+            "\
+[REMOVING] {home}[..]bin[..]foo[..]
+error: package id specification `bar` matched no packages
+[SUMMARY] Successfully uninstalled foo! Failed to uninstall bar (see error(s) above).
+error: some packages failed to uninstall
+",
+            home = cargo_home().display()
+        )),
+    );
+
+    assert_that(cargo_home(), is_not(has_installed_exe("foo")));
+    assert_that(cargo_home(), is_not(has_installed_exe("bar")));
+}
+
+#[test]
+fn custom_target_dir_for_git_source() {
+    let p = git::repo(&paths::root().join("foo"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    assert_that(
+        cargo_process("install")
+            .arg("--git")
+            .arg(p.url().to_string()),
+        execs().with_status(0),
+    );
+    assert_that(
+        &paths::root().join("target/release"),
+        is_not(existing_dir()),
+    );
+
+    assert_that(
+        cargo_process("install")
+            .arg("--force")
+            .arg("--git")
+            .arg(p.url().to_string())
+            .env("CARGO_TARGET_DIR", "target"),
+        execs().with_status(0),
+    );
+    assert_that(&paths::root().join("target/release"), existing_dir());
+}
+
+#[test]
+fn install_respects_lock_file() {
+    Package::new("bar", "0.1.0").publish();
+    Package::new("bar", "0.1.1")
+        .file("src/lib.rs", "not rust")
+        .publish();
+    Package::new("foo", "0.1.0")
+        .dep("bar", "0.1")
+        .file("src/lib.rs", "")
+        .file(
+            "src/main.rs",
+            "
+            extern crate foo;
+            extern crate bar;
+            fn main() {}
+        ",
+        )
+        .file(
+            "Cargo.lock",
+            r#"
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+"#,
+        )
+        .publish();
+
+    assert_that(cargo_process("install").arg("foo"), execs().with_status(0));
+}
+
+#[test]
+fn lock_file_path_deps_ok() {
+    Package::new("bar", "0.1.0").publish();
+
+    Package::new("foo", "0.1.0")
+        .dep("bar", "0.1")
+        .file("src/lib.rs", "")
+        .file(
+            "src/main.rs",
+            "
+            extern crate foo;
+            extern crate bar;
+            fn main() {}
+        ",
+        )
+        .file(
+            "Cargo.lock",
+            r#"
+[[package]]
+name = "bar"
+version = "0.1.0"
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+dependencies = [
+ "bar 0.1.0",
+]
+"#,
+        )
+        .publish();
+
+    assert_that(cargo_process("install").arg("foo"), execs().with_status(0));
+}
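+
+// A lockfile entry with no `source` line (the form written for path
+// dependencies) carries no checksum to verify, so installation above must
+// accept it as-is rather than demand verification.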
requires a value but none was supplied", + ), + ); +} diff --git a/tests/testsuite/jobserver.rs b/tests/testsuite/jobserver.rs new file mode 100644 index 000000000..e04ec9e9c --- /dev/null +++ b/tests/testsuite/jobserver.rs @@ -0,0 +1,221 @@ +use std::net::TcpListener; +use std::thread; +use std::process::Command; + +use cargotest::support::{cargo_exe, execs, project}; +use hamcrest::assert_that; + +#[test] +fn jobserver_exists() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "build.rs", + r#" + use std::env; + + fn main() { + let var = env::var("CARGO_MAKEFLAGS").unwrap(); + let arg = var.split(' ') + .find(|p| p.starts_with("--jobserver")) + .unwrap(); + let val = &arg[arg.find('=').unwrap() + 1..]; + validate(val); + } + + #[cfg(unix)] + fn validate(s: &str) { + use std::fs::File; + use std::io::*; + use std::os::unix::prelude::*; + + let fds = s.split(',').collect::>(); + println!("{}", s); + assert_eq!(fds.len(), 2); + unsafe { + let mut read = File::from_raw_fd(fds[0].parse().unwrap()); + let mut write = File::from_raw_fd(fds[1].parse().unwrap()); + + let mut buf = [0]; + assert_eq!(read.read(&mut buf).unwrap(), 1); + assert_eq!(write.write(&buf).unwrap(), 1); + } + } + + #[cfg(windows)] + fn validate(_: &str) { + // a little too complicated for a test... + } + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn makes_jobserver_used() { + let make = if cfg!(windows) { + "mingw32-make" + } else { + "make" + }; + if Command::new(make).arg("--version").output().is_err() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + d1 = { path = "d1" } + d2 = { path = "d2" } + d3 = { path = "d3" } + "#, + ) + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + build = "../dbuild.rs" + "#, + ) + .file("d1/src/lib.rs", "") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + build = "../dbuild.rs" + "#, + ) + .file("d2/src/lib.rs", "") + .file( + "d3/Cargo.toml", + r#" + [package] + name = "d3" + version = "0.0.1" + authors = [] + build = "../dbuild.rs" + "#, + ) + .file("d3/src/lib.rs", "") + .file( + "dbuild.rs", + r#" + use std::net::TcpStream; + use std::env; + use std::io::Read; + + fn main() { + let addr = env::var("ADDR").unwrap(); + let mut stream = TcpStream::connect(addr).unwrap(); + let mut v = Vec::new(); + stream.read_to_end(&mut v).unwrap(); + } + "#, + ) + .file( + "Makefile", + "\ +all: +\t+$(CARGO) build +", + ) + .build(); + + let l = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = l.local_addr().unwrap(); + + let child = thread::spawn(move || { + let a1 = l.accept().unwrap(); + let a2 = l.accept().unwrap(); + l.set_nonblocking(true).unwrap(); + + for _ in 0..1000 { + assert!(l.accept().is_err()); + thread::yield_now(); + } + + drop(a1); + l.set_nonblocking(false).unwrap(); + let a3 = l.accept().unwrap(); + + drop((a2, a3)); + }); + + assert_that( + p.process(make) + .env("CARGO", cargo_exe()) + .env("ADDR", addr.to_string()) + .arg("-j2"), + execs().with_status(0), + ); + child.join().unwrap(); +} + +#[test] +fn jobserver_and_j() { + let make = if cfg!(windows) { + "mingw32-make" + } else { + "make" + }; + if Command::new(make).arg("--version").output().is_err() { + return; + } + + let p = 
project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + "Makefile", + "\ +all: +\t+$(CARGO) build -j2 +", + ) + .build(); + + assert_that( + p.process(make).env("CARGO", cargo_exe()).arg("-j2"), + execs().with_status(0).with_stderr( + "\ +warning: a `-j` argument was passed to Cargo but Cargo is also configured \ +with an external jobserver in its environment, ignoring the `-j` parameter +[COMPILING] [..] +[FINISHED] [..] +", + ), + ); +} diff --git a/tests/testsuite/local_registry.rs b/tests/testsuite/local_registry.rs new file mode 100644 index 000000000..4f73d43de --- /dev/null +++ b/tests/testsuite/local_registry.rs @@ -0,0 +1,491 @@ +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargotest::support::paths::{self, CargoPathExt}; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +fn setup() { + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all( + br#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + local-registry = 'registry' + "# + )); +} + +#[test] +fn simple() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.0.1" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate foo; + pub fn bar() { + foo::foo(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UNPACKING] foo v0.0.1 ([..]) +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] [..] +", + dir = p.url() + )), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); + assert_that(p.cargo("test"), execs().with_status(0)); +} + +#[test] +fn multiple_versions() { + setup(); + Package::new("foo", "0.0.1").local(true).publish(); + Package::new("foo", "0.1.0") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate foo; + pub fn bar() { + foo::foo(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UNPACKING] foo v0.1.0 ([..]) +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] [..] 
+", + dir = p.url() + )), + ); + + Package::new("foo", "0.2.0") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + assert_that( + p.cargo("update").arg("-v"), + execs() + .with_status(0) + .with_stderr("[UPDATING] foo v0.1.0 -> v0.2.0"), + ); +} + +#[test] +fn multiple_names() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .file("src/lib.rs", "pub fn bar() {}") + .publish(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] local v0.0.1 ({dir}) +[FINISHED] [..] +", + dir = p.url() + )), + ); +} + +#[test] +fn interdependent() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .dep("foo", "*") + .file("src/lib.rs", "extern crate foo; pub fn bar() {}") + .publish(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.1.0 +[COMPILING] local v0.0.1 ({dir}) +[FINISHED] [..] +", + dir = p.url() + )), + ); +} + +#[test] +fn path_dep_rewritten() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .dep("foo", "*") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "foo", version = "*" } + "#, + ) + .file("src/lib.rs", "extern crate foo; pub fn bar() {}") + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("foo/src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.1.0 +[COMPILING] local v0.0.1 ({dir}) +[FINISHED] [..] 
+", + dir = p.url() + )), + ); +} + +#[test] +fn invalid_dir_bad() { + setup(); + let p = project("local") + .file( + "Cargo.toml", + r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'my-awesome-local-directory' + + [source.my-awesome-local-directory] + local-registry = '/path/to/nowhere' + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to load source for a dependency on `foo` + +Caused by: + Unable to update registry `https://[..]` + +Caused by: + failed to update replaced source registry `https://[..]` + +Caused by: + local registry path is not a directory: [..]path[..]to[..]nowhere +", + ), + ); +} + +#[test] +fn different_directory_replacing_the_registry_is_bad() { + setup(); + + // Move our test's .cargo/config to a temporary location and publish a + // registry package we're going to use first. + let config = paths::root().join(".cargo"); + let config_tmp = paths::root().join(".cargo-old"); + t!(fs::rename(&config, &config_tmp)); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#, + ) + .file("src/lib.rs", "") + .build(); + + // Generate a lock file against the crates.io registry + Package::new("foo", "0.0.1").publish(); + assert_that(p.cargo("build"), execs().with_status(0)); + + // Switch back to our directory source, and now that we're replacing + // crates.io make sure that this fails because we're replacing with a + // different checksum + config.rm_rf(); + t!(fs::rename(&config_tmp, &config)); + Package::new("foo", "0.0.1") + .file("src/lib.rs", "invalid") + .local(true) + .publish(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] checksum for `foo v0.0.1` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `foo v0.0.1` is the same as when the lockfile was generated + +", + ), + ); +} + +#[test] +fn crates_io_registry_url_is_optional() { + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all( + br#" + [source.crates-io] + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + local-registry = 'registry' + "# + )); + + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.0.1" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate foo; + pub fn bar() { + foo::foo(); + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UNPACKING] foo v0.0.1 ([..]) +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] [..] 
+", + dir = p.url() + )), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); + assert_that(p.cargo("test"), execs().with_status(0)); +} diff --git a/tests/testsuite/lockfile_compat.rs b/tests/testsuite/lockfile_compat.rs new file mode 100644 index 000000000..f335a778a --- /dev/null +++ b/tests/testsuite/lockfile_compat.rs @@ -0,0 +1,507 @@ +use cargotest::support::git; +use cargotest::support::registry::Package; +use cargotest::support::{execs, lines_match, project}; +use hamcrest::assert_that; + +#[test] +fn oldest_lockfile_still_works() { + let cargo_commands = vec!["build", "update"]; + for cargo_command in cargo_commands { + oldest_lockfile_still_works_with_command(cargo_command); + } +} + +fn oldest_lockfile_still_works_with_command(cargo_command: &str) { + Package::new("foo", "0.1.0").publish(); + + let expected_lockfile = r#"[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "zzz" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[metadata] +"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "[..]" +"#; + + let old_lockfile = r#"[root] +name = "zzz" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +"#; + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "zzz" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file("Cargo.lock", old_lockfile) + .build(); + + assert_that(p.cargo(cargo_command), execs().with_status(0)); + + let lock = p.read_lockfile(); + for (l, r) in expected_lockfile.lines().zip(lock.lines()) { + assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); + } + + assert_eq!(lock.lines().count(), expected_lockfile.lines().count()); +} + +#[test] +fn frozen_flag_preserves_old_lockfile() { + let cksum = Package::new("foo", "0.1.0").publish(); + + let old_lockfile = format!( + r#"[root] +name = "zzz" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}" +"#, + cksum, + ); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "zzz" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file("Cargo.lock", &old_lockfile) + .build(); + + assert_that(p.cargo("build").arg("--locked"), execs().with_status(0)); + + let lock = p.read_lockfile(); + for (l, r) in old_lockfile.lines().zip(lock.lines()) { + assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); + } + + assert_eq!(lock.lines().count(), old_lockfile.lines().count()); +} + +#[test] +fn totally_wild_checksums_works() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file( + "Cargo.lock", + r#" +[[package]] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 
(registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"checksum foo 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"#,
+ );
+
+ let p = p.build();
+
+ assert_that(p.cargo("build"), execs().with_status(0));
+
+ let lock = p.read_lockfile();
+ assert!(
+ lock.starts_with(
+ r#"
+[[package]]
+name = "bar"
+version = "0.0.1"
+dependencies = [
+ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"#.trim()
+ )
+ );
+}
+
+#[test]
+fn wrong_checksum_is_an_error() {
+ Package::new("foo", "0.1.0").publish();
+
+ let p = project("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ foo = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.lock",
+ r#"
+[[package]]
+name = "bar"
+version = "0.0.1"
+dependencies = [
+ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"#,
+ );
+
+ let p = p.build();
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
+[UPDATING] registry `[..]`
+error: checksum for `foo v0.1.0` changed between lock files
+
+this could be indicative of a few possible errors:
+
+ * the lock file is corrupt
+ * a replacement source in use (e.g. a mirror) returned a different checksum
+ * the source itself may be corrupt in one way or another
+
+unable to verify that `foo v0.1.0` is the same as when the lockfile was generated
+
+",
+ ),
+ );
+}
+
+// If the checksum is unlisted in the lockfile (e.g. <none>) yet we can
+// calculate it (e.g. it's a registry dep), then we should in theory just fill
+// it in.
+#[test]
+fn unlisted_checksum_is_bad_if_we_calculate() {
+ Package::new("foo", "0.1.0").publish();
+
+ let p = project("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ foo = "0.1.0"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "Cargo.lock",
+ r#"
+[[package]]
+name = "bar"
+version = "0.0.1"
+dependencies = [
+ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "<none>"
+"#,
+ );
+ let p = p.build();
+
+ assert_that(
+ p.cargo("fetch"),
+ execs().with_status(101).with_stderr(
+ "\
+[UPDATING] registry `[..]`
+error: checksum for `foo v0.1.0` was not previously calculated, but a checksum \
+could now be calculated
+
+this could be indicative of a few possible situations:
+
+ * the source `[..]` did not previously support checksums,
+ but was replaced with one that does
+ * newer Cargo implementations know how to checksum this source, but this
+ older implementation does not
+ * the lock file is corrupt
+
+",
+ ),
+ );
+}
+
+// If the checksum is listed in the lockfile yet we cannot calculate it (e.g.
+// git dependencies as of today), then make sure we choke.
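+// (Checksums are computed for registry tarballs only; a git source has no
+// package archive to hash, so a checksum planted in the lock file for such a
+// source can never be re-verified, only rejected.)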
+#[test] +fn listed_checksum_bad_if_we_cannot_compute() { + let git = git::new("foo", |p| { + p.file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ).file("src/lib.rs", "") + }).unwrap(); + + let p = project("bar") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = {{ git = '{}' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .file( + "Cargo.lock", + &format!( + r#" +[[package]] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (git+{0})" +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "git+{0}" + +[metadata] +"checksum foo 0.1.0 (git+{0})" = "checksum" +"#, + git.url() + ), + ); + + let p = p.build(); + + assert_that( + p.cargo("fetch"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] git repository `[..]` +error: checksum for `foo v0.1.0 ([..])` could not be calculated, but a \ +checksum is listed in the existing lock file[..] + +this could be indicative of a few possible situations: + + * the source `[..]` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `foo v0.1.0 ([..])` is the same as when the lockfile was generated + +", + ), + ); +} + +#[test] +fn current_lockfile_format() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let actual = p.read_lockfile(); + + let expected = "\ +[[package]] +name = \"bar\" +version = \"0.0.1\" +dependencies = [ + \"foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)\", +] + +[[package]] +name = \"foo\" +version = \"0.1.0\" +source = \"registry+https://github.com/rust-lang/crates.io-index\" + +[metadata] +\"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)\" = \"[..]\""; + + for (l, r) in expected.lines().zip(actual.lines()) { + assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); + } + + assert_eq!(actual.lines().count(), expected.lines().count()); +} + +#[test] +fn lockfile_without_root() { + Package::new("foo", "0.1.0").publish(); + + let lockfile = r#"[[package]] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +"#; + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file("Cargo.lock", lockfile); + + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let lock = p.read_lockfile(); + assert!(lock.starts_with(lockfile.trim())); +} + +#[test] +fn locked_correct_error() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", ""); + let p = p.build(); + + assert_that( + p.cargo("build").arg("--locked"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] registry `[..]` +error: the lock file needs to be updated but --locked was passed to prevent this +", + ), + ); +} diff 
--git a/tests/testsuite/login.rs b/tests/testsuite/login.rs new file mode 100644 index 000000000..7ae9f9705 --- /dev/null +++ b/tests/testsuite/login.rs @@ -0,0 +1,195 @@ +use std::io::prelude::*; +use std::fs::{self, File}; + +use toml; +use cargotest::{cargo_process, ChannelChanger}; +use cargotest::support::execs; +use cargotest::support::registry::registry; +use cargotest::install::cargo_home; +use cargo::util::config::Config; +use cargo::core::Shell; +use hamcrest::{assert_that, existing_file, is_not}; + +const TOKEN: &str = "test-token"; +const ORIGINAL_TOKEN: &str = "api-token"; +const CONFIG_FILE: &str = r#" + [registry] + token = "api-token" + + [registries.test-reg] + index = "http://dummy_index/" +"#; + +fn setup_old_credentials() { + let config = cargo_home().join("config"); + t!(fs::create_dir_all(config.parent().unwrap())); + t!(t!(File::create(&config)).write_all(CONFIG_FILE.as_bytes())); +} + +fn setup_new_credentials() { + let config = cargo_home().join("credentials"); + t!(fs::create_dir_all(config.parent().unwrap())); + t!(t!(File::create(&config)).write_all( + format!( + r#" + token = "{token}" + "#, + token = ORIGINAL_TOKEN + ).as_bytes() + )); +} + +fn check_token(expected_token: &str, registry: Option<&str>) -> bool { + let credentials = cargo_home().join("credentials"); + assert_that(&credentials, existing_file()); + + let mut contents = String::new(); + File::open(&credentials) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + let toml: toml::Value = contents.parse().unwrap(); + + let token = match (registry, toml) { + // A registry has been provided, so check that the token exists in a + // table for the registry. + (Some(registry), toml::Value::Table(table)) => table + .get("registries") + .and_then(|registries_table| registries_table.get(registry)) + .and_then(|registry_table| match registry_table.get("token") { + Some(&toml::Value::String(ref token)) => Some(token.as_str().to_string()), + _ => None, + }), + // There is no registry provided, so check the global token instead. 
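+ // After `cargo login` the global token is stored under a top-level
+ // [registry] table in the credentials file, i.e. roughly:
+ //
+ //     [registry]
+ //     token = "..."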
+ (None, toml::Value::Table(table)) => table + .get("registry") + .and_then(|registry_table| registry_table.get("token")) + .and_then(|v| match v { + &toml::Value::String(ref token) => Some(token.as_str().to_string()), + _ => None, + }), + _ => None, + }; + + if let Some(token_val) = token { + token_val == expected_token + } else { + false + } +} + +#[test] +fn login_with_old_credentials() { + setup_old_credentials(); + + assert_that( + cargo_process() + .arg("login") + .arg("--host") + .arg(registry().to_string()) + .arg(TOKEN), + execs().with_status(0), + ); + + let config = cargo_home().join("config"); + assert_that(&config, existing_file()); + + let mut contents = String::new(); + File::open(&config) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert_eq!(CONFIG_FILE, contents); + + // Ensure that we get the new token for the registry + assert!(check_token(TOKEN, None)); +} + +#[test] +fn login_with_new_credentials() { + setup_new_credentials(); + + assert_that( + cargo_process() + .arg("login") + .arg("--host") + .arg(registry().to_string()) + .arg(TOKEN), + execs().with_status(0), + ); + + let config = cargo_home().join("config"); + assert_that(&config, is_not(existing_file())); + + // Ensure that we get the new token for the registry + assert!(check_token(TOKEN, None)); +} + +#[test] +fn login_with_old_and_new_credentials() { + setup_new_credentials(); + login_with_old_credentials(); +} + +#[test] +fn login_without_credentials() { + assert_that( + cargo_process() + .arg("login") + .arg("--host") + .arg(registry().to_string()) + .arg(TOKEN), + execs().with_status(0), + ); + + let config = cargo_home().join("config"); + assert_that(&config, is_not(existing_file())); + + // Ensure that we get the new token for the registry + assert!(check_token(TOKEN, None)); +} + +#[test] +fn new_credentials_is_used_instead_old() { + setup_old_credentials(); + setup_new_credentials(); + + assert_that( + cargo_process() + .arg("login") + .arg("--host") + .arg(registry().to_string()) + .arg(TOKEN), + execs().with_status(0), + ); + + let config = Config::new(Shell::new(), cargo_home(), cargo_home()); + + let token = config.get_string("registry.token").unwrap().map(|p| p.val); + assert_eq!(token.unwrap(), TOKEN); +} + +#[test] +fn registry_credentials() { + setup_old_credentials(); + setup_new_credentials(); + + let reg = "test-reg"; + + assert_that( + cargo_process() + .arg("login") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg(reg) + .arg(TOKEN) + .arg("-Zunstable-options"), + execs().with_status(0), + ); + + // Ensure that we have not updated the default token + assert!(check_token(ORIGINAL_TOKEN, None)); + + // Also ensure that we get the new token for the registry + assert!(check_token(TOKEN, Some(reg))); +} diff --git a/tests/testsuite/main.rs b/tests/testsuite/main.rs new file mode 100644 index 000000000..9f07fa873 --- /dev/null +++ b/tests/testsuite/main.rs @@ -0,0 +1,91 @@ +#![deny(warnings)] + +extern crate bufstream; +extern crate cargo; +extern crate filetime; +extern crate flate2; +extern crate git2; +extern crate glob; +extern crate hex; +extern crate libc; +#[macro_use] +extern crate serde_derive; +#[macro_use] +extern crate serde_json; +extern crate tar; +extern crate tempfile; +extern crate toml; +extern crate url; +#[cfg(windows)] +extern crate winapi; + +#[macro_use] +mod cargotest; +mod hamcrest; + +mod alt_registry; +mod bad_config; +mod bad_manifest_path; +mod bench; +mod build_auth; +mod build_lib; +mod build; +mod build_script_env; +mod build_script; 
+mod cargo_alias_config; +mod cargo_features; +mod cargo_command; +mod cfg; +mod check; +mod clean; +mod concurrent; +mod config; +mod corrupt_git; +mod cross_compile; +mod cross_publish; +mod custom_target; +mod death; +mod dep_info; +mod directory; +mod doc; +mod features; +mod fetch; +mod freshness; +mod generate_lockfile; +mod git; +mod init; +mod install; +mod jobserver; +mod local_registry; +mod lockfile_compat; +mod login; +mod metadata; +mod net_config; +mod new; +mod overrides; +mod package; +mod patch; +mod path; +mod plugins; +mod proc_macro; +mod profiles; +mod publish; +mod read_manifest; +mod registry; +mod rename_deps; +mod required_features; +mod resolve; +mod run; +mod rustc; +mod rustdocflags; +mod rustdoc; +mod rustflags; +mod search; +mod small_fd_limits; +mod test; +mod tool_paths; +mod update; +mod verify_project; +mod version; +mod warn_on_failure; +mod workspaces; diff --git a/tests/testsuite/metadata.rs b/tests/testsuite/metadata.rs new file mode 100644 index 000000000..4a2154bd9 --- /dev/null +++ b/tests/testsuite/metadata.rs @@ -0,0 +1,869 @@ +use hamcrest::assert_that; +use cargotest::support::registry::Package; +use cargotest::support::{basic_bin_manifest, basic_lib_manifest, execs, main_file, project}; + +#[test] +fn cargo_metadata_simple() { + let p = project("foo") + .file("src/foo.rs", "") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .build(); + + assert_that( + p.cargo("metadata"), + execs().with_json( + r#" + { + "packages": [ + { + "name": "foo", + "version": "0.5.0", + "id": "foo[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ + "bin" + ], + "crate_types": [ + "bin" + ], + "name": "foo", + "src_path": "[..][/]foo[/]src[/]foo.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml" + } + ], + "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], + "resolve": { + "nodes": [ + { + "dependencies": [], + "features": [], + "id": "foo 0.5.0 (path+file:[..]foo)" + } + ], + "root": "foo 0.5.0 (path+file:[..]foo)" + }, + "target_directory": "[..]foo[/]target", + "version": 1, + "workspace_root": "[..][/]foo" + }"#, + ), + ); +} + +#[test] +fn cargo_metadata_warns_on_implicit_version() { + let p = project("foo") + .file("src/foo.rs", "") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .build(); + + assert_that(p.cargo("metadata"), + execs().with_stderr("[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems")); + + assert_that( + p.cargo("metadata").arg("--format-version").arg("1"), + execs().with_stderr(""), + ); +} + +#[test] +fn library_with_several_crate_types() { + let p = project("foo") + .file("src/lib.rs", "") + .file( + "Cargo.toml", + r#" +[package] +name = "foo" +version = "0.5.0" + +[lib] +crate-type = ["lib", "staticlib"] + "#, + ) + .build(); + + assert_that( + p.cargo("metadata"), + execs().with_json( + r#" + { + "packages": [ + { + "name": "foo", + "version": "0.5.0", + "id": "foo[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ + "lib", + "staticlib" + ], + "crate_types": [ + "lib", + "staticlib" + ], + "name": "foo", + "src_path": "[..][/]foo[/]src[/]lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml" + } + ], + "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], + "resolve": { + "nodes": [ + { + "dependencies": [], + "features": [], + "id": "foo 0.5.0 (path+file:[..]foo)" + } + 
], + "root": "foo 0.5.0 (path+file:[..]foo)" + }, + "target_directory": "[..]foo[/]target", + "version": 1, + "workspace_root": "[..][/]foo" + }"#, + ), + ); +} + +#[test] +fn library_with_features() { + let p = project("foo") + .file("src/lib.rs", "") + .file( + "Cargo.toml", + r#" +[package] +name = "foo" +version = "0.5.0" + +[features] +default = ["default_feat"] +default_feat = [] +optional_feat = [] + "#, + ) + .build(); + + assert_that( + p.cargo("metadata"), + execs().with_json( + r#" + { + "packages": [ + { + "name": "foo", + "version": "0.5.0", + "id": "foo[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ + "lib" + ], + "crate_types": [ + "lib" + ], + "name": "foo", + "src_path": "[..][/]foo[/]src[/]lib.rs" + } + ], + "features": { + "default": [ + "default_feat" + ], + "default_feat": [], + "optional_feat": [] + }, + "manifest_path": "[..]Cargo.toml" + } + ], + "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], + "resolve": { + "nodes": [ + { + "dependencies": [], + "features": [ + "default", + "default_feat" + ], + "id": "foo 0.5.0 (path+file:[..]foo)" + } + ], + "root": "foo 0.5.0 (path+file:[..]foo)" + }, + "target_directory": "[..]foo[/]target", + "version": 1, + "workspace_root": "[..][/]foo" + }"#, + ), + ); +} + +#[test] +fn cargo_metadata_with_deps_and_version() { + let p = project("foo") + .file("src/foo.rs", "") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + license = "MIT" + description = "foo" + + [[bin]] + name = "foo" + + [dependencies] + bar = "*" + "#, + ) + .build(); + Package::new("baz", "0.0.1").publish(); + Package::new("bar", "0.0.1").dep("baz", "0.0.1").publish(); + + assert_that( + p.cargo("metadata") + .arg("-q") + .arg("--format-version") + .arg("1"), + execs().with_json( + r#" + { + "packages": [ + { + "dependencies": [], + "features": {}, + "id": "baz 0.0.1 (registry+[..])", + "manifest_path": "[..]Cargo.toml", + "name": "baz", + "source": "registry+[..]", + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ + "lib" + ], + "crate_types": [ + "lib" + ], + "name": "baz", + "src_path": "[..]lib.rs" + } + ], + "version": "0.0.1" + }, + { + "dependencies": [ + { + "features": [], + "kind": null, + "name": "baz", + "optional": false, + "req": "^0.0.1", + "source": "registry+[..]", + "target": null, + "uses_default_features": true, + "rename": null + } + ], + "features": {}, + "id": "bar 0.0.1 (registry+[..])", + "manifest_path": "[..]Cargo.toml", + "name": "bar", + "source": "registry+[..]", + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ + "lib" + ], + "crate_types": [ + "lib" + ], + "name": "bar", + "src_path": "[..]lib.rs" + } + ], + "version": "0.0.1" + }, + { + "dependencies": [ + { + "features": [], + "kind": null, + "name": "bar", + "optional": false, + "req": "*", + "source": "registry+[..]", + "target": null, + "uses_default_features": true, + "rename": null + } + ], + "features": {}, + "id": "foo 0.5.0 (path+file:[..]foo)", + "manifest_path": "[..]Cargo.toml", + "name": "foo", + "source": null, + "license": "MIT", + "license_file": null, + "description": "foo", + "targets": [ + { + "kind": [ + "bin" + ], + "crate_types": [ + "bin" + ], + "name": "foo", + "src_path": "[..]foo.rs" + } + ], + "version": "0.5.0" + } + ], + "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], + "resolve": { + "nodes": [ + { + 
"dependencies": [ + "bar 0.0.1 (registry+[..])" + ], + "features": [], + "id": "foo 0.5.0 (path+file:[..]foo)" + }, + { + "dependencies": [ + "baz 0.0.1 (registry+[..])" + ], + "features": [], + "id": "bar 0.0.1 (registry+[..])" + }, + { + "dependencies": [], + "features": [], + "id": "baz 0.0.1 (registry+[..])" + } + ], + "root": "foo 0.5.0 (path+file:[..]foo)" + }, + "target_directory": "[..]foo[/]target", + "version": 1, + "workspace_root": "[..][/]foo" + }"#, + ), + ); +} + +#[test] +fn example() { + let p = project("foo") + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .file( + "Cargo.toml", + r#" +[package] +name = "foo" +version = "0.1.0" + +[[example]] +name = "ex" + "#, + ) + .build(); + + assert_that( + p.cargo("metadata"), + execs().with_json( + r#" + { + "packages": [ + { + "name": "foo", + "version": "0.1.0", + "id": "foo[..]", + "license": null, + "license_file": null, + "description": null, + "source": null, + "dependencies": [], + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "name": "foo", + "src_path": "[..][/]foo[/]src[/]lib.rs" + }, + { + "kind": [ "example" ], + "crate_types": [ "bin" ], + "name": "ex", + "src_path": "[..][/]foo[/]examples[/]ex.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml" + } + ], + "workspace_members": [ + "foo 0.1.0 (path+file:[..]foo)" + ], + "resolve": { + "root": "foo 0.1.0 (path+file://[..]foo)", + "nodes": [ + { + "id": "foo 0.1.0 (path+file:[..]foo)", + "features": [], + "dependencies": [] + } + ] + }, + "target_directory": "[..]foo[/]target", + "version": 1, + "workspace_root": "[..][/]foo" + }"#, + ), + ); +} + +#[test] +fn example_lib() { + let p = project("foo") + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .file( + "Cargo.toml", + r#" +[package] +name = "foo" +version = "0.1.0" + +[[example]] +name = "ex" +crate-type = ["rlib", "dylib"] + "#, + ) + .build(); + + assert_that( + p.cargo("metadata"), + execs().with_json( + r#" + { + "packages": [ + { + "name": "foo", + "version": "0.1.0", + "id": "foo[..]", + "license": null, + "license_file": null, + "description": null, + "source": null, + "dependencies": [], + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "name": "foo", + "src_path": "[..][/]foo[/]src[/]lib.rs" + }, + { + "kind": [ "example" ], + "crate_types": [ "rlib", "dylib" ], + "name": "ex", + "src_path": "[..][/]foo[/]examples[/]ex.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml" + } + ], + "workspace_members": [ + "foo 0.1.0 (path+file:[..]foo)" + ], + "resolve": { + "root": "foo 0.1.0 (path+file://[..]foo)", + "nodes": [ + { + "id": "foo 0.1.0 (path+file:[..]foo)", + "features": [], + "dependencies": [] + } + ] + }, + "target_directory": "[..]foo[/]target", + "version": 1, + "workspace_root": "[..][/]foo" + }"#, + ), + ); +} + +#[test] +fn workspace_metadata() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .file("baz/Cargo.toml", &basic_lib_manifest("baz")) + .file("baz/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("metadata"), + execs().with_status(0).with_json( + r#" + { + "packages": [ + { + "name": "bar", + "version": "0.5.0", + "id": "bar[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "name": "bar", + "src_path": 
"[..]bar[/]src[/]lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]bar[/]Cargo.toml" + }, + { + "name": "baz", + "version": "0.5.0", + "id": "baz[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "name": "baz", + "src_path": "[..]baz[/]src[/]lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]baz[/]Cargo.toml" + } + ], + "workspace_members": ["baz 0.5.0 (path+file:[..]baz)", "bar 0.5.0 (path+file:[..]bar)"], + "resolve": { + "nodes": [ + { + "dependencies": [], + "features": [], + "id": "baz 0.5.0 (path+file:[..]baz)" + }, + { + "dependencies": [], + "features": [], + "id": "bar 0.5.0 (path+file:[..]bar)" + } + ], + "root": null + }, + "target_directory": "[..]foo[/]target", + "version": 1, + "workspace_root": "[..][/]foo" + }"#, + ), + ) +} + +#[test] +fn workspace_metadata_no_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .file("baz/Cargo.toml", &basic_lib_manifest("baz")) + .file("baz/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("metadata").arg("--no-deps"), + execs().with_status(0).with_json( + r#" + { + "packages": [ + { + "name": "bar", + "version": "0.5.0", + "id": "bar[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "name": "bar", + "src_path": "[..]bar[/]src[/]lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]bar[/]Cargo.toml" + }, + { + "name": "baz", + "version": "0.5.0", + "id": "baz[..]", + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "description": null, + "targets": [ + { + "kind": [ "lib" ], + "crate_types": ["lib"], + "name": "baz", + "src_path": "[..]baz[/]src[/]lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]baz[/]Cargo.toml" + } + ], + "workspace_members": ["baz 0.5.0 (path+file:[..]baz)", "bar 0.5.0 (path+file:[..]bar)"], + "resolve": null, + "target_directory": "[..]foo[/]target", + "version": 1, + "workspace_root": "[..][/]foo" + }"#, + ), + ) +} + +#[test] +fn cargo_metadata_with_invalid_manifest() { + let p = project("foo").file("Cargo.toml", "").build(); + + assert_that( + p.cargo("metadata").arg("--format-version").arg("1"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + virtual manifests must be configured with [workspace]", + ), + ) +} + +const MANIFEST_OUTPUT: &'static str = r#" +{ + "packages": [{ + "name":"foo", + "version":"0.5.0", + "id":"foo[..]0.5.0[..](path+file://[..]/foo)", + "source":null, + "dependencies":[], + "license": null, + "license_file": null, + "description": null, + "targets":[{ + "kind":["bin"], + "crate_types":["bin"], + "name":"foo", + "src_path":"[..][/]foo[/]src[/]foo.rs" + }], + "features":{}, + "manifest_path":"[..]Cargo.toml" + }], + "workspace_members": [ "foo 0.5.0 (path+file:[..]foo)" ], + "resolve": null, + "target_directory": "[..]foo[/]target", + "version": 1, + "workspace_root": "[..][/]foo" +}"#; + +#[test] +fn cargo_metadata_no_deps_path_to_cargo_toml_relative() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("metadata") + .arg("--no-deps") + 
.arg("--manifest-path") + .arg("foo/Cargo.toml") + .cwd(p.root().parent().unwrap()), + execs().with_status(0).with_json(MANIFEST_OUTPUT), + ); +} + +#[test] +fn cargo_metadata_no_deps_path_to_cargo_toml_absolute() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("metadata") + .arg("--no-deps") + .arg("--manifest-path") + .arg(p.root().join("Cargo.toml")) + .cwd(p.root().parent().unwrap()), + execs().with_status(0).with_json(MANIFEST_OUTPUT), + ); +} + +#[test] +fn cargo_metadata_no_deps_path_to_cargo_toml_parent_relative() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("metadata") + .arg("--no-deps") + .arg("--manifest-path") + .arg("foo") + .cwd(p.root().parent().unwrap()), + execs().with_status(101).with_stderr( + "[ERROR] the manifest-path must be \ + a path to a Cargo.toml file", + ), + ); +} + +#[test] +fn cargo_metadata_no_deps_path_to_cargo_toml_parent_absolute() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("metadata") + .arg("--no-deps") + .arg("--manifest-path") + .arg(p.root()) + .cwd(p.root().parent().unwrap()), + execs().with_status(101).with_stderr( + "[ERROR] the manifest-path must be \ + a path to a Cargo.toml file", + ), + ); +} + +#[test] +fn cargo_metadata_no_deps_cwd() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("metadata").arg("--no-deps").cwd(p.root()), + execs().with_status(0).with_json(MANIFEST_OUTPUT), + ); +} + +#[test] +fn cargo_metadata_bad_version() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("metadata") + .arg("--no-deps") + .arg("--format-version") + .arg("2") + .cwd(p.root()), + execs().with_status(1).with_stderr_contains( + "\ +error: '2' isn't a valid value for '--format-version ' +[possible values: 1] +", + ), + ); +} + +#[test] +fn multiple_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [features] + a = [] + b = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("metadata").arg("--features").arg("a b"), + execs().with_status(0), + ); +} diff --git a/tests/testsuite/net_config.rs b/tests/testsuite/net_config.rs new file mode 100644 index 000000000..46417ca69 --- /dev/null +++ b/tests/testsuite/net_config.rs @@ -0,0 +1,75 @@ +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn net_retry_loads_from_config() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "https://127.0.0.1:11/foo/bar" + "#, + ) + .file("src/main.rs", "") + .file( + ".cargo/config", + r#" + [net] + retry=1 + [http] + timeout=1 + "#, + ) + .build(); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr_contains( + "[WARNING] spurious network error \ + (1 tries remaining): [..]", + ), + ); +} + +#[test] +fn net_retry_git_outputs_warning() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + 
[project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "https://127.0.0.1:11/foo/bar" + "#, + ) + .file( + ".cargo/config", + r#" + [http] + timeout=1 + "#, + ) + .file("src/main.rs", "") + .build(); + + assert_that( + p.cargo("build").arg("-v").arg("-j").arg("1"), + execs() + .with_status(101) + .with_stderr_contains( + "[WARNING] spurious network error \ + (2 tries remaining): [..]", + ) + .with_stderr_contains("[WARNING] spurious network error (1 tries remaining): [..]"), + ); +} diff --git a/tests/testsuite/new.rs b/tests/testsuite/new.rs new file mode 100644 index 000000000..e5522f72d --- /dev/null +++ b/tests/testsuite/new.rs @@ -0,0 +1,547 @@ +use std::fs::{self, File}; +use std::io::prelude::*; +use std::env; + +use cargotest; +use cargo::util::ProcessBuilder; +use cargotest::process; +use cargotest::support::{execs, paths}; +use hamcrest::{assert_that, existing_dir, existing_file, is_not}; +use tempfile; + +fn cargo_process(s: &str) -> ProcessBuilder { + let mut p = cargotest::cargo_process(); + p.arg(s); + p +} + +fn create_empty_gitconfig() { + // This helps on Windows where libgit2 is very aggressive in attempting to + // find a git config file. + let gitconfig = paths::home().join(".gitconfig"); + File::create(gitconfig).unwrap(); +} + +#[test] +fn simple_lib() { + assert_that( + cargo_process("new") + .arg("--lib") + .arg("foo") + .arg("--vcs") + .arg("none") + .env("USER", "foo"), + execs() + .with_status(0) + .with_stderr("[CREATED] library `foo` project"), + ); + + assert_that(&paths::root().join("foo"), existing_dir()); + assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); + assert_that(&paths::root().join("foo/src/lib.rs"), existing_file()); + assert_that( + &paths::root().join("foo/.gitignore"), + is_not(existing_file()), + ); + + let lib = paths::root().join("foo/src/lib.rs"); + let mut contents = String::new(); + File::open(&lib) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert_eq!( + contents, + r#"#[cfg(test)] +mod tests { + #[test] + fn it_works() { + assert_eq!(2 + 2, 4); + } +} +"# + ); + + assert_that( + cargo_process("build").cwd(&paths::root().join("foo")), + execs().with_status(0), + ); +} + +#[test] +fn simple_bin() { + assert_that( + cargo_process("new") + .arg("--bin") + .arg("foo") + .env("USER", "foo"), + execs() + .with_status(0) + .with_stderr("[CREATED] binary (application) `foo` project"), + ); + + assert_that(&paths::root().join("foo"), existing_dir()); + assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); + assert_that(&paths::root().join("foo/src/main.rs"), existing_file()); + + assert_that( + cargo_process("build").cwd(&paths::root().join("foo")), + execs().with_status(0), + ); + assert_that( + &paths::root().join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX)), + existing_file(), + ); +} + +#[test] +fn both_lib_and_bin() { + assert_that( + cargo_process("new") + .arg("--lib") + .arg("--bin") + .arg("foo") + .env("USER", "foo"), + execs() + .with_status(101) + .with_stderr("[ERROR] can't specify both lib and binary outputs"), + ); +} + +#[test] +fn simple_git() { + // Run inside a temp directory so that cargo will initialize a git repo. + // If this ran inside paths::root() it would detect that we are already + // inside a git repo and skip the initialization. 
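+ // (`cargo new` defaults to initializing a git repository only when the
+ // target directory is not already inside a git work tree, which is why a
+ // detached temporary directory is used here.)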
+ let td = tempfile::Builder::new().prefix("cargo").tempdir().unwrap();
+ assert_that(
+ cargo_process("new")
+ .arg("--lib")
+ .arg("foo")
+ .cwd(td.path())
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
+
+ assert_that(td.path(), existing_dir());
+ assert_that(&td.path().join("foo/Cargo.toml"), existing_file());
+ assert_that(&td.path().join("foo/src/lib.rs"), existing_file());
+ assert_that(&td.path().join("foo/.git"), existing_dir());
+ assert_that(&td.path().join("foo/.gitignore"), existing_file());
+
+ assert_that(
+ cargo_process("build").cwd(&td.path().join("foo")),
+ execs().with_status(0),
+ );
+}
+
+#[test]
+fn no_argument() {
+ assert_that(
+ cargo_process("new"),
+ execs().with_status(1).with_stderr_contains(
+ "\
+error: The following required arguments were not provided:
+    <path>
+",
+ ),
+ );
+}
+
+#[test]
+fn existing() {
+ let dst = paths::root().join("foo");
+ fs::create_dir(&dst).unwrap();
+ assert_that(
+ cargo_process("new").arg("foo"),
+ execs().with_status(101).with_stderr(format!(
+ "[ERROR] destination `{}` already exists\n\n\
+ Use `cargo init` to initialize the directory",
+ dst.display()
+ )),
+ );
+}
+
+#[test]
+fn invalid_characters() {
+ assert_that(
+ cargo_process("new").arg("foo.rs"),
+ execs().with_status(101).with_stderr(
+ "\
+[ERROR] Invalid character `.` in crate name: `foo.rs`
+use --name to override crate name",
+ ),
+ );
+}
+
+#[test]
+fn reserved_name() {
+ assert_that(
+ cargo_process("new").arg("test"),
+ execs().with_status(101).with_stderr(
+ "\
+ [ERROR] The name `test` cannot be used as a crate name\n\
+ use --name to override crate name",
+ ),
+ );
+}
+
+#[test]
+fn reserved_binary_name() {
+ assert_that(
+ cargo_process("new").arg("--bin").arg("incremental"),
+ execs().with_status(101).with_stderr(
+ "\
+ [ERROR] The name `incremental` cannot be used as a crate name\n\
+ use --name to override crate name",
+ ),
+ );
+}
+
+#[test]
+fn keyword_name() {
+ assert_that(
+ cargo_process("new").arg("pub"),
+ execs().with_status(101).with_stderr(
+ "\
+ [ERROR] The name `pub` cannot be used as a crate name\n\
+ use --name to override crate name",
+ ),
+ );
+}
+
+#[test]
+fn finds_author_user() {
+ create_empty_gitconfig();
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo"),
+ execs().with_status(0),
+ );
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["foo"]"#));
+}
+
+#[test]
+fn finds_author_user_escaped() {
+ create_empty_gitconfig();
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo \"bar\""),
+ execs().with_status(0),
+ );
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["foo \"bar\""]"#));
+}
+
+#[test]
+fn finds_author_username() {
+ create_empty_gitconfig();
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .env_remove("USER")
+ .env("USERNAME", "foo"),
+ execs().with_status(0),
+ );
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["foo"]"#));
+}
+
+#[test]
+fn finds_author_priority() {
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .env("USER", "bar2")
+ .env("EMAIL", "baz2")
+ .env("CARGO_NAME", "bar")
+ .env("CARGO_EMAIL", "baz"),
+ execs().with_status(0),
+ );
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_author_email() {
+ create_empty_gitconfig();
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .env("USER", "bar")
+ .env("EMAIL", "baz"),
+ execs().with_status(0),
+ );
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_author_git() {
+ process("git")
+ .args(&["config", "--global", "user.name", "bar"])
+ .exec()
+ .unwrap();
+ process("git")
+ .args(&["config", "--global", "user.email", "baz"])
+ .exec()
+ .unwrap();
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo"),
+ execs().with_status(0),
+ );
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_local_author_git() {
+ process("git").args(&["init"]).exec().unwrap();
+ process("git")
+ .args(&["config", "--global", "user.name", "foo"])
+ .exec()
+ .unwrap();
+ process("git")
+ .args(&["config", "--global", "user.email", "foo@bar"])
+ .exec()
+ .unwrap();
+
+ // Set local git user config
+ process("git")
+ .args(&["config", "user.name", "bar"])
+ .exec()
+ .unwrap();
+ process("git")
+ .args(&["config", "user.email", "baz"])
+ .exec()
+ .unwrap();
+ assert_that(
+ cargo_process("init").env("USER", "foo"),
+ execs().with_status(0),
+ );
+
+ let toml = paths::root().join("Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_git_email() {
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .env("GIT_AUTHOR_NAME", "foo")
+ .env("GIT_AUTHOR_EMAIL", "gitfoo"),
+ execs().with_status(0),
+ );
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["foo <gitfoo>"]"#), contents);
+}
+
+#[test]
+fn finds_git_author() {
+ create_empty_gitconfig();
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .env_remove("USER")
+ .env("GIT_COMMITTER_NAME", "gitfoo"),
+ execs().with_status(0),
+ );
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["gitfoo"]"#));
+}
+
+#[test]
+fn author_prefers_cargo() {
+ process("git")
+ .args(&["config", "--global", "user.name", "foo"])
+ .exec()
+ .unwrap();
+ process("git")
+ .args(&["config", "--global", "user.email", "bar"])
+ .exec()
+ .unwrap();
+ let root = paths::root();
+ fs::create_dir(&root.join(".cargo")).unwrap();
+ File::create(&root.join(".cargo/config"))
+ .unwrap()
+ .write_all(
+ br#"
+ [cargo-new]
+ name = "new-foo"
+ email = "new-bar"
+ vcs = "none"
+ "#,
+ )
+ .unwrap();
+
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo"),
+ execs().with_status(0),
+ );
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["new-foo <new-bar>"]"#));
+ assert!(!root.join("foo/.gitignore").exists());
+}
+
+#[test]
+fn git_prefers_command_line() {
+ let root = paths::root();
+ fs::create_dir(&root.join(".cargo")).unwrap();
+ File::create(&root.join(".cargo/config"))
+ .unwrap()
+ .write_all(
+ br#"
+ [cargo-new]
+ vcs = "none"
+ name = "foo"
+ email = "bar"
+ "#,
+ )
+ .unwrap();
+
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .arg("--vcs")
+ .arg("git")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
+ assert!(paths::root().join("foo/.gitignore").exists());
+}
+
+#[test]
+fn subpackage_no_git() {
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo"),
+ execs().with_status(0),
+ );
+
+ let subpackage = paths::root().join("foo").join("components");
+ fs::create_dir(&subpackage).unwrap();
+ assert_that(
+ cargo_process("new")
+ .arg("foo/components/subcomponent")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
+
+ assert_that(
+ &paths::root().join("foo/components/subcomponent/.git"),
+ is_not(existing_file()),
+ );
+ assert_that(
+ &paths::root().join("foo/components/subcomponent/.gitignore"),
+ is_not(existing_file()),
+ );
+}
+
+#[test]
+fn subpackage_git_with_vcs_arg() {
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo"),
+ execs().with_status(0),
+ );
+
+ let subpackage = paths::root().join("foo").join("components");
+ fs::create_dir(&subpackage).unwrap();
+ assert_that(
+ cargo_process("new")
+ .arg("foo/components/subcomponent")
+ .arg("--vcs")
+ .arg("git")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
+
+ assert_that(
+ &paths::root().join("foo/components/subcomponent/.git"),
+ existing_dir(),
+ );
+ assert_that(
+ &paths::root().join("foo/components/subcomponent/.gitignore"),
+ existing_file(),
+ );
+}
+
+#[test]
+fn unknown_flags() {
+ assert_that(
+ cargo_process("new").arg("foo").arg("--flag"),
+ execs().with_status(1).with_stderr_contains(
+ "error: Found argument '--flag' which wasn't expected, or isn't valid in this context",
+ ),
+ );
+}
+
+#[test]
+fn explicit_invalid_name_not_suggested() {
+ assert_that(
+ cargo_process("new")
+ .arg("--name")
+ .arg("10-invalid")
+ .arg("a"),
+ execs().with_status(101).with_stderr(
+ "[ERROR] Package names starting with a digit cannot be used as a crate name",
+ ),
+ );
+}
diff --git a/tests/testsuite/overrides.rs b/tests/testsuite/overrides.rs
new file mode 100644
index 000000000..80325197f
--- /dev/null
+++ b/tests/testsuite/overrides.rs
@@ -0,0 +1,1627 @@
+use cargotest::support::git;
+use cargotest::support::paths;
+use cargotest::support::registry::Package;
+use cargotest::support::{execs, project};
+use hamcrest::assert_that;
+
+#[test]
+fn override_simple() {
+ Package::new("foo", "0.1.0").publish();
+
+ let foo = git::repo(&paths::root().join("override"))
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
+ .build();
+
+ let p = project("local")
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "local"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ foo = "0.1.0"
+
+ [replace]
+ "foo:0.1.0" = {{ git = '{}' }}
+ "#,
+ foo.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
+ extern crate foo;
+ pub fn bar() {
+ foo::foo();
+ }
+ ",
+ )
+ .build();
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
+[UPDATING] registry `file://[..]`
+[UPDATING] git repository `[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn missing_version() { + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + foo = { git = 'https://example.com' } + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements must specify a version to replace, but `[..]foo` does not +", + ), + ); +} + +#[test] +fn invalid_semver_version() { + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + + [replace] + "foo:*" = { git = 'https://example.com' } + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements must specify a valid semver version to replace, but `foo:*` does not +", + ), + ); +} + +#[test] +fn different_version() { + Package::new("foo", "0.2.0").publish(); + Package::new("foo", "0.1.0").publish(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = "0.2.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements cannot specify a version requirement, but found one for [..] +", + ), + ); +} + +#[test] +fn transitive() { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.2.0") + .dep("foo", "0.1.0") + .file("src/lib.rs", "extern crate foo; fn bar() { foo::foo(); }") + .publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.2.0" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[UPDATING] git repository `[..]` +[DOWNLOADING] bar v0.2.0 (registry [..]) +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] bar v0.2.0 +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn persists_across_rebuilds() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file( + "src/lib.rs", + " + extern crate foo; + pub fn bar() { + foo::foo(); + } + ", + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[UPDATING] git repository `file://[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn replace_registry_with_path() { + Package::new("foo", "0.1.0").publish(); + + let _ = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = { path = "../foo" } + "#, + ) + .file( + "src/lib.rs", + " + extern crate foo; + pub fn bar() { + foo::foo(); + } + ", + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn use_a_spec_to_select() { + Package::new("foo", "0.1.1") + .file("src/lib.rs", "pub fn foo1() {}") + .publish(); + Package::new("foo", "0.2.0").publish(); + Package::new("bar", "0.1.1") + .dep("foo", "0.2") + .file( + "src/lib.rs", + " + extern crate foo; + pub fn bar() { foo::foo3(); } + ", + ) + .publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo3() {}") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + foo = "0.1" + + [replace] + "foo:0.2.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file( + "src/lib.rs", + " + extern crate foo; + extern crate bar; + + pub fn local() { + foo::foo1(); + bar::bar(); + } + ", + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[UPDATING] git repository `[..]` +[DOWNLOADING] [..] +[DOWNLOADING] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn override_adds_some_deps() { + Package::new("foo", "0.1.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[UPDATING] git repository `[..]` +[DOWNLOADING] foo v0.1.1 (registry [..]) +[COMPILING] foo v0.1.1 +[COMPILING] bar v0.1.0 ([..]) +[COMPILING] local v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + + Package::new("foo", "0.1.2").publish(); + assert_that( + p.cargo("update") + .arg("-p") + .arg(&format!("{}#bar", foo.url())), + execs().with_status(0).with_stderr( + "\ +[UPDATING] git repository `file://[..]` +[UPDATING] registry `file://[..]` +", + ), + ); + assert_that( + p.cargo("update") + .arg("-p") + .arg("https://github.com/rust-lang/crates.io-index#bar"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +", + ), + ); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn locked_means_locked_yes_no_seriously_i_mean_locked() { + // this in theory exercises #2041 + Package::new("foo", "0.1.0").publish(); + Package::new("foo", "0.2.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "*" + "#, + ) + .file("src/lib.rs", "") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + bar = "0.1" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn override_wrong_name() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] registry [..] +[UPDATING] git repository [..] +error: no matching package for override `[..]foo:0.1.0` found +location searched: file://[..] 
+version required: = 0.1.0 +", + ), + ); +} + +#[test] +fn override_with_nothing() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("src/lib.rs", "") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] registry [..] +[UPDATING] git repository [..] +[ERROR] failed to load source for a dependency on `foo` + +Caused by: + Unable to update file://[..] + +Caused by: + Could not find Cargo.toml in `[..]` +", + ), + ); +} + +#[test] +fn override_wrong_version() { + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [replace] + "foo:0.1.0" = { git = 'https://example.com', version = '0.2.0' } + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements cannot specify a version requirement, but found one for `[..]foo:0.1.0` +", + ), + ); +} + +#[test] +fn multiple_specs() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{0}' }} + + [replace."https://github.com/rust-lang/crates.io-index#foo:0.1.0"] + git = '{0}' + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] registry [..] +[UPDATING] git repository [..] +error: overlapping replacement specifications found: + + * [..] + * [..] + +both specifications match: foo v0.1.0 +", + ), + ); +} + +#[test] +fn test_override_dep() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{0}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("test").arg("-p").arg("foo"), + execs().with_status(101).with_stderr_contains( + "\ +error: There are multiple `foo` packages in your project, and the [..] +Please re-run this command with [..] 
+ [..]#foo:0.1.0 + [..]#foo:0.1.0 +", + ), + ); +} + +#[test] +fn update() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{0}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + assert_that( + p.cargo("update"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[UPDATING] git repository `[..]` +", + ), + ); +} + +// local -> near -> far +// near is overridden with itself +#[test] +fn no_override_self() { + let deps = git::repo(&paths::root().join("override")) + .file( + "far/Cargo.toml", + r#" + [package] + name = "far" + version = "0.1.0" + authors = [] + "#, + ) + .file("far/src/lib.rs", "") + .file( + "near/Cargo.toml", + r#" + [package] + name = "near" + version = "0.1.0" + authors = [] + + [dependencies] + far = { path = "../far" } + "#, + ) + .file( + "near/src/lib.rs", + r#" + #![no_std] + pub extern crate far; + "#, + ) + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + near = {{ git = '{0}' }} + + [replace] + "near:0.1.0" = {{ git = '{0}' }} + "#, + deps.url() + ), + ) + .file( + "src/lib.rs", + r#" + #![no_std] + pub extern crate near; + "#, + ) + .build(); + + assert_that(p.cargo("build").arg("--verbose"), execs().with_status(0)); +} + +#[test] +fn broken_path_override_warns() { + Package::new("foo", "0.1.0").publish(); + Package::new("foo", "0.2.0").publish(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a1" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a1/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + "#, + ) + .file("a1/src/lib.rs", "") + .file( + "a2/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.2" + "#, + ) + .file("a2/src/lib.rs", "") + .file( + ".cargo/config", + r#" + paths = ["a2"] + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] [..] +warning: path override for crate `a` has altered the original list of +dependencies; the dependency on `foo` was either added or +modified to not match the previously resolved version + +This is currently allowed but is known to produce buggy behavior with spurious +recompiles and changes to the crate graph. Path overrides unfortunately were +never intended to support this feature, so for now this message is just a +warning. In the future, however, this message will become a hard error. + +To change the dependency graph via an override it's recommended to use the +`[replace]` feature of Cargo instead of the path override feature. This is +documented online at the url below for more information. + +http://doc.crates.io/specifying-dependencies.html#overriding-dependencies + +[DOWNLOADING] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] 
+", + ), + ); +} + +#[test] +fn override_an_override() { + Package::new("chrono", "0.2.0") + .dep("serde", "< 0.9") + .publish(); + Package::new("serde", "0.7.0") + .file("src/lib.rs", "pub fn serde07() {}") + .publish(); + Package::new("serde", "0.8.0") + .file("src/lib.rs", "pub fn serde08() {}") + .publish(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + chrono = "0.2" + serde = "0.8" + + [replace] + "chrono:0.2.0" = { path = "chrono" } + "serde:0.8.0" = { path = "serde" } + "#, + ) + .file( + "Cargo.lock", + r#" + [[package]] + name = "local" + version = "0.0.1" + dependencies = [ + "chrono 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "chrono" + version = "0.2.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + replace = "chrono 0.2.0" + + [[package]] + name = "chrono" + version = "0.2.0" + dependencies = [ + "serde 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "serde" + version = "0.7.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + + [[package]] + name = "serde" + version = "0.8.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + replace = "serde 0.8.0" + + [[package]] + name = "serde" + version = "0.8.0" + "#, + ) + .file( + "src/lib.rs", + " + extern crate chrono; + extern crate serde; + + pub fn local() { + chrono::chrono(); + serde::serde08_override(); + } + ", + ) + .file( + "chrono/Cargo.toml", + r#" + [package] + name = "chrono" + version = "0.2.0" + authors = [] + + [dependencies] + serde = "< 0.9" + "#, + ) + .file( + "chrono/src/lib.rs", + " + extern crate serde; + pub fn chrono() { + serde::serde07(); + } + ", + ) + .file( + "serde/Cargo.toml", + r#" + [package] + name = "serde" + version = "0.8.0" + authors = [] + "#, + ) + .file( + "serde/src/lib.rs", + " + pub fn serde08_override() {} + ", + ) + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn overriding_nonexistent_no_spurious() { + Package::new("foo", "0.1.0").dep("bar", "0.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{url}' }} + "bar:0.1.0" = {{ git = '{url}' }} + "#, + url = foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that( + p.cargo("build"), + execs() + .with_status(0) + .with_stderr( + "\ +[WARNING] package replacement is not used: [..]bar:0.1.0 +[FINISHED] [..] 
+", + ) + .with_stdout(""), + ); +} + +#[test] +fn no_warnings_when_replace_is_used_in_another_workspace_member() { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project("ws") + .file( + "Cargo.toml", + r#" + [workspace] + members = [ "first_crate", "second_crate"] + + [replace] + "foo:0.1.0" = { path = "local_foo" }"#, + ) + .file( + "first_crate/Cargo.toml", + r#" + [package] + name = "first_crate" + version = "0.1.0" + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("first_crate/src/lib.rs", "") + .file( + "second_crate/Cargo.toml", + r#" + [package] + name = "second_crate" + version = "0.1.0" + "#, + ) + .file("second_crate/src/lib.rs", "") + .file( + "local_foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + "#, + ) + .file("local_foo/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").cwd(p.root().join("first_crate")), + execs().with_status(0).with_stdout("").with_stderr( + "\ +[UPDATING] registry `[..]` +[COMPILING] foo v0.1.0 ([..]) +[COMPILING] first_crate v0.1.0 ([..]) +[FINISHED] [..]", + ), + ); + + assert_that( + p.cargo("build").cwd(p.root().join("second_crate")), + execs().with_status(0).with_stdout("").with_stderr( + "\ +[COMPILING] second_crate v0.1.0 ([..]) +[FINISHED] [..]", + ), + ); +} + +#[test] +fn override_to_path_dep() { + Package::new("foo", "0.1.0").dep("bar", "0.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("foo/src/lib.rs", "") + .file( + "foo/bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("foo/bar/src/lib.rs", "") + .file( + ".cargo/config", + r#" + paths = ["foo"] + "#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn replace_to_path_dep() { + Package::new("foo", "0.1.0").dep("bar", "0.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = { path = "foo" } + "#, + ) + .file("src/lib.rs", "extern crate foo;") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file( + "foo/src/lib.rs", + " + extern crate bar; + + pub fn foo() { + bar::bar(); + } + ", + ) + .file( + "foo/bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("foo/bar/src/lib.rs", "pub fn bar() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn paths_ok_with_optional() { + Package::new("bar", "0.1.0").publish(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", optional = true } + "#, + ) + .file("foo/src/lib.rs", "") + .file( + "foo2/Cargo.toml", + r#" + [package] + name = "foo" + 
version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", optional = true } + "#, + ) + .file("foo2/src/lib.rs", "") + .file( + ".cargo/config", + r#" + paths = ["foo2"] + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.1.0 ([..]foo2) +[COMPILING] local v0.0.1 ([..]) +[FINISHED] [..] +", + ), + ); +} + +#[test] +fn paths_add_optional_bad() { + Package::new("bar", "0.1.0").publish(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("foo/src/lib.rs", "") + .file( + "foo2/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", optional = true } + "#, + ) + .file("foo2/src/lib.rs", "") + .file( + ".cargo/config", + r#" + paths = ["foo2"] + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr_contains( + "\ +warning: path override for crate `foo` has altered the original list of +dependencies; the dependency on `bar` was either added or\ +", + ), + ); +} + +#[test] +fn override_with_default_feature() { + Package::new("another", "0.1.0").publish(); + Package::new("another", "0.1.1").dep("bar", "0.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project("local") + .file( + "Cargo.toml", + r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar", default-features = false } + another = "0.1" + another2 = { path = "another2" } + + [replace] + 'bar:0.1.0' = { path = "bar" } + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + + fn main() { + bar::bar(); + } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [features] + default = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[cfg(feature = "default")] + pub fn bar() {} + "#, + ) + .file( + "another2/Cargo.toml", + r#" + [package] + name = "another2" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", default-features = false } + "#, + ) + .file("another2/src/lib.rs", "") + .build(); + + assert_that(p.cargo("run"), execs().with_status(0)); +} + +#[test] +fn override_plus_dep() { + Package::new("bar", "0.1.0").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [replace] + 'bar:0.1.0' = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = ".." 
} + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs() + .with_status(101) + .with_stderr_contains("error: cyclic package dependency: [..]"), + ); +} diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs new file mode 100644 index 000000000..300b69d63 --- /dev/null +++ b/tests/testsuite/package.rs @@ -0,0 +1,1423 @@ +use std; +use std::fs::File; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; + +use git2; +use cargotest::{cargo_process, process, ChannelChanger}; +use cargotest::support::{cargo_exe, execs, git, paths, project, registry, path2url}; +use cargotest::support::registry::Package; +use flate2::read::GzDecoder; +use hamcrest::{assert_that, contains, existing_file}; +use tar::Archive; + +#[test] +fn simple() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = ["*.txt"] + license = "MIT" + description = "foo" + "#) + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .file("src/bar.txt", "") // should be ignored when packaging + .build(); + + assert_that( + p.cargo("package"), + execs().with_status(0).with_stderr(&format!( + "\ +[WARNING] manifest has no documentation[..] +See [..] +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + assert_that( + &p.root().join("target/package/foo-0.0.1.crate"), + existing_file(), + ); + assert_that( + p.cargo("package").arg("-l"), + execs().with_status(0).with_stdout( + "\ +Cargo.toml +src[/]main.rs +", + ), + ); + assert_that(p.cargo("package"), execs().with_status(0).with_stdout("")); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + let mut rdr = GzDecoder::new(f); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for f in ar.entries().unwrap() { + let f = f.unwrap(); + let fname = f.header().path_bytes(); + let fname = &*fname; + assert!( + fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig" + || fname == b"foo-0.0.1/src/main.rs", + "unexpected filename: {:?}", + f.header().path() + ) + } +} + +#[test] +fn metadata_warning() { + let p = project("all") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .build(); + assert_that( + p.cargo("package"), + execs().with_status(0).with_stderr(&format!( + "\ +warning: manifest has no description, license, license-file, documentation, \ +homepage or repository. +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + + let p = project("one") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .build(); + assert_that( + p.cargo("package"), + execs().with_status(0).with_stderr(&format!( + "\ +warning: manifest has no description, documentation, homepage or repository. +See http://doc.crates.io/manifest.html#package-metadata for more info. 
+[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + + let p = project("all") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .build(); + assert_that( + p.cargo("package"), + execs().with_status(0).with_stderr(&format!( + "\ +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); +} + +#[test] +fn package_verbose() { + let root = paths::root().join("all"); + let p = git::repo(&root) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + let mut cargo = cargo_process(); + cargo.cwd(p.root()); + assert_that(cargo.clone().arg("build"), execs().with_status(0)); + + println!("package main repo"); + assert_that( + cargo.clone().arg("package").arg("-v").arg("--no-verify"), + execs().with_status(0).with_stderr( + "\ +[WARNING] manifest has no description[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([..]) +[ARCHIVING] [..] +[ARCHIVING] [..] +", + ), + ); + + println!("package sub-repo"); + assert_that( + cargo + .arg("package") + .arg("-v") + .arg("--no-verify") + .cwd(p.root().join("a")), + execs().with_status(0).with_stderr( + "\ +[WARNING] manifest has no description[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] a v0.0.1 ([..]) +[ARCHIVING] [..] +[ARCHIVING] [..] +", + ), + ); +} + +#[test] +fn package_verification() { + let p = project("all") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .build(); + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that( + p.cargo("package"), + execs().with_status(0).with_stderr(&format!( + "\ +[WARNING] manifest has no description[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); +} + +#[test] +fn path_dependency_no_version() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("package"), + execs().with_status(101).with_stderr( + "\ +[WARNING] manifest has no documentation, homepage or repository. +See http://doc.crates.io/manifest.html#package-metadata for more info. +[ERROR] all path dependencies must have a version specified when packaging. +dependency `bar` does not specify a version. 
+", + ), + ); +} + +#[test] +fn exclude() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = [ + "*.txt", + # file in root + "file_root_1", # NO_CHANGE (ignored) + "/file_root_2", # CHANGING (packaged -> ignored) + "file_root_3/", # NO_CHANGE (packaged) + "file_root_4/*", # NO_CHANGE (packaged) + "file_root_5/**", # NO_CHANGE (packaged) + # file in sub-dir + "file_deep_1", # CHANGING (packaged -> ignored) + "/file_deep_2", # NO_CHANGE (packaged) + "file_deep_3/", # NO_CHANGE (packaged) + "file_deep_4/*", # NO_CHANGE (packaged) + "file_deep_5/**", # NO_CHANGE (packaged) + # dir in root + "dir_root_1", # CHANGING (packaged -> ignored) + "/dir_root_2", # CHANGING (packaged -> ignored) + "dir_root_3/", # CHANGING (packaged -> ignored) + "dir_root_4/*", # NO_CHANGE (ignored) + "dir_root_5/**", # NO_CHANGE (ignored) + # dir in sub-dir + "dir_deep_1", # CHANGING (packaged -> ignored) + "/dir_deep_2", # NO_CHANGE + "dir_deep_3/", # CHANGING (packaged -> ignored) + "dir_deep_4/*", # CHANGING (packaged -> ignored) + "dir_deep_5/**", # CHANGING (packaged -> ignored) + ] + "#) + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .file("bar.txt", "") + .file("src/bar.txt", "") + // file in root + .file("file_root_1", "") + .file("file_root_2", "") + .file("file_root_3", "") + .file("file_root_4", "") + .file("file_root_5", "") + // file in sub-dir + .file("some_dir/file_deep_1", "") + .file("some_dir/file_deep_2", "") + .file("some_dir/file_deep_3", "") + .file("some_dir/file_deep_4", "") + .file("some_dir/file_deep_5", "") + // dir in root + .file("dir_root_1/some_dir/file", "") + .file("dir_root_2/some_dir/file", "") + .file("dir_root_3/some_dir/file", "") + .file("dir_root_4/some_dir/file", "") + .file("dir_root_5/some_dir/file", "") + // dir in sub-dir + .file("some_dir/dir_deep_1/some_dir/file", "") + .file("some_dir/dir_deep_2/some_dir/file", "") + .file("some_dir/dir_deep_3/some_dir/file", "") + .file("some_dir/dir_deep_4/some_dir/file", "") + .file("some_dir/dir_deep_5/some_dir/file", "") + .build(); + + assert_that( + p.cargo("package").arg("--no-verify").arg("-v"), + execs().with_status(0).with_stdout("").with_stderr( + "\ +[WARNING] manifest has no description[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([..]) +[WARNING] [..] file `dir_root_1[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `dir_root_2[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `dir_root_3[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `some_dir[/]dir_deep_1[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `some_dir[/]dir_deep_3[/]some_dir[/]file` WILL be excluded [..] +See [..] +[WARNING] [..] file `some_dir[/]file_deep_1` WILL be excluded [..] +See [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] 
+", + ), + ); + + assert_that( + &p.root().join("target/package/foo-0.0.1.crate"), + existing_file(), + ); + + assert_that( + p.cargo("package").arg("-l"), + execs().with_status(0).with_stdout( + "\ +Cargo.toml +dir_root_1[/]some_dir[/]file +dir_root_2[/]some_dir[/]file +dir_root_3[/]some_dir[/]file +file_root_3 +file_root_4 +file_root_5 +some_dir[/]dir_deep_1[/]some_dir[/]file +some_dir[/]dir_deep_2[/]some_dir[/]file +some_dir[/]dir_deep_3[/]some_dir[/]file +some_dir[/]dir_deep_4[/]some_dir[/]file +some_dir[/]dir_deep_5[/]some_dir[/]file +some_dir[/]file_deep_1 +some_dir[/]file_deep_2 +some_dir[/]file_deep_3 +some_dir[/]file_deep_4 +some_dir[/]file_deep_5 +src[/]main.rs +", + ), + ); +} + +#[test] +fn include() { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = ["*.txt"] + include = ["foo.txt", "**/*.rs", "Cargo.toml"] + "#) + .file("foo.txt", "") + .file("src/main.rs", r#" + fn main() { println!("hello"); } + "#) + .file("src/bar.txt", "") // should be ignored when packaging + .build(); + + assert_that( + p.cargo("package").arg("--no-verify").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[WARNING] manifest has no description[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([..]) +[ARCHIVING] [..] +[ARCHIVING] [..] +[ARCHIVING] [..] +", + ), + ); +} + +#[test] +fn package_lib_with_bin() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + extern crate foo; + fn main() {} + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("package").arg("-v"), execs().with_status(0)); +} + +#[test] +fn package_git_submodule() { + let project = git::new("foo", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = ["foo@example.com"] + license = "MIT" + description = "foo" + repository = "foo" + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + }).unwrap(); + let library = git::new("bar", |library| library.file("Makefile", "all:")).unwrap(); + + let repository = git2::Repository::open(&project.root()).unwrap(); + let url = path2url(library.root()).to_string(); + git::add_submodule(&repository, &url, Path::new("bar")); + git::commit(&repository); + + let repository = git2::Repository::open(&project.root().join("bar")).unwrap(); + repository + .reset( + &repository.revparse_single("HEAD").unwrap(), + git2::ResetType::Hard, + None, + ) + .unwrap(); + + assert_that( + cargo_process() + .arg("package") + .cwd(project.root()) + .arg("--no-verify") + .arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[ARCHIVING] bar/Makefile"), + ); +} + +#[test] +fn no_duplicates_from_modified_tracked_files() { + let root = paths::root().join("all"); + let p = git::repo(&root) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .build(); + File::create(p.root().join("src/main.rs")) + .unwrap() + .write_all( + br#" + fn main() { println!("A change!"); } + "#, + ) + .unwrap(); + let mut cargo = cargo_process(); + cargo.cwd(p.root()); + assert_that(cargo.clone().arg("build"), execs().with_status(0)); + assert_that( + cargo.arg("package").arg("--list"), + execs().with_status(0).with_stdout( + "\ +Cargo.toml +src/main.rs +", + ), + ); +} + +#[test] +fn ignore_nested() { + let cargo_toml = r#" 
+ [project] + name = "nested" + version = "0.0.1" + authors = [] + license = "MIT" + description = "nested" + "#; + let main_rs = r#" + fn main() { println!("hello"); } + "#; + let p = project("nested") + .file("Cargo.toml", cargo_toml) + .file("src/main.rs", main_rs) + // If a project happens to contain a copy of itself, we should + // ignore it. + .file("a_dir/nested/Cargo.toml", cargo_toml) + .file("a_dir/nested/src/main.rs", main_rs) + .build(); + + assert_that( + p.cargo("package"), + execs().with_status(0).with_stderr(&format!( + "\ +[WARNING] manifest has no documentation[..] +See http://doc.crates.io/manifest.html#package-metadata for more info. +[PACKAGING] nested v0.0.1 ({dir}) +[VERIFYING] nested v0.0.1 ({dir}) +[COMPILING] nested v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + assert_that( + &p.root().join("target/package/nested-0.0.1.crate"), + existing_file(), + ); + assert_that( + p.cargo("package").arg("-l"), + execs().with_status(0).with_stdout( + "\ +Cargo.toml +src[..]main.rs +", + ), + ); + assert_that(p.cargo("package"), execs().with_status(0).with_stdout("")); + + let f = File::open(&p.root().join("target/package/nested-0.0.1.crate")).unwrap(); + let mut rdr = GzDecoder::new(f); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for f in ar.entries().unwrap() { + let f = f.unwrap(); + let fname = f.header().path_bytes(); + let fname = &*fname; + assert!( + fname == b"nested-0.0.1/Cargo.toml" || fname == b"nested-0.0.1/Cargo.toml.orig" + || fname == b"nested-0.0.1/src/main.rs", + "unexpected filename: {:?}", + f.header().path() + ) + } +} + +#[cfg(unix)] // windows doesn't allow these characters in filenames +#[test] +fn package_weird_characters() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { println!("hello"); } + "#, + ) + .file("src/:foo", "") + .build(); + + assert_that( + p.cargo("package"), + execs().with_status(101).with_stderr( + "\ +warning: [..] +See [..] +[PACKAGING] foo [..] +[ERROR] failed to prepare local package for uploading + +Caused by: + cannot package a filename with a special character `:`: src/:foo +", + ), + ); +} + +#[test] +fn repackage_on_source_change() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { println!("hello"); } + "#, + ) + .build(); + + assert_that(p.cargo("package"), execs().with_status(0)); + + // Add another source file + let mut file = File::create(p.root().join("src").join("foo.rs")).unwrap_or_else(|e| { + panic!( + "could not create file {}: {}", + p.root().join("src/foo.rs").display(), + e + ) + }); + + file.write_all( + br#" + fn main() { println!("foo"); } + "#, + ).unwrap(); + std::mem::drop(file); + + let mut pro = process(&cargo_exe()); + pro.arg("package").cwd(p.root()); + + // Check that cargo rebuilds the tarball + assert_that( + pro, + execs().with_status(0).with_stderr(&format!( + "\ +[WARNING] [..] +See [..] +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = p.url() + )), + ); + + // Check that the tarball contains the added file + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + let mut rdr = GzDecoder::new(f); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + let entries = ar.entries().unwrap(); + let entry_paths = entries + .map(|entry| entry.unwrap().path().unwrap().into_owned()) + .collect::>(); + assert_that( + &entry_paths, + contains(vec![PathBuf::from("foo-0.0.1/src/foo.rs")]), + ); +} + +#[test] +#[cfg(unix)] +fn broken_symlink() { + use std::os::unix::fs; + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = 'foo' + documentation = 'foo' + homepage = 'foo' + repository = 'foo' + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { println!("hello"); } + "#, + ) + .build(); + t!(fs::symlink("nowhere", &p.root().join("src/foo.rs"))); + + assert_that( + p.cargo("package").arg("-v"), + execs().with_status(101).with_stderr_contains( + "\ +error: failed to prepare local package for uploading + +Caused by: + failed to open for archiving: `[..]foo.rs` + +Caused by: + [..] +", + ), + ); +} + +#[test] +fn do_not_package_if_repository_is_dirty() { + let p = project("foo").build(); + + // Create a Git repository containing a minimal Rust project. + let _ = git::repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + // Modify Cargo.toml without committing the change. + p.change_file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + # change + "#, + ); + + assert_that( + p.cargo("package"), + execs().with_status(101).with_stderr( + "\ +error: 1 files in the working directory contain changes that were not yet \ +committed into git: + +Cargo.toml + +to proceed despite this, pass the `--allow-dirty` flag +", + ), + ); +} + +#[test] +fn generated_manifest() { + Package::new("abc", "1.0.0").publish(); + Package::new("def", "1.0.0").alternative(true).publish(); + Package::new("ghi", "1.0.0").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = ["*.txt"] + license = "MIT" + description = "foo" + + [project.metadata] + foo = 'bar' + + [workspace] + + [dependencies] + bar = { path = "bar", version = "0.1" } + def = { version = "1.0", registry = "alternative" } + ghi = "1.0" + abc = "1.0" + "#, + ) + .file("src/main.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("package") + .masquerade_as_nightly_cargo() + .arg("--no-verify"), + execs().with_status(0), + ); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + let mut rdr = GzDecoder::new(f); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + let mut entry = ar.entries() + .unwrap() + .map(|f| f.unwrap()) + .find(|e| e.path().unwrap().ends_with("Cargo.toml")) + .unwrap(); + let 
mut contents = String::new(); + entry.read_to_string(&mut contents).unwrap(); + // BTreeMap makes the order of dependencies in the generated file deterministic + // by sorting alphabetically + assert_eq!( + &contents[..], + &*format!( + r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +cargo-features = ["alternative-registries"] + +[package] +name = "foo" +version = "0.0.1" +authors = [] +exclude = ["*.txt"] +description = "foo" +license = "MIT" + +[package.metadata] +foo = "bar" +[dependencies.abc] +version = "1.0" + +[dependencies.bar] +version = "0.1" + +[dependencies.def] +version = "1.0" +registry-index = "{}" + +[dependencies.ghi] +version = "1.0" +"#, + registry::alt_registry() + ) + ); +} + +#[test] +fn ignore_workspace_specifier() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + + authors = [] + + [workspace] + + [dependencies] + bar = { path = "bar", version = "0.1" } + "#, + ) + .file("src/main.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("package") + .arg("--no-verify") + .cwd(p.root().join("bar")), + execs().with_status(0), + ); + + let f = File::open(&p.root().join("target/package/bar-0.1.0.crate")).unwrap(); + let mut rdr = GzDecoder::new(f); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + let mut entry = ar.entries() + .unwrap() + .map(|f| f.unwrap()) + .find(|e| e.path().unwrap().ends_with("Cargo.toml")) + .unwrap(); + let mut contents = String::new(); + entry.read_to_string(&mut contents).unwrap(); + assert_eq!( + &contents[..], + r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g. crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "bar" +version = "0.1.0" +authors = [] +"# + ); +} + +#[test] +fn package_two_kinds_of_deps() { + Package::new("other", "1.0.0").publish(); + Package::new("other1", "1.0.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + other = "1.0" + other1 = { version = "1.0" } + "#, + ) + .file("src/main.rs", "") + .build(); + + assert_that( + p.cargo("package").arg("--no-verify"), + execs().with_status(0), + ); +} + +#[test] +fn test_epoch() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["epoch"] + [package] + name = "foo" + version = "0.0.1" + authors = [] + rust = "2018" + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("build").arg("-v").masquerade_as_nightly_cargo(), + execs() + // -Zepoch is still in flux and we're not passing -Zunstable-options + // from Cargo so it will probably error. Only partially match the output + // until stuff stabilizes + .with_stderr_contains(format!("\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -Zepoch=2018 -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +", dir = p.root().display(), url = p.url())), + ); +} + +#[test] +fn test_epoch_missing() { + // no epoch = 2015 + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["epoch"] + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("build").arg("-v").masquerade_as_nightly_cargo(), + execs() + // -Zepoch is still in flux and we're not passing -Zunstable-options + // from Cargo so it will probably error. Only partially match the output + // until stuff stabilizes + .with_stderr_contains(format!("\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -Zepoch=2015 -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] 
\ + -L dependency={dir}[/]target[/]debug[/]deps` +", dir = p.root().display(), url = p.url())), + ); +} + +#[test] +fn test_epoch_malformed() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["epoch"] + [package] + name = "foo" + version = "0.0.1" + authors = [] + rust = "chicken" + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("build").arg("-v").masquerade_as_nightly_cargo(), + execs().with_status(101).with_stderr(format!( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the `rust` key must be one of: `2015`, `2018` +" + )), + ); +} + +#[test] +fn test_epoch_nightly() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + rust = "2015" + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("build").arg("-v").masquerade_as_nightly_cargo(), + execs().with_status(101).with_stderr(format!( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + epoches are unstable + +Caused by: + feature `epoch` is required + +consider adding `cargo-features = [\"epoch\"]` to the manifest +" + )), + ); +} + +#[test] +fn package_lockfile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["publish-lockfile"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish-lockfile = true + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("package").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stderr(&format!( + "\ +[WARNING] manifest has no documentation[..] +See [..] +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + assert_that( + &p.root().join("target/package/foo-0.0.1.crate"), + existing_file(), + ); + assert_that( + p.cargo("package").arg("-l").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stdout( + "\ +Cargo.lock +Cargo.toml +src[/]main.rs +", + ), + ); + assert_that( + p.cargo("package").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stdout(""), + ); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + let mut rdr = GzDecoder::new(f); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for f in ar.entries().unwrap() { + let f = f.unwrap(); + let fname = f.header().path_bytes(); + let fname = &*fname; + assert!( + fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig" + || fname == b"foo-0.0.1/Cargo.lock" + || fname == b"foo-0.0.1/src/main.rs", + "unexpected filename: {:?}", + f.header().path() + ) + } +} + +#[test] +fn package_lockfile_git_repo() { + let p = project("foo").build(); + + // Create a Git repository containing a minimal Rust project. 
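+    // The manifest below enables publish-lockfile, so the `package -l`
+    // listing asserted afterwards is expected to include Cargo.lock too.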
+ let _ = git::repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + cargo-features = ["publish-lockfile"] + + [project] + name = "foo" + version = "0.0.1" + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + publish-lockfile = true + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that( + p.cargo("package").arg("-l").masquerade_as_nightly_cargo(), + execs().with_status(0).with_stdout( + "\ +Cargo.lock +Cargo.toml +src/main.rs +", + ), + ); +} + +#[test] +fn no_lock_file_with_library() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["publish-lockfile"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish-lockfile = true + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("package").masquerade_as_nightly_cargo(), + execs().with_status(0), + ); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + let mut rdr = GzDecoder::new(f); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for f in ar.entries().unwrap() { + let f = f.unwrap(); + let fname = f.header().path().unwrap(); + assert!(!fname.ends_with("Cargo.lock")); + } +} + +#[test] +fn lock_file_and_workspace() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + cargo-features = ["publish-lockfile"] + + [package] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish-lockfile = true + "#, + ) + .file("foo/src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("package") + .cwd(p.root().join("foo")) + .masquerade_as_nightly_cargo(), + execs().with_status(0), + ); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + let mut rdr = GzDecoder::new(f); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + assert!(ar.entries().unwrap().into_iter().any(|f| { + let f = f.unwrap(); + let fname = f.header().path().unwrap(); + fname.ends_with("Cargo.lock") + })); +} diff --git a/tests/testsuite/patch.rs b/tests/testsuite/patch.rs new file mode 100644 index 000000000..2e47ba84f --- /dev/null +++ b/tests/testsuite/patch.rs @@ -0,0 +1,1111 @@ +use std::fs::{self, File}; +use std::io::{Read, Write}; + +use toml; +use cargotest::support::git; +use cargotest::support::paths; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn replace() { + Package::new("foo", "0.1.0").publish(); + Package::new("deep-foo", "0.1.0") + .file( + "src/lib.rs", + r#" + extern crate foo; + pub fn deep() { + foo::foo(); + } + "#, + ) + .dep("foo", "0.1.0") + .publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + deep-foo = "0.1.0" + + [patch.crates-io] + foo = { path = "foo" } + "#, + ) + .file( + "src/lib.rs", + " + extern crate foo; + extern crate deep_foo; + pub fn bar() { + foo::foo(); + deep_foo::deep(); + } + ", + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "foo/src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + 
execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[DOWNLOADING] deep-foo v0.1.0 ([..]) +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] deep-foo v0.1.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("build"), //.env("RUST_LOG", "trace"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} + +#[test] +fn nonexistent() { + Package::new("baz", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [patch.crates-io] + foo = { path = "foo" } + "#, + ) + .file( + "src/lib.rs", + " + extern crate foo; + pub fn bar() { + foo::foo(); + } + ", + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "foo/src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} + +#[test] +fn patch_git() { + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + let p = project("bar") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = {{ git = '{}' }} + + [patch.'{0}'] + foo = {{ path = "foo" }} + "#, + foo.url() + ), + ) + .file( + "src/lib.rs", + " + extern crate foo; + pub fn bar() { + foo::foo(); + } + ", + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "foo/src/lib.rs", + r#" + pub fn foo() {} + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] git repository `file://[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} + +#[test] +fn patch_to_git() { + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [patch.crates-io] + foo = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file( + "src/lib.rs", + " + extern crate foo; + pub fn bar() { + foo::foo(); + } + ", + ) + .build(); + + assert_that( + p.cargo("build"), //.env("RUST_LOG", "cargo=trace"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] git repository `file://[..]` +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} + +#[test] +fn unused() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [patch.crates-io] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#, + ) + .file( + "foo/src/lib.rs", + r#" + not rust code + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[DOWNLOADING] foo v0.1.0 [..] +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); + + // unused patch should be in the lock file + let mut lock = String::new(); + File::open(p.root().join("Cargo.lock")) + .unwrap() + .read_to_string(&mut lock) + .unwrap(); + let toml: toml::Value = toml::from_str(&lock).unwrap(); + assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1); + assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("foo")); + assert_eq!( + toml["patch"]["unused"][0]["version"].as_str(), + Some("0.2.0") + ); +} + +#[test] +fn unused_git() { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + let p = project("bar") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [patch.crates-io] + foo = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] git repository `file://[..]` +[UPDATING] registry `file://[..]` +[DOWNLOADING] foo v0.1.0 [..] +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} + +#[test] +fn add_patch() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[DOWNLOADING] foo v0.1.0 [..] +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all( + br#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [patch.crates-io] + foo = { path = 'foo' } + "# + )); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.1.0 (file://[..]) +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} + +#[test] +fn add_ignored_patch() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.1" + authors = [] + "#, + ) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[DOWNLOADING] foo v0.1.0 [..] +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all( + br#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [patch.crates-io] + foo = { path = 'foo' } + "# + )); + + assert_that( + p.cargo("build"), + execs() + .with_status(0) + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"), + ); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} + +#[test] +fn new_minor() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [patch.crates-io] + foo = { path = 'foo' } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.1" + authors = [] + "#, + ) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.1.1 [..] +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn transitive_new_minor() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + subdir = { path = 'subdir' } + + [patch.crates-io] + foo = { path = 'foo' } + "#, + ) + .file("src/lib.rs", "") + .file( + "subdir/Cargo.toml", + r#" + [package] + name = "subdir" + version = "0.1.0" + authors = [] + + [dependencies] + foo = '0.1.0' + "#, + ) + .file("subdir/src/lib.rs", r#""#) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.1" + authors = [] + "#, + ) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.1.1 [..] +[COMPILING] subdir v0.1.0 [..] 
+[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn new_major() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.2.0" + + [patch.crates-io] + foo = { path = 'foo' } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#, + ) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.2.0 [..] +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + Package::new("foo", "0.2.0").publish(); + assert_that(p.cargo("update"), execs().with_status(0)); + assert_that( + p.cargo("build"), + execs() + .with_status(0) + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"), + ); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all( + br#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.2.0" + "# + )); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[DOWNLOADING] foo v0.2.0 [..] +[COMPILING] foo v0.2.0 +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn transitive_new_major() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + subdir = { path = 'subdir' } + + [patch.crates-io] + foo = { path = 'foo' } + "#, + ) + .file("src/lib.rs", "") + .file( + "subdir/Cargo.toml", + r#" + [package] + name = "subdir" + version = "0.1.0" + authors = [] + + [dependencies] + foo = '0.2.0' + "#, + ) + .file("subdir/src/lib.rs", r#""#) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#, + ) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +[COMPILING] foo v0.2.0 [..] +[COMPILING] subdir v0.1.0 [..] +[COMPILING] bar v0.0.1 (file://[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn remove_patch() { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [patch.crates-io] + foo = { path = 'foo' } + bar = { path = 'bar' } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("foo/src/lib.rs", r#""#) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/lib.rs", r#""#) + .build(); + + // Generate a lock file where `bar` is unused + assert_that(p.cargo("build"), execs().with_status(0)); + let mut lock_file1 = String::new(); + File::open(p.root().join("Cargo.lock")) + .unwrap() + .read_to_string(&mut lock_file1) + .unwrap(); + + // Remove `bar` and generate a new lock file form the old one + File::create(p.root().join("Cargo.toml")) + .unwrap() + .write_all( + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [patch.crates-io] + foo = { path = 'foo' } + "#.as_bytes(), + ) + .unwrap(); + assert_that(p.cargo("build"), execs().with_status(0)); + let mut lock_file2 = String::new(); + File::open(p.root().join("Cargo.lock")) + .unwrap() + .read_to_string(&mut lock_file2) + .unwrap(); + + // Remove the lock file and build from scratch + fs::remove_file(p.root().join("Cargo.lock")).unwrap(); + assert_that(p.cargo("build"), execs().with_status(0)); + let mut lock_file3 = String::new(); + File::open(p.root().join("Cargo.lock")) + .unwrap() + .read_to_string(&mut lock_file3) + .unwrap(); + + assert!(lock_file1.contains("bar")); + assert_eq!(lock_file2, lock_file3); + assert_ne!(lock_file1, lock_file2); +} + +#[test] +fn non_crates_io() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [patch.some-other-source] + foo = { path = 'foo' } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + invalid url `some-other-source`: relative URL without a base +", + ), + ); +} + +#[test] +fn replace_with_crates_io() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [patch.crates-io] + foo = "0.1" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("foo/src/lib.rs", r#""#) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] [..] 
+error: failed to resolve patches for `[..]` + +Caused by: + patch for `foo` in `[..]` points to the same source, but patches must point \ + to different sources +", + ), + ); +} + +#[test] +fn patch_in_virtual() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + + [patch.crates-io] + foo = { path = "foo" } + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("foo/src/lib.rs", r#""#) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1" + "#, + ) + .file("bar/src/lib.rs", r#""#) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} + +#[test] +fn patch_depends_on_another_patch() { + Package::new("foo", "0.1.0") + .file("src/lib.rs", "broken code") + .publish(); + + Package::new("bar", "0.1.0") + .dep("foo", "0.1") + .file("src/lib.rs", "broken code") + .publish(); + + let p = project("p") + .file( + "Cargo.toml", + r#" + [package] + name = "p" + authors = [] + version = "0.1.0" + + [dependencies] + foo = "0.1" + bar = "0.1" + + [patch.crates-io] + foo = { path = "foo" } + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.1" + authors = [] + "#, + ) + .file("foo/src/lib.rs", r#""#) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.1" + authors = [] + + [dependencies] + foo = "0.1" + "#, + ) + .file("bar/src/lib.rs", r#""#) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + // Nothing should be rebuilt, no registry should be updated. + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} diff --git a/tests/testsuite/path.rs b/tests/testsuite/path.rs new file mode 100644 index 000000000..c3eb71a0e --- /dev/null +++ b/tests/testsuite/path.rs @@ -0,0 +1,1316 @@ +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargo::util::process; +use cargotest; +use cargotest::sleep_ms; +use cargotest::support::paths::{self, CargoPathExt}; +use cargotest::support::{execs, main_file, project}; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_file}; + +#[test] +#[cfg(not(windows))] // I have no idea why this is failing spuriously on + // Windows, for more info see #3466. 
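+// Exercises the `path = "..."` dependency shorthand with a nested chain
+// (foo -> bar -> bar/baz): a full build should compile the chain in
+// dependency order, and after `cargo clean` a `-p <pkg>` build should
+// recompile only what the requested package still needs.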
+fn cargo_compile_with_nested_deps_shorthand() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + + version = "0.5.0" + path = "baz" + + [lib] + + name = "bar" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#, + ) + .file( + "bar/baz/Cargo.toml", + r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "baz" + "#, + ) + .file( + "bar/baz/src/baz.rs", + r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "[COMPILING] baz v0.5.0 ({}/bar/baz)\n\ + [COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url(), + p.url() + )), + ); + + assert_that(&p.bin("foo"), existing_file()); + + assert_that( + process(&p.bin("foo")), + execs().with_stdout("test passed\n").with_status(0), + ); + + println!("cleaning"); + assert_that( + p.cargo("clean").arg("-v"), + execs().with_stdout("").with_status(0), + ); + println!("building baz"); + assert_that( + p.cargo("build").arg("-p").arg("baz"), + execs().with_status(0).with_stderr(&format!( + "[COMPILING] baz v0.5.0 ({}/bar/baz)\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url() + )), + ); + println!("building foo"); + assert_that( + p.cargo("build").arg("-p").arg("foo"), + execs().with_status(0).with_stderr(&format!( + "[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url() + )), + ); +} + +#[test] +fn cargo_compile_with_root_dev_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.bar] + + version = "0.5.0" + path = "../bar" + + [[bin]] + name = "foo" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .build(); + let _p2 = project("bar") + .file( + "Cargo.toml", + r#" + [package] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn gimme() -> &'static str { + "zoidberg" + } + "#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(101)) +} + +#[test] +fn cargo_compile_with_root_dev_deps_with_testing() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.bar] + + version = "0.5.0" + path = "../bar" + + [[bin]] + name = "foo" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .build(); + let _p2 = project("bar") + .file( + "Cargo.toml", + r#" + [package] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn gimme() -> &'static str { + "zoidberg" + } + "#, + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_stderr( + "\ +[COMPILING] [..] v0.5.0 ([..]) +[COMPILING] [..] 
v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", + ) + .with_stdout_contains("running 0 tests"), + ); +} + +#[test] +fn cargo_compile_with_transitive_dev_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.baz] + + git = "git://example.com/path/to/nowhere" + + [lib] + + name = "bar" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + pub fn gimme() -> &'static str { + "zoidberg" + } + "#, + ) + .build(); + + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n", + p.url(), + p.url() + )), + ); + + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), execs().with_stdout("zoidberg\n")); +} + +#[test] +fn no_rebuild_dependency() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { bar::bar() } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "bar" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + // First time around we should compile both foo and bar + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url() + )), + ); + + sleep_ms(1000); + p.change_file( + "src/main.rs", + r#" + extern crate bar; + fn main() { bar::bar(); } + "#, + ); + // Don't compile bar, but do recompile foo. 
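+    // Only `src/main.rs` was touched, so `bar`'s fingerprint should be
+    // unchanged and the expected output below mentions `foo` alone.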
+ assert_that( + p.cargo("build"), + execs().with_stderr( + "\ + [COMPILING] foo v0.5.0 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ), + ); +} + +#[test] +fn deep_dependencies_trigger_rebuild() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { bar::bar() } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "bar" + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + extern crate baz; + pub fn bar() { baz::baz() } + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "baz" + "#, + ) + .file( + "baz/src/baz.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] baz v0.5.0 ({}/baz)\n\ + [COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url(), + p.url() + )), + ); + assert_that(p.cargo("build"), execs().with_stdout("")); + + // Make sure an update to baz triggers a rebuild of bar + // + // We base recompilation off mtime, so sleep for at least a second to ensure + // that this write will change the mtime. + sleep_ms(1000); + File::create(&p.root().join("baz/src/baz.rs")) + .unwrap() + .write_all( + br#" + pub fn baz() { println!("hello!"); } + "#, + ) + .unwrap(); + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] baz v0.5.0 ({}/baz)\n\ + [COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url(), + p.url() + )), + ); + + // Make sure an update to bar doesn't trigger baz + sleep_ms(1000); + File::create(&p.root().join("bar/src/bar.rs")) + .unwrap() + .write_all( + br#" + extern crate baz; + pub fn bar() { println!("hello!"); baz::baz(); } + "#, + ) + .unwrap(); + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url(), + p.url() + )), + ); +} + +#[test] +fn no_rebuild_two_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + [dependencies.baz] + path = "baz" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { bar::bar() } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "bar" + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + pub fn bar() {} + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [project] + + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "baz" + "#, + ) + .file( + "baz/src/baz.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] baz v0.5.0 ({}/baz)\n\ + [COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] 
target(s) \ + in [..]\n", + p.url(), + p.url(), + p.url() + )), + ); + assert_that(&p.bin("foo"), existing_file()); + assert_that(p.cargo("build"), execs().with_stdout("")); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn nested_deps_recompile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "src/bar" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "src/bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "bar" + "#, + ) + .file("src/bar/src/bar.rs", "pub fn gimme() -> i32 { 92 }") + .build(); + let bar = p.url(); + + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] bar v0.5.0 ({}/src/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + bar, + p.url() + )), + ); + sleep_ms(1000); + + File::create(&p.root().join("src/main.rs")) + .unwrap() + .write_all( + br#" + fn main() {} + "#, + ) + .unwrap(); + + // This shouldn't recompile `bar` + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url() + )), + ); +} + +#[test] +fn error_message_for_missing_manifest() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + path = "src/bar" + "#, + ) + .file("src/lib.rs", "") + .file("src/bar/not-a-manifest", "") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to load source for a dependency on `bar` + +Caused by: + Unable to update file://[..] + +Caused by: + failed to read `[..]bar[/]Cargo.toml` + +Caused by: + [..] 
(os error [..]) +", + ), + ); +} + +#[test] +fn override_relative() { + let bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + fs::create_dir(&paths::root().join(".cargo")).unwrap(); + File::create(&paths::root().join(".cargo/config")) + .unwrap() + .write_all(br#"paths = ["bar"]"#) + .unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = '{}' + "#, + bar.root().display() + ), + ) + .file("src/lib.rs", "") + .build(); + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn override_self() { + let bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + let p = project("foo"); + let root = p.root().clone(); + let p = p.file( + ".cargo/config", + &format!( + r#" + paths = ['{}'] + "#, + root.display() + ), + ).file( + "Cargo.toml", + &format!( + r#" + [package] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = '{}' + + "#, + bar.root().display() + ), + ) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn override_path_dep() { + let bar = project("bar") + .file( + "p1/Cargo.toml", + r#" + [package] + name = "p1" + version = "0.5.0" + authors = [] + + [dependencies.p2] + path = "../p2" + "#, + ) + .file("p1/src/lib.rs", "") + .file( + "p2/Cargo.toml", + r#" + [package] + name = "p2" + version = "0.5.0" + authors = [] + "#, + ) + .file("p2/src/lib.rs", "") + .build(); + + let p = project("foo") + .file( + ".cargo/config", + &format!( + r#" + paths = ['{}', '{}'] + "#, + bar.root().join("p1").display(), + bar.root().join("p2").display() + ), + ) + .file( + "Cargo.toml", + &format!( + r#" + [package] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.p2] + path = '{}' + + "#, + bar.root().join("p2").display() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn path_dep_build_cmd() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [lib] + name = "bar" + path = "src/bar.rs" + "#, + ) + .file( + "bar/build.rs", + r#" + use std::fs; + fn main() { + fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); + } + "#, + ) + .file( + "bar/src/bar.rs.in", + r#" + pub fn gimme() -> i32 { 0 } + "#, + ) + .build(); + p.root().join("bar").move_into_the_past(); + + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n", + p.url(), + p.url() + )), + ); + + assert_that(&p.bin("foo"), existing_file()); + + assert_that(process(&p.bin("foo")), execs().with_stdout("0\n")); + + // Touching bar.rs.in should 
cause the `build` command to run again. + { + let file = fs::File::create(&p.root().join("bar/src/bar.rs.in")); + file.unwrap() + .write_all(br#"pub fn gimme() -> i32 { 1 }"#) + .unwrap(); + } + + assert_that( + p.cargo("build"), + execs().with_stderr(&format!( + "[COMPILING] bar v0.5.0 ({}/bar)\n\ + [COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n", + p.url(), + p.url() + )), + ); + + assert_that(process(&p.bin("foo")), execs().with_stdout("1\n")); +} + +#[test] +fn dev_deps_no_rebuild_lib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dev-dependencies.bar] + path = "bar" + + [lib] + name = "foo" + doctest = false + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(test)] #[allow(unused_extern_crates)] extern crate bar; + #[cfg(not(test))] pub fn foo() { env!("FOO"); } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + assert_that( + p.cargo("build").env("FOO", "bar"), + execs().with_status(0).with_stderr(&format!( + "[COMPILING] foo v0.5.0 ({})\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + p.url() + )), + ); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] [..] v0.5.0 ({url}[..]) +[COMPILING] [..] v0.5.0 ({url}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", + url = p.url() + )) + .with_stdout_contains("running 0 tests"), + ); +} + +#[test] +fn custom_target_no_rebuild() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies] + a = { path = "a" } + [workspace] + members = ["a", "b"] + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + a = { path = "../a" } + "#, + ) + .file("b/src/lib.rs", "") + .build(); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] a v0.5.0 ([..]) +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + t!(fs::rename( + p.root().join("target"), + p.root().join("target_moved") + )); + assert_that( + p.cargo("build") + .arg("--manifest-path=b/Cargo.toml") + .env("CARGO_TARGET_DIR", "target_moved"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] b v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn override_and_depend() { + let p = project("foo") + .file( + "a/a1/Cargo.toml", + r#" + [project] + name = "a1" + version = "0.5.0" + authors = [] + [dependencies] + a2 = { path = "../a2" } + "#, + ) + .file("a/a1/src/lib.rs", "") + .file( + "a/a2/Cargo.toml", + r#" + [project] + name = "a2" + version = "0.5.0" + authors = [] + "#, + ) + .file("a/a2/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + a1 = { path = "../a/a1" } + a2 = { path = "../a/a2" } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "b/.cargo/config", + r#" + paths = ["../a"] + "#, + ) + .build(); + assert_that( + p.cargo("build").cwd(p.root().join("b")), + execs().with_status(0).with_stderr( + "\ +[COMPILING] a2 v0.5.0 ([..]) +[COMPILING] a1 v0.5.0 ([..]) +[COMPILING] b v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn missing_path_dependency() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + paths = ["../whoa-this-does-not-exist"] + "#, + ) + .build(); + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to update path override `[..]../whoa-this-does-not-exist` \ +(defined in `[..]`) + +Caused by: + failed to read directory `[..]` + +Caused by: + [..] (os error [..]) +", + ), + ); +} + +#[test] +fn invalid_path_dep_in_workspace_with_lockfile() { + Package::new("bar", "1.0.0").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "top" + version = "0.5.0" + authors = [] + + [workspace] + + [dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + // Generate a lock file + assert_that(p.cargo("build"), execs().with_status(0)); + + // Change the dependency on `bar` to an invalid path + File::create(&p.root().join("foo/Cargo.toml")) + .unwrap() + .write_all( + br#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = { path = "" } + "#, + ) + .unwrap(); + + // Make sure we get a nice error. In the past this actually stack + // overflowed! + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: no matching package named `bar` found +location searched: [..] 
+required by package `foo v0.5.0 ([..])` +", + ), + ); +} + +#[test] +fn workspace_produces_rlib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "top" + version = "0.5.0" + authors = [] + + [workspace] + + [dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + assert_that(&p.root().join("target/debug/libtop.rlib"), existing_file()); + assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); +} + +#[test] +fn thin_lto_works() { + if !cargotest::is_nightly() { + return; + } + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "top" + version = "0.5.0" + authors = [] + + [profile.release] + lto = 'thin' + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("--release").arg("-v"), + execs().with_stderr( + "\ +[COMPILING] top [..] +[RUNNING] `rustc [..] -C lto=thin [..]` +[FINISHED] [..] +", + ), + ); +} diff --git a/tests/testsuite/plugins.rs b/tests/testsuite/plugins.rs new file mode 100644 index 000000000..318a29524 --- /dev/null +++ b/tests/testsuite/plugins.rs @@ -0,0 +1,474 @@ +use std::fs; +use std::env; + +use cargotest::{is_nightly, rustc_host}; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn plugin_to_the_max() { + if !is_nightly() { + return; + } + + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo_lib" + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(plugin)] + #![plugin(bar)] + extern crate foo_lib; + + fn main() { foo_lib::foo(); } + "#, + ) + .file( + "src/foo_lib.rs", + r#" + #![feature(plugin)] + #![plugin(bar)] + + pub fn foo() {} + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(plugin_registrar, rustc_private)] + + extern crate rustc_plugin; + extern crate baz; + + use rustc_plugin::Registry; + + #[plugin_registrar] + pub fn foo(_reg: &mut Registry) { + println!("{}", baz::baz()); + } + "#, + ) + .build(); + let _baz = project("baz") + .file( + "Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + + [lib] + name = "baz" + crate_type = ["dylib"] + "#, + ) + .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") + .build(); + + assert_that(foo.cargo("build"), execs().with_status(0)); + assert_that(foo.cargo("doc"), execs().with_status(0)); +} + +#[test] +fn plugin_with_dynamic_native_dependency() { + if !is_nightly() { + return; + } + + let workspace = project("ws") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["builder", "foo"] + "#, + ) + .build(); + + let build = project("ws/builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + + [lib] + name = "builder" + crate-type = ["dylib"] + "#, + ) + .file( + "src/lib.rs", + r#" + #[no_mangle] + pub extern fn foo() {} + "#, + ) + .build(); + + let foo = project("ws/foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = 
"bar" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(plugin)] + #![plugin(bar)] + + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = 'build.rs' + + [lib] + name = "bar" + plugin = true + "#, + ) + .file( + "bar/build.rs", + r#" + use std::path::PathBuf; + use std::env; + + fn main() { + let src = PathBuf::from(env::var("SRC").unwrap()); + println!("cargo:rustc-flags=-L {}/deps", src.parent().unwrap().display()); + } + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #![feature(plugin_registrar, rustc_private)] + extern crate rustc_plugin; + + use rustc_plugin::Registry; + + #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))] + #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))] + extern { fn foo(); } + + #[plugin_registrar] + pub fn bar(_reg: &mut Registry) { + unsafe { foo() } + } + "#, + ) + .build(); + + assert_that(build.cargo("build"), execs().with_status(0)); + + let src = workspace.root().join("target/debug"); + let lib = fs::read_dir(&src) + .unwrap() + .map(|s| s.unwrap().path()) + .find(|lib| { + let lib = lib.file_name().unwrap().to_str().unwrap(); + lib.starts_with(env::consts::DLL_PREFIX) && lib.ends_with(env::consts::DLL_SUFFIX) + }) + .unwrap(); + + assert_that( + foo.cargo("build").env("SRC", &lib).arg("-v"), + execs().with_status(0), + ); +} + +#[test] +fn plugin_integration() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [lib] + name = "foo" + plugin = true + doctest = false + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .file("tests/it_works.rs", "") + .build(); + + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); +} + +#[test] +fn doctest_a_plugin() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + r#" + #[macro_use] + extern crate bar; + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() {} + "#, + ) + .build(); + + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); +} + +// See #1515 +#[test] +fn native_plugin_dependency_with_custom_ar_linker() { + let target = rustc_host(); + + let _foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + "#, + ) + .file("src/lib.rs", "") + .build(); + + let bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = "../foo" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + ar = "nonexistent-ar" + linker = "nonexistent-linker" + "#, + target + ), + ) + .build(); + + assert_that( + bar.cargo("build").arg("--verbose"), + execs().with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]` +[ERROR] [..]linker[..] 
+", + ), + ); +} + +#[test] +fn panic_abort_plugins() { + if !is_nightly() { + return; + } + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [profile.dev] + panic = 'abort' + + [dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + "#, + ) + .file( + "foo/src/lib.rs", + r#" + #![feature(rustc_private)] + extern crate syntax; + "#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn shared_panic_abort_plugins() { + if !is_nightly() { + return; + } + + let p = project("top") + .file( + "Cargo.toml", + r#" + [package] + name = "top" + version = "0.0.1" + authors = [] + + [profile.dev] + panic = 'abort' + + [dependencies] + foo = { path = "foo" } + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + " + extern crate bar; + ", + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + + [dependencies] + bar = { path = "../bar" } + "#, + ) + .file( + "foo/src/lib.rs", + r#" + #![feature(rustc_private)] + extern crate syntax; + extern crate bar; + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} diff --git a/tests/testsuite/proc_macro.rs b/tests/testsuite/proc_macro.rs new file mode 100644 index 000000000..084a815b8 --- /dev/null +++ b/tests/testsuite/proc_macro.rs @@ -0,0 +1,329 @@ +use cargotest::is_nightly; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn probe_cfg_before_crate_type_discovery() { + if !is_nightly() { + return; + } + + let client = project("client") + .file( + "Cargo.toml", + r#" + [package] + name = "client" + version = "0.0.1" + authors = [] + + [target.'cfg(not(stage300))'.dependencies.noop] + path = "../noop" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(proc_macro)] + + #[macro_use] + extern crate noop; + + #[derive(Noop)] + struct X; + + fn main() {} + "#, + ) + .build(); + let _noop = project("noop") + .file( + "Cargo.toml", + r#" + [package] + name = "noop" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(proc_macro, proc_macro_lib)] + + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Noop)] + pub fn noop(_input: TokenStream) -> TokenStream { + "".parse().unwrap() + } + "#, + ) + .build(); + + assert_that(client.cargo("build"), execs().with_status(0)); +} + +#[test] +fn noop() { + if !is_nightly() { + return; + } + + let client = project("client") + .file( + "Cargo.toml", + r#" + [package] + name = "client" + version = "0.0.1" + authors = [] + + [dependencies.noop] + path = "../noop" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(proc_macro)] + + #[macro_use] + extern crate noop; + + #[derive(Noop)] + struct X; + + fn main() {} + "#, + ) + .build(); + let _noop = project("noop") + .file( + "Cargo.toml", + r#" + [package] + name = "noop" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(proc_macro, proc_macro_lib)] + + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Noop)] + pub fn noop(_input: TokenStream) -> 
TokenStream { + "".parse().unwrap() + } + "#, + ) + .build(); + + assert_that(client.cargo("build"), execs().with_status(0)); + assert_that(client.cargo("build"), execs().with_status(0)); +} + +#[test] +fn impl_and_derive() { + if !is_nightly() { + return; + } + + let client = project("client") + .file( + "Cargo.toml", + r#" + [package] + name = "client" + version = "0.0.1" + authors = [] + + [dependencies.transmogrify] + path = "../transmogrify" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(proc_macro)] + + #[macro_use] + extern crate transmogrify; + + trait ImplByTransmogrify { + fn impl_by_transmogrify(&self) -> bool; + } + + #[derive(Transmogrify, Debug)] + struct X { success: bool } + + fn main() { + let x = X::new(); + assert!(x.impl_by_transmogrify()); + println!("{:?}", x); + } + "#, + ) + .build(); + let _transmogrify = project("transmogrify") + .file( + "Cargo.toml", + r#" + [package] + name = "transmogrify" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(proc_macro, proc_macro_lib)] + + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Transmogrify)] + #[doc(hidden)] + pub fn transmogrify(input: TokenStream) -> TokenStream { + " + impl X { + fn new() -> Self { + X { success: true } + } + } + + impl ImplByTransmogrify for X { + fn impl_by_transmogrify(&self) -> bool { + true + } + } + ".parse().unwrap() + } + "#, + ) + .build(); + + assert_that(client.cargo("build"), execs().with_status(0)); + assert_that( + client.cargo("run"), + execs().with_status(0).with_stdout("X { success: true }"), + ); +} + +#[test] +fn plugin_and_proc_macro() { + if !is_nightly() { + return; + } + + let questionable = project("questionable") + .file( + "Cargo.toml", + r#" + [package] + name = "questionable" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(plugin_registrar, rustc_private)] + #![feature(proc_macro, proc_macro_lib)] + + extern crate rustc_plugin; + use rustc_plugin::Registry; + + extern crate proc_macro; + use proc_macro::TokenStream; + + #[plugin_registrar] + pub fn plugin_registrar(reg: &mut Registry) {} + + #[proc_macro_derive(Questionable)] + pub fn questionable(input: TokenStream) -> TokenStream { + input + } + "#, + ) + .build(); + + let msg = " lib.plugin and lib.proc-macro cannot both be true"; + assert_that( + questionable.cargo("build"), + execs().with_status(101).with_stderr_contains(msg), + ); +} + +#[test] +fn proc_macro_doctest() { + if !is_nightly() { + return; + } + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + [lib] + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" +#![feature(proc_macro, proc_macro_lib)] +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::TokenStream; + +/// ``` +/// assert!(true); +/// ``` +#[proc_macro_derive(Bar)] +pub fn derive(_input: TokenStream) -> TokenStream { + "".parse().unwrap() +} + +#[test] +fn a() { + assert!(true); +} +"#, + ) + .build(); + + assert_that( + foo.cargo("test"), + execs() + .with_status(0) + .with_stdout_contains("test a ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 2), + ); +} diff --git a/tests/testsuite/profiles.rs b/tests/testsuite/profiles.rs new file mode 100644 index 000000000..999de518c --- /dev/null +++ b/tests/testsuite/profiles.rs @@ -0,0 +1,342 @@ +use std::env; + +use cargotest::is_nightly; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn profile_overrides() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + opt-level = 1 + debug = false + rpath = true + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level=1 \ + -C debug-assertions=on \ + -C metadata=[..] \ + -C rpath \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [optimized] target(s) in [..] +", + dir = p.root().display(), + url = p.url(), + )), + ); +} + +#[test] +fn opt_level_override_0() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + opt-level = 0 + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] [..] target(s) in [..] +", + dir = p.root().display(), + url = p.url() + )), + ); +} + +#[test] +fn debug_override_1() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + debug = 1 + "#, + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C debuginfo=1 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] [..] target(s) in [..] +", + dir = p.root().display(), + url = p.url() + )), + ); +} + +fn check_opt_level_override(profile_level: &str, rustc_level: &str) { + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + opt-level = {level} + "#, + level = profile_level + ), + ) + .file("src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level={level} \ + -C debuginfo=2 \ + -C debug-assertions=on \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] [..] target(s) in [..] 
+", + dir = p.root().display(), + url = p.url(), + level = rustc_level + )), + ); +} + +#[test] +fn opt_level_overrides() { + if !is_nightly() { + return; + } + + for &(profile_level, rustc_level) in &[ + ("1", "1"), + ("2", "2"), + ("3", "3"), + ("\"s\"", "s"), + ("\"z\"", "z"), + ] { + check_opt_level_override(profile_level, rustc_level) + } +} + +#[test] +fn top_level_overrides_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.release] + opt-level = 1 + debug = true + + [dependencies.foo] + path = "foo" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.0" + authors = [] + + [profile.release] + opt-level = 0 + debug = false + + [lib] + name = "foo" + crate_type = ["dylib", "rlib"] + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + assert_that( + p.cargo("build").arg("-v").arg("--release"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ({url}/foo) +[RUNNING] `rustc --crate-name foo foo[/]src[/]lib.rs \ + --crate-type dylib --crate-type rlib \ + --emit=dep-info,link \ + -C prefer-dynamic \ + -C opt-level=1 \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]release[/]deps \ + -L dependency={dir}[/]target[/]release[/]deps` +[COMPILING] test v0.0.0 ({url}) +[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level=1 \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]release[/]deps \ + --extern foo={dir}[/]target[/]release[/]deps[/]\ + {prefix}foo[..]{suffix} \ + --extern foo={dir}[/]target[/]release[/]deps[/]libfoo.rlib` +[FINISHED] release [optimized + debuginfo] target(s) in [..] +", + dir = p.root().display(), + url = p.url(), + prefix = env::consts::DLL_PREFIX, + suffix = env::consts::DLL_SUFFIX + )), + ); +} + +#[test] +fn profile_in_non_root_manifest_triggers_a_warning() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + + [profile.dev] + debug = false + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." + + [profile.dev] + opt-level = 1 + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").cwd(p.root().join("bar")).arg("-v"), + execs().with_status(0).with_stderr( + "\ +[WARNING] profiles for the non root package will be ignored, specify profiles at the workspace root: +package: [..] +workspace: [..] +[COMPILING] bar v0.1.0 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized] target(s) in [..]", + ), + ); +} + +#[test] +fn profile_in_virtual_manifest_works() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + + [profile.dev] + opt-level = 1 + debug = false + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." 
+ "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").cwd(p.root().join("bar")).arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] bar v0.1.0 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [optimized] target(s) in [..]", + ), + ); +} diff --git a/tests/testsuite/publish.rs b/tests/testsuite/publish.rs new file mode 100644 index 000000000..0eba5a8dd --- /dev/null +++ b/tests/testsuite/publish.rs @@ -0,0 +1,874 @@ +use std::io::prelude::*; +use std::fs::{self, File}; +use std::io::SeekFrom; + +use cargotest::ChannelChanger; +use cargotest::support::git::repo; +use cargotest::support::paths; +use cargotest::support::{execs, project, publish}; +use flate2::read::GzDecoder; +use hamcrest::assert_that; +use tar::Archive; + +#[test] +fn simple() { + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .arg("--no-verify") + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `{reg}` +[WARNING] manifest has no documentation, [..] +See [..] +[PACKAGING] foo v0.0.1 ({dir}) +[UPLOADING] foo v0.0.1 ({dir}) +", + dir = p.url(), + reg = publish::registry() + )), + ); + + let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap(); + // Skip the metadata payload and the size of the tarball + let mut sz = [0; 4]; + assert_eq!(f.read(&mut sz).unwrap(), 4); + let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16) + | ((sz[3] as u32) << 24); + f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap(); + + // Verify the tarball + let mut rdr = GzDecoder::new(f); + assert_eq!( + rdr.header().unwrap().filename().unwrap(), + b"foo-0.0.1.crate" + ); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for file in ar.entries().unwrap() { + let file = file.unwrap(); + let fname = file.header().path_bytes(); + let fname = &*fname; + assert!( + fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig" + || fname == b"foo-0.0.1/src/main.rs", + "unexpected filename: {:?}", + file.header().path() + ); + } +} + +#[test] +fn old_token_location() { + publish::setup(); + + // publish::setup puts a token in this file. + fs::remove_file(paths::root().join(".cargo/config")).unwrap(); + + let credentials = paths::root().join("home/.cargo/credentials"); + File::create(credentials) + .unwrap() + .write_all( + br#" + token = "api-token" + "#, + ) + .unwrap(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .arg("--no-verify") + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `{reg}` +[WARNING] manifest has no documentation, [..] +See [..] 
+[PACKAGING] foo v0.0.1 ({dir}) +[UPLOADING] foo v0.0.1 ({dir}) +", + dir = p.url(), + reg = publish::registry() + )), + ); + + let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap(); + // Skip the metadata payload and the size of the tarball + let mut sz = [0; 4]; + assert_eq!(f.read(&mut sz).unwrap(), 4); + let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16) + | ((sz[3] as u32) << 24); + f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap(); + + // Verify the tarball + let mut rdr = GzDecoder::new(f); + assert_eq!( + rdr.header().unwrap().filename().unwrap(), + b"foo-0.0.1.crate" + ); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for file in ar.entries().unwrap() { + let file = file.unwrap(); + let fname = file.header().path_bytes(); + let fname = &*fname; + assert!( + fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig" + || fname == b"foo-0.0.1/src/main.rs", + "unexpected filename: {:?}", + file.header().path() + ); + } +} + +// TODO: Deprecated +// remove once it has been decided --host can be removed +#[test] +fn simple_with_host() { + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .arg("--no-verify") + .arg("--host") + .arg(publish::registry().to_string()), + execs().with_status(0).with_stderr(&format!( + "\ +[WARNING] The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +deprecated. The flag is being renamed to 'index', as the flag +wants the location of the index. Please use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning. +[UPDATING] registry `{reg}` +[WARNING] manifest has no documentation, [..] +See [..] 
+[PACKAGING] foo v0.0.1 ({dir}) +[UPLOADING] foo v0.0.1 ({dir}) +", + dir = p.url(), + reg = publish::registry() + )), + ); + + let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap(); + // Skip the metadata payload and the size of the tarball + let mut sz = [0; 4]; + assert_eq!(f.read(&mut sz).unwrap(), 4); + let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16) + | ((sz[3] as u32) << 24); + f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap(); + + // Verify the tarball + let mut rdr = GzDecoder::new(f); + assert_eq!( + rdr.header().unwrap().filename().unwrap(), + "foo-0.0.1.crate".as_bytes() + ); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for file in ar.entries().unwrap() { + let file = file.unwrap(); + let fname = file.header().path_bytes(); + let fname = &*fname; + assert!( + fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig" + || fname == b"foo-0.0.1/src/main.rs", + "unexpected filename: {:?}", + file.header().path() + ); + } +} + +// TODO: Deprecated +// remove once it has been decided --host can be removed +#[test] +fn simple_with_index_and_host() { + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .arg("--no-verify") + .arg("--index") + .arg(publish::registry().to_string()) + .arg("--host") + .arg(publish::registry().to_string()), + execs().with_status(0).with_stderr(&format!( + "\ +[WARNING] The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +deprecated. The flag is being renamed to 'index', as the flag +wants the location of the index. Please use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning. +[UPDATING] registry `{reg}` +[WARNING] manifest has no documentation, [..] +See [..] 
+[PACKAGING] foo v0.0.1 ({dir}) +[UPLOADING] foo v0.0.1 ({dir}) +", + dir = p.url(), + reg = publish::registry() + )), + ); + + let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap(); + // Skip the metadata payload and the size of the tarball + let mut sz = [0; 4]; + assert_eq!(f.read(&mut sz).unwrap(), 4); + let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16) + | ((sz[3] as u32) << 24); + f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap(); + + // Verify the tarball + let mut rdr = GzDecoder::new(f); + assert_eq!( + rdr.header().unwrap().filename().unwrap(), + "foo-0.0.1.crate".as_bytes() + ); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + for file in ar.entries().unwrap() { + let file = file.unwrap(); + let fname = file.header().path_bytes(); + let fname = &*fname; + assert!( + fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig" + || fname == b"foo-0.0.1/src/main.rs", + "unexpected filename: {:?}", + file.header().path() + ); + } +} + +#[test] +fn git_deps() { + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [dependencies.foo] + git = "git://path/to/nowhere" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .arg("-v") + .arg("--no-verify") + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(101).with_stderr( + "\ +[UPDATING] registry [..] +[ERROR] crates cannot be published to crates.io with dependencies sourced from \ +a repository\neither publish `foo` as its own crate on crates.io and \ +specify a crates.io version as a dependency or pull it into this \ +repository and specify it with a path and version\n\ +(crate `foo` has repository path `git://path/to/nowhere`)\ +", + ), + ); +} + +#[test] +fn path_dependency_no_version() { + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("publish") + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(101).with_stderr( + "\ +[UPDATING] registry [..] +[ERROR] all path dependencies must have a version specified when publishing. +dependency `bar` does not specify a version +", + ), + ); +} + +#[test] +fn unpublishable_crate() { + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish = false + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(101).with_stderr( + "\ +[ERROR] some crates cannot be published. 
+`foo` is marked as unpublishable +", + ), + ); +} + +#[test] +fn dont_publish_dirty() { + publish::setup(); + let p = project("foo").file("bar", "").build(); + + let _ = repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(101).with_stderr( + "\ +[UPDATING] registry `[..]` +error: 1 files in the working directory contain changes that were not yet \ +committed into git: + +bar + +to proceed despite this, pass the `--allow-dirty` flag +", + ), + ); +} + +#[test] +fn publish_clean() { + publish::setup(); + + let p = project("foo").build(); + + let _ = repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(0), + ); +} + +#[test] +fn publish_in_sub_repo() { + publish::setup(); + + let p = project("foo").file("baz", "").build(); + + let _ = repo(&paths::root().join("foo")) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .cwd(p.root().join("bar")) + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(0), + ); +} + +#[test] +fn publish_when_ignored() { + publish::setup(); + + let p = project("foo").file("baz", "").build(); + + let _ = repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file(".gitignore", "baz") + .build(); + + assert_that( + p.cargo("publish") + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(0), + ); +} + +#[test] +fn ignore_when_crate_ignored() { + publish::setup(); + + let p = project("foo").file("bar/baz", "").build(); + + let _ = repo(&paths::root().join("foo")) + .file(".gitignore", "bar") + .nocommit_file( + "bar/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .nocommit_file("bar/src/main.rs", "fn main() {}"); + assert_that( + p.cargo("publish") + .cwd(p.root().join("bar")) + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(0), + ); +} + +#[test] +fn new_crate_rejected() { + publish::setup(); + + let p = project("foo").file("baz", "").build(); + + let _ = repo(&paths::root().join("foo")) + .nocommit_file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .nocommit_file("src/main.rs", "fn main() {}"); + assert_that( + 
p.cargo("publish") + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(101), + ); +} + +#[test] +fn dry_run() { + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .arg("--dry-run") + .arg("--index") + .arg(publish::registry().to_string()), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `[..]` +[WARNING] manifest has no documentation, [..] +See [..] +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[COMPILING] foo v0.0.1 [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[UPLOADING] foo v0.0.1 ({dir}) +[WARNING] aborting upload due to dry run +", + dir = p.url() + )), + ); + + // Ensure the API request wasn't actually made + assert!(!publish::upload_path().join("api/v1/crates/new").exists()); +} + +#[test] +fn block_publish_feature_not_enabled() { + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish = [ + "test" + ] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("-Zunstable-options"), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the `publish` manifest key is unstable for anything other than a value of true or false + +Caused by: + feature `alternative-registries` is required + +consider adding `cargo-features = [\"alternative-registries\"]` to the manifest +", + ), + ); +} + +#[test] +fn registry_not_in_publish_list() { + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish = [ + "test" + ] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("-Zunstable-options"), + execs().with_status(101).with_stderr( + "\ +[ERROR] some crates cannot be published. +`foo` is marked as unpublishable +", + ), + ); +} + +#[test] +fn publish_empty_list() { + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("-Zunstable-options"), + execs().with_status(101).with_stderr( + "\ +[ERROR] some crates cannot be published. 
+`foo` is marked as unpublishable +", + ), + ); +} + +#[test] +fn publish_allowed_registry() { + publish::setup(); + + let p = project("foo").build(); + + let _ = repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + publish = ["alternative"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("-Zunstable-options"), + execs().with_status(0), + ); +} + +#[test] +fn block_publish_no_registry() { + publish::setup(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["alternative-registries"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("publish") + .masquerade_as_nightly_cargo() + .arg("--registry") + .arg("alternative") + .arg("-Zunstable-options"), + execs().with_status(101).with_stderr( + "\ +[ERROR] some crates cannot be published. +`foo` is marked as unpublishable +", + ), + ); +} diff --git a/tests/testsuite/read_manifest.rs b/tests/testsuite/read_manifest.rs new file mode 100644 index 000000000..fff5a2f18 --- /dev/null +++ b/tests/testsuite/read_manifest.rs @@ -0,0 +1,105 @@ +use cargotest::support::{basic_bin_manifest, execs, main_file, project}; +use hamcrest::assert_that; + +static MANIFEST_OUTPUT: &'static str = r#" +{ + "name":"foo", + "version":"0.5.0", + "id":"foo[..]0.5.0[..](path+file://[..]/foo)", + "license": null, + "license_file": null, + "description": null, + "source":null, + "dependencies":[], + "targets":[{ + "kind":["bin"], + "crate_types":["bin"], + "name":"foo", + "src_path":"[..][/]foo[/]src[/]foo.rs" + }], + "features":{}, + "manifest_path":"[..]Cargo.toml" +}"#; + +#[test] +fn cargo_read_manifest_path_to_cargo_toml_relative() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("read-manifest") + .arg("--manifest-path") + .arg("foo/Cargo.toml") + .cwd(p.root().parent().unwrap()), + execs().with_status(0).with_json(MANIFEST_OUTPUT), + ); +} + +#[test] +fn cargo_read_manifest_path_to_cargo_toml_absolute() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("read-manifest") + .arg("--manifest-path") + .arg(p.root().join("Cargo.toml")) + .cwd(p.root().parent().unwrap()), + execs().with_status(0).with_json(MANIFEST_OUTPUT), + ); +} + +#[test] +fn cargo_read_manifest_path_to_cargo_toml_parent_relative() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("read-manifest") + .arg("--manifest-path") + .arg("foo") + .cwd(p.root().parent().unwrap()), + execs().with_status(101).with_stderr( + "[ERROR] the manifest-path must be \ + a path to a Cargo.toml file", + ), + ); +} + +#[test] +fn cargo_read_manifest_path_to_cargo_toml_parent_absolute() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + 
p.cargo("read-manifest") + .arg("--manifest-path") + .arg(p.root()) + .cwd(p.root().parent().unwrap()), + execs().with_status(101).with_stderr( + "[ERROR] the manifest-path must be \ + a path to a Cargo.toml file", + ), + ); +} + +#[test] +fn cargo_read_manifest_cwd() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("read-manifest").cwd(p.root()), + execs().with_status(0).with_json(MANIFEST_OUTPUT), + ); +} diff --git a/tests/testsuite/registry.rs b/tests/testsuite/registry.rs new file mode 100644 index 000000000..8597f144f --- /dev/null +++ b/tests/testsuite/registry.rs @@ -0,0 +1,1802 @@ +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::PathBuf; + +use cargotest::cargo_process; +use cargotest::support::git; +use cargotest::support::paths::{self, CargoPathExt}; +use cargotest::support::registry::{self, Package}; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; +use url::Url; + +fn registry_path() -> PathBuf { + paths::root().join("registry") +} +fn registry() -> Url { + Url::from_file_path(&*registry_path()).ok().unwrap() +} + +#[test] +fn simple() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `{reg}` +[DOWNLOADING] bar v0.0.1 (registry `file://[..]`) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url(), + reg = registry::registry() + )), + ); + + assert_that(p.cargo("clean"), execs().with_status(0)); + + // Don't download a second time + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url() + )), + ); +} + +#[test] +fn deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("bar", "0.0.1").dep("baz", "*").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `{reg}` +[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`) +[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url(), + reg = registry::registry() + )), + ); +} + +#[test] +fn nonexistent() { + Package::new("init", "0.0.1").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + nonexistent = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] registry [..] +error: no matching package named `nonexistent` found +location searched: registry [..] 
+required by package `foo v0.0.1 ([..])` +", + ), + ); +} + +#[test] +fn wrong_version() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + foo = ">= 1.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.0.1").publish(); + Package::new("foo", "0.0.2").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains( + "\ +error: no matching version `>= 1.0.0` found for package `foo` +location searched: registry [..] +versions found: 0.0.2, 0.0.1 +required by package `foo v0.0.1 ([..])` +", + ), + ); + + Package::new("foo", "0.0.3").publish(); + Package::new("foo", "0.0.4").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains( + "\ +error: no matching version `>= 1.0.0` found for package `foo` +location searched: registry [..] +versions found: 0.0.4, 0.0.3, 0.0.2, ... +required by package `foo v0.0.1 ([..])` +", + ), + ); +} + +#[test] +fn bad_cksum() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bad-cksum = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + let pkg = Package::new("bad-cksum", "0.0.1"); + pkg.publish(); + t!(File::create(&pkg.archive_dst())); + + assert_that( + p.cargo("build").arg("-v"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] registry [..] +[DOWNLOADING] bad-cksum [..] +[ERROR] unable to get packages from source + +Caused by: + failed to download replaced source registry `https://[..]` + +Caused by: + failed to verify the checksum of `bad-cksum v0.0.1 (registry `file://[..]`)` +", + ), + ); +} + +#[test] +fn update_registry() { + Package::new("init", "0.0.1").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + notyet = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains( + "\ +error: no matching package named `notyet` found +location searched: registry `[..]` +required by package `foo v0.0.1 ([..])` +", + ), + ); + + Package::new("notyet", "0.0.1").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `{reg}` +[DOWNLOADING] notyet v0.0.1 (registry `file://[..]`) +[COMPILING] notyet v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url(), + reg = registry::registry() + )), + ); +} + +#[test] +fn package_with_path_deps() { + Package::new("init", "0.0.1").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + + [dependencies.notyet] + version = "0.0.1" + path = "notyet" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "notyet/Cargo.toml", + r#" + [package] + name = "notyet" + version = "0.0.1" + authors = [] + "#, + ) + .file("notyet/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("package").arg("-v"), + execs().with_status(101).with_stderr_contains( + "\ +[ERROR] failed to verify package tarball + +Caused by: + no matching package named `notyet` found +location searched: registry [..] 
+required by package `foo v0.0.1 ([..])` +", + ), + ); + + Package::new("notyet", "0.0.1").publish(); + + assert_that( + p.cargo("package"), + execs().with_status(0).with_stderr(format!( + "\ +[PACKAGING] foo v0.0.1 ({dir}) +[VERIFYING] foo v0.0.1 ({dir}) +[UPDATING] registry `[..]` +[DOWNLOADING] notyet v0.0.1 (registry `file://[..]`) +[COMPILING] notyet v0.0.1 +[COMPILING] foo v0.0.1 ({dir}[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url() + )), + ); +} + +#[test] +fn lockfile_locks() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] bar v0.0.1 (registry `file://[..]`) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url() + )), + ); + + p.root().move_into_the_past(); + Package::new("bar", "0.0.2").publish(); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn lockfile_locks_transitively() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("bar", "0.0.1").dep("baz", "*").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`) +[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url() + )), + ); + + p.root().move_into_the_past(); + Package::new("baz", "0.0.2").publish(); + Package::new("bar", "0.0.2").dep("baz", "*").publish(); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn yanks_are_not_used() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("baz", "0.0.2").yanked(true).publish(); + Package::new("bar", "0.0.1").dep("baz", "*").publish(); + Package::new("bar", "0.0.2") + .dep("baz", "*") + .yanked(true) + .publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`) +[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
secs +", + dir = p.url() + )), + ); +} + +#[test] +fn relying_on_a_yank_is_bad() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("baz", "0.0.2").yanked(true).publish(); + Package::new("bar", "0.0.1").dep("baz", "=0.0.2").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr_contains( + "\ +error: no matching version `= 0.0.2` found for package `baz` +location searched: registry `[..]` +versions found: 0.0.1 +required by package `bar v0.0.1` +", + ), + ); +} + +#[test] +fn yanks_in_lockfiles_are_ok() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + registry::registry_path().join("3").rm_rf(); + + Package::new("bar", "0.0.1").yanked(true).publish(); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + + assert_that( + p.cargo("update"), + execs().with_status(101).with_stderr_contains( + "\ +error: no matching package named `bar` found +location searched: registry [..] +required by package `foo v0.0.1 ([..])` +", + ), + ); +} + +#[test] +fn update_with_lockfile_if_packages_missing() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + assert_that(p.cargo("build"), execs().with_status(0)); + p.root().move_into_the_past(); + + paths::home().join(".cargo/registry").rm_rf(); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] bar v0.0.1 (registry `file://[..]`) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + ), + ); +} + +#[test] +fn update_lockfile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + println!("0.0.1"); + Package::new("bar", "0.0.1").publish(); + assert_that(p.cargo("build"), execs().with_status(0)); + + Package::new("bar", "0.0.2").publish(); + Package::new("bar", "0.0.3").publish(); + paths::home().join(".cargo/registry").rm_rf(); + println!("0.0.2 update"); + assert_that( + p.cargo("update") + .arg("-p") + .arg("bar") + .arg("--precise") + .arg("0.0.2"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[UPDATING] bar v0.0.1 -> v0.0.2 +", + ), + ); + + println!("0.0.2 build"); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[DOWNLOADING] [..] v0.0.2 (registry `file://[..]`) +[COMPILING] bar v0.0.2 +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
secs
+",
+            dir = p.url()
+        )),
+    );
+
+    println!("0.0.3 update");
+    assert_that(
+        p.cargo("update").arg("-p").arg("bar"),
+        execs().with_status(0).with_stderr(
+            "\
+[UPDATING] registry `[..]`
+[UPDATING] bar v0.0.2 -> v0.0.3
+",
+        ),
+    );
+
+    println!("0.0.3 build");
+    assert_that(
+        p.cargo("build"),
+        execs().with_status(0).with_stderr(&format!(
+            "\
+[DOWNLOADING] [..] v0.0.3 (registry `file://[..]`)
+[COMPILING] bar v0.0.3
+[COMPILING] foo v0.0.1 ({dir})
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
+",
+            dir = p.url()
+        )),
+    );
+
+    println!("new dependencies update");
+    Package::new("bar", "0.0.4").dep("spam", "0.2.5").publish();
+    Package::new("spam", "0.2.5").publish();
+    assert_that(
+        p.cargo("update").arg("-p").arg("bar"),
+        execs().with_status(0).with_stderr(
+            "\
+[UPDATING] registry `[..]`
+[UPDATING] bar v0.0.3 -> v0.0.4
+[ADDING] spam v0.2.5
+",
+        ),
+    );
+
+    println!("new dependencies update");
+    Package::new("bar", "0.0.5").publish();
+    assert_that(
+        p.cargo("update").arg("-p").arg("bar"),
+        execs().with_status(0).with_stderr(
+            "\
+[UPDATING] registry `[..]`
+[UPDATING] bar v0.0.4 -> v0.0.5
+[REMOVING] spam v0.2.5
+",
+        ),
+    );
+}
+
+#[test]
+fn update_offline() {
+    use cargotest::ChannelChanger;
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+    assert_that(
+        p.cargo("update")
+            .masquerade_as_nightly_cargo()
+            .arg("-Zoffline"),
+        execs()
+            .with_status(101)
+            .with_stderr("error: you can't update in the offline mode[..]"),
+    );
+}
+
+#[test]
+fn dev_dependency_not_used() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("baz", "0.0.1").publish();
+    Package::new("bar", "0.0.1").dev_dep("baz", "*").publish();
+
+    assert_that(
+        p.cargo("build"),
+        execs().with_status(0).with_stderr(&format!(
+            "\
+[UPDATING] registry `[..]`
+[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ({dir})
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
+",
+            dir = p.url()
+        )),
+    );
+}
+
+#[test]
+fn login_with_no_cargo_dir() {
+    let home = paths::home().join("new-home");
+    t!(fs::create_dir(&home));
+    assert_that(
+        cargo_process().arg("login").arg("foo").arg("-v"),
+        execs().with_status(0),
+    );
+}
+
+#[test]
+fn login_with_differently_sized_token() {
+    // Verify that the configuration file gets properly truncated.
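+    // (Log in with a long token, then a shorter one, then the long one
+    // again: if the file were rewritten in place without truncation, the
+    // tail of the previous, longer token would be left behind after the
+    // shorter one.)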
+ let home = paths::home().join("new-home"); + t!(fs::create_dir(&home)); + assert_that( + cargo_process().arg("login").arg("lmaolmaolmao").arg("-v"), + execs().with_status(0), + ); + assert_that( + cargo_process().arg("login").arg("lmao").arg("-v"), + execs().with_status(0), + ); + assert_that( + cargo_process().arg("login").arg("lmaolmaolmao").arg("-v"), + execs().with_status(0), + ); +} + +#[test] +fn bad_license_file() { + Package::new("foo", "1.0.0").publish(); + let p = project("all") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license-file = "foo" + description = "bar" + repository = "baz" + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .build(); + assert_that( + p.cargo("publish") + .arg("-v") + .arg("--index") + .arg(registry().to_string()), + execs() + .with_status(101) + .with_stderr_contains("[ERROR] the license file `foo` does not exist"), + ); +} + +#[test] +fn updating_a_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.a] + path = "a" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + Package::new("bar", "0.0.1").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] bar v0.0.1 (registry `file://[..]`) +[COMPILING] bar v0.0.1 +[COMPILING] a v0.0.1 ({dir}/a) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url() + )), + ); + + t!(t!(File::create(&p.root().join("a/Cargo.toml"))).write_all( + br#" + [project] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "# + )); + Package::new("bar", "0.1.0").publish(); + + println!("second"); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] bar v0.1.0 (registry `file://[..]`) +[COMPILING] bar v0.1.0 +[COMPILING] a v0.0.1 ({dir}/a) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url() + )), + ); +} + +#[test] +fn git_and_registry_dep() { + let b = git::repo(&paths::root().join("b")) + .file( + "Cargo.toml", + r#" + [project] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + a = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = "0.0.1" + + [dependencies.b] + git = '{}' + "#, + b.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.0.1").publish(); + + p.root().move_into_the_past(); + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] [..] +[UPDATING] [..] +[DOWNLOADING] a v0.0.1 (registry `file://[..]`) +[COMPILING] a v0.0.1 +[COMPILING] b v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
secs +", + dir = p.url() + )), + ); + p.root().move_into_the_past(); + + println!("second"); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn update_publish_then_update() { + // First generate a Cargo.lock and a clone of the registry index at the + // "head" of the current registry. + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + Package::new("a", "0.1.0").publish(); + assert_that(p.cargo("build"), execs().with_status(0)); + + // Next, publish a new package and back up the copy of the registry we just + // created. + Package::new("a", "0.1.1").publish(); + let registry = paths::home().join(".cargo/registry"); + let backup = paths::root().join("registry-backup"); + t!(fs::rename(®istry, &backup)); + + // Generate a Cargo.lock with the newer version, and then move the old copy + // of the registry back into place. + let p2 = project("foo2") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + assert_that(p2.cargo("build"), execs().with_status(0)); + registry.rm_rf(); + t!(fs::rename(&backup, ®istry)); + t!(fs::rename( + p2.root().join("Cargo.lock"), + p.root().join("Cargo.lock") + )); + + // Finally, build the first project again (with our newer Cargo.lock) which + // should force an update of the old registry, download the new crate, and + // then build everything again. + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr(&format!( + "\ +[UPDATING] [..] +[DOWNLOADING] a v0.1.1 (registry `file://[..]`) +[COMPILING] a v0.1.1 +[COMPILING] foo v0.5.0 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + dir = p.url() + )), + ); +} + +#[test] +fn fetch_downloads() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.1.0").publish(); + + assert_that( + p.cargo("fetch"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] a v0.1.0 (registry [..]) +", + ), + ); +} + +#[test] +fn update_transitive_dependency() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.1.0").dep("b", "*").publish(); + Package::new("b", "0.1.0").publish(); + + assert_that(p.cargo("fetch"), execs().with_status(0)); + + Package::new("b", "0.1.1").publish(); + + assert_that( + p.cargo("update").arg("-pb"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[UPDATING] b v0.1.0 -> v0.1.1 +", + ), + ); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[DOWNLOADING] b v0.1.1 (registry `file://[..]`) +[COMPILING] b v0.1.1 +[COMPILING] a v0.1.0 +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
secs +", + ), + ); +} + +#[test] +fn update_backtracking_ok() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + webdriver = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("webdriver", "0.1.0") + .dep("hyper", "0.6") + .publish(); + Package::new("hyper", "0.6.5") + .dep("openssl", "0.1") + .dep("cookie", "0.1") + .publish(); + Package::new("cookie", "0.1.0") + .dep("openssl", "0.1") + .publish(); + Package::new("openssl", "0.1.0").publish(); + + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + + Package::new("openssl", "0.1.1").publish(); + Package::new("hyper", "0.6.6") + .dep("openssl", "0.1.1") + .dep("cookie", "0.1.0") + .publish(); + + assert_that( + p.cargo("update").arg("-p").arg("hyper"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[UPDATING] hyper v0.6.5 -> v0.6.6 +[UPDATING] openssl v0.1.0 -> v0.1.1 +", + ), + ); +} + +#[test] +fn update_multiple_packages() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "*" + b = "*" + c = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.1.0").publish(); + Package::new("b", "0.1.0").publish(); + Package::new("c", "0.1.0").publish(); + + assert_that(p.cargo("fetch"), execs().with_status(0)); + + Package::new("a", "0.1.1").publish(); + Package::new("b", "0.1.1").publish(); + Package::new("c", "0.1.1").publish(); + + assert_that( + p.cargo("update").arg("-pa").arg("-pb"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[UPDATING] a v0.1.0 -> v0.1.1 +[UPDATING] b v0.1.0 -> v0.1.1 +", + ), + ); + + assert_that( + p.cargo("update").arg("-pb").arg("-pc"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[UPDATING] c v0.1.0 -> v0.1.1 +", + ), + ); + + assert_that( + p.cargo("build"), + execs() + .with_status(0) + .with_stderr_contains("[DOWNLOADING] a v0.1.1 (registry `file://[..]`)") + .with_stderr_contains("[DOWNLOADING] b v0.1.1 (registry `file://[..]`)") + .with_stderr_contains("[DOWNLOADING] c v0.1.1 (registry `file://[..]`)") + .with_stderr_contains("[COMPILING] a v0.1.1") + .with_stderr_contains("[COMPILING] b v0.1.1") + .with_stderr_contains("[COMPILING] c v0.1.1") + .with_stderr_contains("[COMPILING] foo v0.5.0 ([..])"), + ); +} + +#[test] +fn bundled_crate_in_registry() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "0.1" + baz = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.1.0").publish(); + Package::new("baz", "0.1.0") + .dep("bar", "0.1.0") + .file( + "Cargo.toml", + r#" + [package] + name = "baz" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar", version = "0.1.0" } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .publish(); + + assert_that(p.cargo("run"), execs().with_status(0)); +} + +#[test] +fn update_same_prefix_oh_my_how_was_this_a_bug() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "ugh" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foobar", 
"0.2.0").publish(); + Package::new("foo", "0.1.0") + .dep("foobar", "0.2.0") + .publish(); + + assert_that(p.cargo("generate-lockfile"), execs().with_status(0)); + assert_that( + p.cargo("update").arg("-pfoobar").arg("--precise=0.2.0"), + execs().with_status(0), + ); +} + +#[test] +fn use_semver() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "1.2.3-alpha.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "1.2.3-alpha.0").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn only_download_relevant() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [target.foo.dependencies] + foo = "*" + [dev-dependencies] + bar = "*" + [dependencies] + baz = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0").publish(); + Package::new("baz", "0.1.0").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] baz v0.1.0 ([..]) +[COMPILING] baz v0.1.0 +[COMPILING] bar v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs +", + ), + ); +} + +#[test] +fn resolve_and_backtracking() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.1.1") + .feature_dep("bar", "0.1", &["a", "b"]) + .publish(); + Package::new("foo", "0.1.0").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn upstream_warnings_on_extra_verbose() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.1.0") + .file("src/lib.rs", "fn unused() {}") + .publish(); + + assert_that( + p.cargo("build").arg("-vv"), + execs() + .with_status(0) + .with_stderr_contains("[..]warning: function is never used[..]"), + ); +} + +#[test] +fn disallow_network() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("--frozen"), + execs().with_status(101).with_stderr( + "\ +error: failed to load source for a dependency on `foo` + +Caused by: + Unable to update registry [..] 
+ +Caused by: + attempting to make an HTTP request, but --frozen was specified +", + ), + ); +} + +#[test] +fn add_dep_dont_update_registry() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.3" + "#, + ) + .file("baz/src/lib.rs", "") + .build(); + + Package::new("remote", "0.3.4").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all( + br#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + remote = "0.3" + "# + )); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] bar v0.5.0 ([..]) +[FINISHED] [..] +", + ), + ); +} + +#[test] +fn bump_version_dont_update_registry() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.3" + "#, + ) + .file("baz/src/lib.rs", "") + .build(); + + Package::new("remote", "0.3.4").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all( + br#" + [project] + name = "bar" + version = "0.6.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + "# + )); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] bar v0.6.0 ([..]) +[FINISHED] [..] +", + ), + ); +} + +#[test] +fn old_version_req() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.2*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("remote", "0.2.0").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +warning: parsed version requirement `0.2*` is no longer valid + +Previous versions of Cargo accepted this malformed requirement, +but it is being deprecated. This was found when parsing the manifest +of bar 0.5.0, and the correct version requirement is `0.2.*`. + +This will soon become a hard error, so it's either recommended to +update to a fixed version or contact the upstream maintainer about +this warning. + +warning: parsed version requirement `0.2*` is no longer valid + +Previous versions of Cargo accepted this malformed requirement, +but it is being deprecated. This was found when parsing the manifest +of bar 0.5.0, and the correct version requirement is `0.2.*`. + +This will soon become a hard error, so it's either recommended to +update to a fixed version or contact the upstream maintainer about +this warning. + +[UPDATING] [..] +[DOWNLOADING] [..] +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] 
+", + ), + ); +} + +#[test] +fn old_version_req_upstream() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.3" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("remote", "0.3.0") + .file( + "Cargo.toml", + r#" + [project] + name = "remote" + version = "0.3.0" + authors = [] + + [dependencies] + bar = "0.2*" + "#, + ) + .file("src/lib.rs", "") + .publish(); + Package::new("bar", "0.2.0").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] [..] +[DOWNLOADING] [..] +warning: parsed version requirement `0.2*` is no longer valid + +Previous versions of Cargo accepted this malformed requirement, +but it is being deprecated. This was found when parsing the manifest +of remote 0.3.0, and the correct version requirement is `0.2.*`. + +This will soon become a hard error, so it's either recommended to +update to a fixed version or contact the upstream maintainer about +this warning. + +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] +", + ), + ); +} + +#[test] +fn toml_lies_but_index_is_truth() { + Package::new("foo", "0.2.0").publish(); + Package::new("bar", "0.3.0") + .dep("foo", "0.2.0") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.3.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "extern crate foo;") + .publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "0.3" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn vv_prints_warnings() { + Package::new("foo", "0.2.0") + .file( + "src/lib.rs", + r#" + #![deny(warnings)] + + fn foo() {} // unused function + "#, + ) + .publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "fo" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "0.2" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-vv"), execs().with_status(0)); +} + +#[test] +fn bad_and_or_malicious_packages_rejected() { + Package::new("foo", "0.2.0") + .extra_file("foo-0.1.0/src/lib.rs", "") + .publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "fo" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "0.2" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("-vv"), + execs().with_status(101).with_stderr( + "\ +[UPDATING] [..] +[DOWNLOADING] [..] +error: unable to get packages from source + +Caused by: + failed to download [..] + +Caused by: + failed to unpack [..] + +Caused by: + [..] 
contains a file at \"foo-0.1.0/src/lib.rs\" which isn't under \"foo-0.2.0\" +", + ), + ); +} diff --git a/tests/testsuite/rename_deps.rs b/tests/testsuite/rename_deps.rs new file mode 100644 index 000000000..5ce740556 --- /dev/null +++ b/tests/testsuite/rename_deps.rs @@ -0,0 +1,147 @@ +use cargotest::support::{execs, project}; +use cargotest::support::registry::Package; +use cargotest::ChannelChanger; +use hamcrest::assert_that; + +#[test] +fn gated() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { package = "foo", version = "0.1" } + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + feature `rename-dependency` is required + +consider adding `cargo-features = [\"rename-dependency\"]` to the manifest +", + ), + ); + + let p = project("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { version = "0.1", package = "baz" } + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(101).with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + feature `rename-dependency` is required + +consider adding `cargo-features = [\"rename-dependency\"]` to the manifest +", + ), + ); +} + +#[test] +fn rename_dependency() { + Package::new("bar", "0.1.0").publish(); + Package::new("bar", "0.2.0").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["rename-dependency"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { version = "0.1.0" } + baz = { version = "0.2.0", package = "bar" } + "#, + ) + .file( + "src/lib.rs", + " + extern crate bar; + extern crate baz; + ", + ) + .build(); + + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(0), + ); +} + +#[test] +fn rename_with_different_names() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + cargo-features = ["rename-dependency"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + baz = { path = "bar", package = "bar" } + "#, + ) + .file( + "src/lib.rs", + " + extern crate baz; + ", + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "random_name" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").masquerade_as_nightly_cargo(), + execs().with_status(0), + ); +} diff --git a/tests/testsuite/required_features.rs b/tests/testsuite/required_features.rs new file mode 100644 index 000000000..93ee37780 --- /dev/null +++ b/tests/testsuite/required_features.rs @@ -0,0 +1,1352 @@ +use cargotest::is_nightly; +use cargotest::install::{cargo_home, has_installed_exe}; +use cargotest::support::{execs, project}; +use hamcrest::{assert_that, existing_file, is_not}; + +#[test] +fn build_bin_default_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + "#, + ) + .file( + "src/main.rs", + r#" + extern crate foo; + + #[cfg(feature = "a")] + fn test() { + foo::foo(); + } + + fn main() {} + "#, + ) + .file( + 
"src/lib.rs", + r#" + #[cfg(feature = "a")] + pub fn foo() {} + "#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that( + p.cargo("build").arg("--no-default-features"), + execs().with_status(0), + ); + + assert_that(p.cargo("build").arg("--bin=foo"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that( + p.cargo("build") + .arg("--bin=foo") + .arg("--no-default-features"), + execs().with_status(101).with_stderr( + "\ +error: target `foo` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +", + ), + ); +} + +#[test] +fn build_bin_arg_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("--features").arg("a"), + execs().with_status(0), + ); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn build_bin_multiple_required_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[bin]] + name = "foo_1" + path = "src/foo_1.rs" + required-features = ["b", "c"] + + [[bin]] + name = "foo_2" + path = "src/foo_2.rs" + required-features = ["a"] + "#, + ) + .file("src/foo_1.rs", "fn main() {}") + .file("src/foo_2.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + assert_that(&p.bin("foo_1"), is_not(existing_file())); + assert_that(&p.bin("foo_2"), existing_file()); + + assert_that( + p.cargo("build").arg("--features").arg("c"), + execs().with_status(0), + ); + + assert_that(&p.bin("foo_1"), existing_file()); + assert_that(&p.bin("foo_2"), existing_file()); + + assert_that( + p.cargo("build").arg("--no-default-features"), + execs().with_status(0), + ); +} + +#[test] +fn build_example_default_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[example]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("examples/foo.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("--example=foo"), + execs().with_status(0), + ); + assert_that(&p.bin("examples/foo"), existing_file()); + + assert_that( + p.cargo("build") + .arg("--example=foo") + .arg("--no-default-features"), + execs().with_status(101).with_stderr( + "\ +error: target `foo` requires the features: `a` +Consider enabling them by passing e.g. 
`--features=\"a\"` +", + ), + ); +} + +#[test] +fn build_example_arg_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[example]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("examples/foo.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build") + .arg("--example=foo") + .arg("--features") + .arg("a"), + execs().with_status(0), + ); + assert_that(&p.bin("examples/foo"), existing_file()); +} + +#[test] +fn build_example_multiple_required_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[example]] + name = "foo_1" + required-features = ["b", "c"] + + [[example]] + name = "foo_2" + required-features = ["a"] + "#, + ) + .file("examples/foo_1.rs", "fn main() {}") + .file("examples/foo_2.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("build").arg("--example=foo_1"), + execs().with_status(101).with_stderr( + "\ +error: target `foo_1` requires the features: `b`, `c` +Consider enabling them by passing e.g. `--features=\"b c\"` +", + ), + ); + assert_that( + p.cargo("build").arg("--example=foo_2"), + execs().with_status(0), + ); + + assert_that(&p.bin("examples/foo_1"), is_not(existing_file())); + assert_that(&p.bin("examples/foo_2"), existing_file()); + + assert_that( + p.cargo("build") + .arg("--example=foo_1") + .arg("--features") + .arg("c"), + execs().with_status(0), + ); + assert_that( + p.cargo("build") + .arg("--example=foo_2") + .arg("--features") + .arg("c"), + execs().with_status(0), + ); + + assert_that(&p.bin("examples/foo_1"), existing_file()); + assert_that(&p.bin("examples/foo_2"), existing_file()); + + assert_that( + p.cargo("build") + .arg("--example=foo_1") + .arg("--no-default-features"), + execs().with_status(101).with_stderr( + "\ +error: target `foo_1` requires the features: `b`, `c` +Consider enabling them by passing e.g. `--features=\"b c\"` +", + ), + ); + assert_that( + p.cargo("build") + .arg("--example=foo_2") + .arg("--no-default-features"), + execs().with_status(101).with_stderr( + "\ +error: target `foo_2` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +", + ), + ); +} + +#[test] +fn test_default_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[test]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("tests/foo.rs", "#[test]\nfn test() {}") + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("test test ... ok"), + ); + + assert_that( + p.cargo("test").arg("--no-default-features"), + execs() + .with_status(0) + .with_stderr(format!( + "[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]" + )) + .with_stdout(""), + ); + + assert_that( + p.cargo("test").arg("--test=foo"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]" + )) + .with_stdout_contains("test test ... 
ok"), + ); + + assert_that( + p.cargo("test") + .arg("--test=foo") + .arg("--no-default-features"), + execs().with_status(101).with_stderr( + "\ +error: target `foo` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +", + ), + ); +} + +#[test] +fn test_arg_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[test]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("tests/foo.rs", "#[test]\nfn test() {}") + .build(); + + assert_that( + p.cargo("test").arg("--features").arg("a"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("test test ... ok"), + ); +} + +#[test] +fn test_multiple_required_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[test]] + name = "foo_1" + required-features = ["b", "c"] + + [[test]] + name = "foo_2" + required-features = ["a"] + "#, + ) + .file("tests/foo_1.rs", "#[test]\nfn test() {}") + .file("tests/foo_2.rs", "#[test]\nfn test() {}") + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo_2-[..][EXE]", + p.url() + )) + .with_stdout_contains("test test ... ok"), + ); + + assert_that( + p.cargo("test").arg("--features").arg("c"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo_1-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]foo_2-[..][EXE]", + p.url() + )) + .with_stdout_contains_n("test test ... ok", 2), + ); + + assert_that( + p.cargo("test").arg("--no-default-features"), + execs() + .with_status(0) + .with_stderr(format!( + "[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]" + )) + .with_stdout(""), + ); +} + +#[test] +fn bench_default_features() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[bench]] + name = "foo" + required-features = ["a"] + "#, + ) + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .build(); + + assert_that( + p.cargo("bench"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("test bench ... bench: [..]"), + ); + + assert_that( + p.cargo("bench").arg("--no-default-features"), + execs() + .with_status(0) + .with_stderr(format!("[FINISHED] release [optimized] target(s) in [..]")) + .with_stdout(""), + ); + + assert_that( + p.cargo("bench").arg("--bench=foo"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]" + )) + .with_stdout_contains("test bench ... 
bench: [..]"), + ); + + assert_that( + p.cargo("bench") + .arg("--bench=foo") + .arg("--no-default-features"), + execs().with_status(101).with_stderr( + "\ +error: target `foo` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +", + ), + ); +} + +#[test] +fn bench_arg_features() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[bench]] + name = "foo" + required-features = ["a"] + "#, + ) + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .build(); + + assert_that( + p.cargo("bench").arg("--features").arg("a"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("test bench ... bench: [..]"), + ); +} + +#[test] +fn bench_multiple_required_features() { + if !is_nightly() { + return; + } + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[bench]] + name = "foo_1" + required-features = ["b", "c"] + + [[bench]] + name = "foo_2" + required-features = ["a"] + "#, + ) + .file( + "benches/foo_1.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .file( + "benches/foo_2.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .build(); + + assert_that( + p.cargo("bench"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo_2-[..][EXE]", + p.url() + )) + .with_stdout_contains("test bench ... bench: [..]"), + ); + + assert_that( + p.cargo("bench").arg("--features").arg("c"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo_1-[..][EXE] +[RUNNING] target[/]release[/]deps[/]foo_2-[..][EXE]", + p.url() + )) + .with_stdout_contains_n("test bench ... bench: [..]", 2), + ); + + assert_that( + p.cargo("bench").arg("--no-default-features"), + execs() + .with_status(0) + .with_stderr(format!("[FINISHED] release [optimized] target(s) in [..]")) + .with_stdout(""), + ); +} + +#[test] +fn install_default_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + + [[example]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("install"), execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0)); + + assert_that( + p.cargo("install").arg("--no-default-features"), + execs().with_status(101).with_stderr(format!( + "\ +[INSTALLING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] 
+[ERROR] no binaries are available for install using the selected features +" + )), + ); + assert_that(cargo_home(), is_not(has_installed_exe("foo"))); + + assert_that(p.cargo("install").arg("--bin=foo"), execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0)); + + assert_that( + p.cargo("install") + .arg("--bin=foo") + .arg("--no-default-features"), + execs().with_status(101).with_stderr(format!( + "\ +[INSTALLING] foo v0.0.1 ([..]) +[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \ + `[..]target` + +Caused by: + target `foo` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +" + )), + ); + assert_that(cargo_home(), is_not(has_installed_exe("foo"))); + + assert_that( + p.cargo("install").arg("--example=foo"), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0)); + + assert_that( + p.cargo("install") + .arg("--example=foo") + .arg("--no-default-features"), + execs().with_status(101).with_stderr(format!( + "\ +[INSTALLING] foo v0.0.1 ([..]) +[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \ + `[..]target` + +Caused by: + target `foo` requires the features: `a` +Consider enabling them by passing e.g. `--features=\"a\"` +" + )), + ); + assert_that(cargo_home(), is_not(has_installed_exe("foo"))); +} + +#[test] +fn install_arg_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("install").arg("--features").arg("a"), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0)); +} + +#[test] +fn install_multiple_required_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[bin]] + name = "foo_1" + path = "src/foo_1.rs" + required-features = ["b", "c"] + + [[bin]] + name = "foo_2" + path = "src/foo_2.rs" + required-features = ["a"] + "#, + ) + .file("src/foo_1.rs", "fn main() {}") + .file("src/foo_2.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("install"), execs().with_status(0)); + assert_that(cargo_home(), is_not(has_installed_exe("foo_1"))); + assert_that(cargo_home(), has_installed_exe("foo_2")); + assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0)); + + assert_that( + p.cargo("install").arg("--features").arg("c"), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo_1")); + assert_that(cargo_home(), has_installed_exe("foo_2")); + assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0)); + + assert_that( + p.cargo("install").arg("--no-default-features"), + execs().with_status(101).with_stderr( + "\ +[INSTALLING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] 
+[ERROR] no binaries are available for install using the selected features +", + ), + ); + assert_that(cargo_home(), is_not(has_installed_exe("foo_1"))); + assert_that(cargo_home(), is_not(has_installed_exe("foo_2"))); +} + +#[test] +fn dep_feature_in_toml() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar", features = ["a"] } + + [[bin]] + name = "foo" + required-features = ["bar/a"] + + [[example]] + name = "foo" + required-features = ["bar/a"] + + [[test]] + name = "foo" + required-features = ["bar/a"] + + [[bench]] + name = "foo" + required-features = ["bar/a"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .file("tests/foo.rs", "#[test]\nfn test() {}") + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + a = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + // bin + assert_that(p.cargo("build").arg("--bin=foo"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + // example + assert_that( + p.cargo("build").arg("--example=foo"), + execs().with_status(0), + ); + assert_that(&p.bin("examples/foo"), existing_file()); + + // test + assert_that( + p.cargo("test").arg("--test=foo"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("test test ... ok"), + ); + + // bench + if is_nightly() { + assert_that( + p.cargo("bench").arg("--bench=foo"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] bar v0.0.1 ({0}/bar) +[COMPILING] foo v0.0.1 ({0}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("test bench ... 
bench: [..]"), + ); + } + + // install + assert_that(p.cargo("install"), execs().with_status(0)); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0)); +} + +#[test] +fn dep_feature_in_cmd_line() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [[bin]] + name = "foo" + required-features = ["bar/a"] + + [[example]] + name = "foo" + required-features = ["bar/a"] + + [[test]] + name = "foo" + required-features = ["bar/a"] + + [[bench]] + name = "foo" + required-features = ["bar/a"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .file("tests/foo.rs", "#[test]\nfn test() {}") + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + a = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + // bin + assert_that( + p.cargo("build").arg("--bin=foo"), + execs().with_status(101).with_stderr( + "\ +error: target `foo` requires the features: `bar/a` +Consider enabling them by passing e.g. `--features=\"bar/a\"` +", + ), + ); + + assert_that( + p.cargo("build") + .arg("--bin=foo") + .arg("--features") + .arg("bar/a"), + execs().with_status(0), + ); + assert_that(&p.bin("foo"), existing_file()); + + // example + assert_that( + p.cargo("build").arg("--example=foo"), + execs().with_status(101).with_stderr( + "\ +error: target `foo` requires the features: `bar/a` +Consider enabling them by passing e.g. `--features=\"bar/a\"` +", + ), + ); + + assert_that( + p.cargo("build") + .arg("--example=foo") + .arg("--features") + .arg("bar/a"), + execs().with_status(0), + ); + assert_that(&p.bin("examples/foo"), existing_file()); + + // test + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(format!( + "[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]" + )) + .with_stdout(""), + ); + + assert_that( + p.cargo("test") + .arg("--test=foo") + .arg("--features") + .arg("bar/a"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("test test ... ok"), + ); + + // bench + if is_nightly() { + assert_that( + p.cargo("bench"), + execs() + .with_status(0) + .with_stderr(format!("[FINISHED] release [optimized] target(s) in [..]")) + .with_stdout(""), + ); + + assert_that( + p.cargo("bench") + .arg("--bench=foo") + .arg("--features") + .arg("bar/a"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] bar v0.0.1 ({0}/bar) +[COMPILING] foo v0.0.1 ({0}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("test bench ... bench: [..]"), + ); + } + + // install + assert_that( + p.cargo("install"), + execs().with_status(101).with_stderr(format!( + "\ +[INSTALLING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] 
+[ERROR] no binaries are available for install using the selected features +" + )), + ); + assert_that(cargo_home(), is_not(has_installed_exe("foo"))); + + assert_that( + p.cargo("install").arg("--features").arg("bar/a"), + execs().with_status(0), + ); + assert_that(cargo_home(), has_installed_exe("foo")); + assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0)); +} + +#[test] +fn test_skips_compiling_bin_with_missing_required_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[bin]] + name = "bin_foo" + path = "src/bin/foo.rs" + required-features = ["a"] + "#, + ) + .file("src/bin/foo.rs", "extern crate bar; fn main() {}") + .file("tests/foo.rs", "") + .file("benches/foo.rs", "") + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("running 0 tests"), + ); + + assert_that( + p.cargo("test") + .arg("--features") + .arg("a") + .arg("-j") + .arg("1"), + execs().with_status(101).with_stderr_contains(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +error[E0463]: can't find crate for `bar`", + p.url() + )), + ); + + if is_nightly() { + assert_that( + p.cargo("bench"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("running 0 tests"), + ); + + assert_that( + p.cargo("bench") + .arg("--features") + .arg("a") + .arg("-j") + .arg("1"), + execs().with_status(101).with_stderr_contains(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +error[E0463]: can't find crate for `bar`", + p.url() + )), + ); + } +} + +#[test] +fn run_default() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = [] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "extern crate foo; fn main() {}") + .build(); + + assert_that( + p.cargo("run"), + execs().with_status(101).with_stderr( + "\ +error: target `foo` requires the features: `a` +Consider enabling them by passing e.g. 
`--features=\"a\"`
+",
+        ),
+    );
+
+    assert_that(
+        p.cargo("run").arg("--features").arg("a"),
+        execs().with_status(0),
+    );
+}
+
+#[test]
+fn run_default_multiple_required_features() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a"]
+            a = []
+            b = []
+
+            [[bin]]
+            name = "foo1"
+            path = "src/foo1.rs"
+            required-features = ["a"]
+
+            [[bin]]
+            name = "foo2"
+            path = "src/foo2.rs"
+            required-features = ["b"]
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file("src/foo1.rs", "extern crate foo; fn main() {}")
+        .file("src/foo2.rs", "extern crate foo; fn main() {}")
+        .build();
+
+    assert_that(
+        p.cargo("run"),
+        execs().with_status(101).with_stderr(
+            "\
+            error: `cargo run` requires that a project only have one executable; \
+            use the `--bin` option to specify which one to run\navailable binaries: foo1, foo2",
+        ),
+    );
+}
diff --git a/tests/testsuite/resolve.rs b/tests/testsuite/resolve.rs
new file mode 100644
index 000000000..8ec3a6e46
--- /dev/null
+++ b/tests/testsuite/resolve.rs
@@ -0,0 +1,990 @@
+use std::collections::{BTreeMap, HashSet};
+
+use hamcrest::{assert_that, contains, is_not};
+
+use cargo::core::source::{GitReference, SourceId};
+use cargo::core::dependency::Kind::{self, Development};
+use cargo::core::{Dependency, PackageId, Registry, Summary};
+use cargo::util::{CargoResult, Config, ToUrl};
+use cargo::core::resolver::{self, Method};
+
+use cargotest::ChannelChanger;
+use cargotest::support::{execs, project};
+use cargotest::support::registry::Package;
+
+fn resolve(
+    pkg: &PackageId,
+    deps: Vec<Dependency>,
+    registry: &[Summary],
+) -> CargoResult<Vec<PackageId>> {
+    resolve_with_config(pkg, deps, registry, None)
+}
+
+fn resolve_with_config(
+    pkg: &PackageId,
+    deps: Vec<Dependency>,
+    registry: &[Summary],
+    config: Option<&Config>,
+) -> CargoResult<Vec<PackageId>> {
+    struct MyRegistry<'a>(&'a [Summary]);
+    impl<'a> Registry for MyRegistry<'a> {
+        fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+            for summary in self.0.iter() {
+                if dep.matches(summary) {
+                    f(summary.clone());
+                }
+            }
+            Ok(())
+        }
+        fn supports_checksums(&self) -> bool {
+            false
+        }
+        fn requires_precise(&self) -> bool {
+            false
+        }
+    }
+    let mut registry = MyRegistry(registry);
+    let summary = Summary::new(pkg.clone(), deps, BTreeMap::new(), None).unwrap();
+    let method = Method::Everything;
+    let resolve = resolver::resolve(
+        &[(summary, method)],
+        &[],
+        &mut registry,
+        &HashSet::new(),
+        config,
+        false,
+    )?;
+    let res = resolve.iter().cloned().collect();
+    Ok(res)
+}
+
+trait ToDep {
+    fn to_dep(self) -> Dependency;
+}
+
+impl ToDep for &'static str {
+    fn to_dep(self) -> Dependency {
+        let url = "http://example.com".to_url().unwrap();
+        let source_id = SourceId::for_registry(&url).unwrap();
+        Dependency::parse_no_deprecated(self, Some("1.0.0"), &source_id).unwrap()
+    }
+}
+
+impl ToDep for Dependency {
+    fn to_dep(self) -> Dependency {
+        self
+    }
+}
+
+trait ToPkgId {
+    fn to_pkgid(&self) -> PackageId;
+}
+
+impl<'a> ToPkgId for &'a str {
+    fn to_pkgid(&self) -> PackageId {
+        PackageId::new(*self, "1.0.0", &registry_loc()).unwrap()
+    }
+}
+
+impl<'a> ToPkgId for (&'a str, &'a str) {
+    fn to_pkgid(&self) -> PackageId {
+        let (name, vers) = *self;
+        PackageId::new(name, vers, &registry_loc()).unwrap()
+    }
+}
+
+impl<'a> ToPkgId for (&'a str, String) {
+    fn to_pkgid(&self) -> PackageId {
+        let (name, ref vers) = *self;
+        PackageId::new(name, vers, &registry_loc()).unwrap()
+    }
+}
+
+macro_rules!
pkg { + ($pkgid:expr => [$($deps:expr),+]) => ({ + let d: Vec = vec![$($deps.to_dep()),+]; + let pkgid = $pkgid.to_pkgid(); + let link = if pkgid.name().ends_with("-sys") {Some(pkgid.name().to_string())} else {None}; + + Summary::new(pkgid, d, BTreeMap::new(), link).unwrap() + }); + + ($pkgid:expr) => ({ + let pkgid = $pkgid.to_pkgid(); + let link = if pkgid.name().ends_with("-sys") {Some(pkgid.name().to_string())} else {None}; + Summary::new(pkgid, Vec::new(), BTreeMap::new(), link).unwrap() + }) +} + +fn registry_loc() -> SourceId { + let remote = "http://example.com".to_url().unwrap(); + SourceId::for_registry(&remote).unwrap() +} + +fn pkg(name: &str) -> Summary { + let link = if name.ends_with("-sys") { + Some(name.to_string()) + } else { + None + }; + Summary::new(pkg_id(name), Vec::new(), BTreeMap::new(), link).unwrap() +} + +fn pkg_id(name: &str) -> PackageId { + PackageId::new(name, "1.0.0", ®istry_loc()).unwrap() +} + +fn pkg_id_loc(name: &str, loc: &str) -> PackageId { + let remote = loc.to_url(); + let master = GitReference::Branch("master".to_string()); + let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap(); + + PackageId::new(name, "1.0.0", &source_id).unwrap() +} + +fn pkg_loc(name: &str, loc: &str) -> Summary { + let link = if name.ends_with("-sys") { + Some(name.to_string()) + } else { + None + }; + Summary::new(pkg_id_loc(name, loc), Vec::new(), BTreeMap::new(), link).unwrap() +} + +fn dep(name: &str) -> Dependency { + dep_req(name, "1.0.0") +} +fn dep_req(name: &str, req: &str) -> Dependency { + let url = "http://example.com".to_url().unwrap(); + let source_id = SourceId::for_registry(&url).unwrap(); + Dependency::parse_no_deprecated(name, Some(req), &source_id).unwrap() +} + +fn dep_loc(name: &str, location: &str) -> Dependency { + let url = location.to_url().unwrap(); + let master = GitReference::Branch("master".to_string()); + let source_id = SourceId::for_git(&url, master).unwrap(); + Dependency::parse_no_deprecated(name, Some("1.0.0"), &source_id).unwrap() +} +fn dep_kind(name: &str, kind: Kind) -> Dependency { + dep(name).set_kind(kind).clone() +} + +fn registry(pkgs: Vec

) -> Vec { + pkgs +} + +fn names(names: &[P]) -> Vec { + names.iter().map(|name| name.to_pkgid()).collect() +} + +fn loc_names(names: &[(&'static str, &'static str)]) -> Vec { + names + .iter() + .map(|&(name, loc)| pkg_id_loc(name, loc)) + .collect() +} + +#[test] +#[should_panic(expected = "assertion failed: name.len() > 0")] +fn test_dependency_with_empty_name() { + // Bug 5229, dependency-names must not be empty + "".to_dep(); +} + +#[test] +fn test_resolving_empty_dependency_list() { + let res = resolve(&pkg_id("root"), Vec::new(), ®istry(vec![])).unwrap(); + + assert_eq!(res, names(&["root"])); +} + +fn assert_same(a: &[PackageId], b: &[PackageId]) { + assert_eq!(a.len(), b.len()); + for item in a { + assert!(b.contains(item)); + } +} + +#[test] +fn test_resolving_only_package() { + let reg = registry(vec![pkg("foo")]); + let res = resolve(&pkg_id("root"), vec![dep("foo")], ®).unwrap(); + assert_same(&res, &names(&["root", "foo"])); +} + +#[test] +fn test_resolving_one_dep() { + let reg = registry(vec![pkg("foo"), pkg("bar")]); + let res = resolve(&pkg_id("root"), vec![dep("foo")], ®).unwrap(); + assert_same(&res, &names(&["root", "foo"])); +} + +#[test] +fn test_resolving_multiple_deps() { + let reg = registry(vec![pkg!("foo"), pkg!("bar"), pkg!("baz")]); + let res = resolve(&pkg_id("root"), vec![dep("foo"), dep("baz")], ®).unwrap(); + assert_same(&res, &names(&["root", "foo", "baz"])); +} + +#[test] +fn test_resolving_transitive_deps() { + let reg = registry(vec![pkg!("foo"), pkg!("bar" => ["foo"])]); + let res = resolve(&pkg_id("root"), vec![dep("bar")], ®).unwrap(); + + assert_that(&res, contains(names(&["root", "foo", "bar"]))); +} + +#[test] +fn test_resolving_common_transitive_deps() { + let reg = registry(vec![pkg!("foo" => ["bar"]), pkg!("bar")]); + let res = resolve(&pkg_id("root"), vec![dep("foo"), dep("bar")], ®).unwrap(); + + assert_that(&res, contains(names(&["root", "foo", "bar"]))); +} + +#[test] +fn test_resolving_with_same_name() { + let list = vec![ + pkg_loc("foo", "http://first.example.com"), + pkg_loc("bar", "http://second.example.com"), + ]; + + let reg = registry(list); + let res = resolve( + &pkg_id("root"), + vec![ + dep_loc("foo", "http://first.example.com"), + dep_loc("bar", "http://second.example.com"), + ], + ®, + ).unwrap(); + + let mut names = loc_names(&[ + ("foo", "http://first.example.com"), + ("bar", "http://second.example.com"), + ]); + + names.push(pkg_id("root")); + assert_same(&res, &names); +} + +#[test] +fn test_resolving_with_dev_deps() { + let reg = registry(vec![ + pkg!("foo" => ["bar", dep_kind("baz", Development)]), + pkg!("baz" => ["bat", dep_kind("bam", Development)]), + pkg!("bar"), + pkg!("bat"), + ]); + + let res = resolve( + &pkg_id("root"), + vec![dep("foo"), dep_kind("baz", Development)], + ®, + ).unwrap(); + + assert_that(&res, contains(names(&["root", "foo", "bar", "baz"]))); +} + +#[test] +fn resolving_with_many_versions() { + let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]); + + let res = resolve(&pkg_id("root"), vec![dep("foo")], ®).unwrap(); + + assert_that( + &res, + contains(names(&[("root", "1.0.0"), ("foo", "1.0.2")])), + ); +} + +#[test] +fn resolving_with_specific_version() { + let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]); + + let res = resolve(&pkg_id("root"), vec![dep_req("foo", "=1.0.1")], ®).unwrap(); + + assert_that( + &res, + contains(names(&[("root", "1.0.0"), ("foo", "1.0.1")])), + ); +} + +#[test] +fn test_resolving_maximum_version_with_transitive_deps() { + 
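+    // The resolver maximizes versions by default: both `foo`'s "1.0.0"
+    // requirement and `bar`'s ">=1.0.1" admit util 1.2.2, so 1.2.2 is
+    // selected even though 1.1.1 would also satisfy both constraints.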
let reg = registry(vec![ + pkg!(("util", "1.2.2")), + pkg!(("util", "1.0.0")), + pkg!(("util", "1.1.1")), + pkg!("foo" => [dep_req("util", "1.0.0")]), + pkg!("bar" => [dep_req("util", ">=1.0.1")]), + ]); + + let res = resolve( + &pkg_id("root"), + vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")], + ®, + ).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + ("foo", "1.0.0"), + ("bar", "1.0.0"), + ("util", "1.2.2"), + ])), + ); + assert_that(&res, is_not(contains(names(&[("util", "1.0.1")])))); + assert_that(&res, is_not(contains(names(&[("util", "1.1.1")])))); +} + +#[test] +fn test_resolving_minimum_version_with_transitive_deps() { + // When the minimal-versions config option is specified then the lowest + // possible version of a package should be selected. "util 1.0.0" can't be + // selected because of the requirements of "bar", so the minimum version + // must be 1.1.1. + let reg = registry(vec![ + pkg!(("util", "1.2.2")), + pkg!(("util", "1.0.0")), + pkg!(("util", "1.1.1")), + pkg!("foo" => [dep_req("util", "1.0.0")]), + pkg!("bar" => [dep_req("util", ">=1.0.1")]), + ]); + + let mut config = Config::default().unwrap(); + config + .configure( + 1, + None, + &None, + false, + false, + &["minimal-versions".to_string()], + ) + .unwrap(); + + let res = resolve_with_config( + &pkg_id("root"), + vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")], + ®, + Some(&config), + ).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + ("foo", "1.0.0"), + ("bar", "1.0.0"), + ("util", "1.1.1"), + ])), + ); + assert_that(&res, is_not(contains(names(&[("util", "1.2.2")])))); + assert_that(&res, is_not(contains(names(&[("util", "1.0.0")])))); +} + +// Ensure that the "-Z minimal-versions" CLI option works and the minimal +// version of a dependency ends up in the lock file. 
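+// A sketch of the scenario exercised below: with
+//
+//     [dependencies]
+//     dep = "1.0"    # admits every 1.x release
+//
+// and both 1.0.0 and 1.1.0 published, default resolution locks dep 1.1.0,
+// while `cargo generate-lockfile -Zminimal-versions` locks dep 1.0.0.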
+#[test] +fn minimal_version_cli() { + Package::new("dep", "1.0.0").publish(); + Package::new("dep", "1.1.0").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [dependencies] + dep = "1.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("generate-lockfile") + .masquerade_as_nightly_cargo() + .arg("-Zminimal-versions"), + execs().with_status(0), + ); + + let lock = p.read_lockfile(); + + assert!(lock.contains("dep 1.0.0")); +} + +#[test] +fn resolving_incompat_versions() { + let reg = registry(vec![ + pkg!(("foo", "1.0.1")), + pkg!(("foo", "1.0.2")), + pkg!("bar" => [dep_req("foo", "=1.0.2")]), + ]); + + assert!( + resolve( + &pkg_id("root"), + vec![dep_req("foo", "=1.0.1"), dep("bar")], + ® + ).is_err() + ); +} + +#[test] +fn resolving_backtrack() { + let reg = registry(vec![ + pkg!(("foo", "1.0.2") => [dep("bar")]), + pkg!(("foo", "1.0.1") => [dep("baz")]), + pkg!("bar" => [dep_req("foo", "=2.0.2")]), + pkg!("baz"), + ]); + + let res = resolve(&pkg_id("root"), vec![dep_req("foo", "^1")], ®).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + ("foo", "1.0.1"), + ("baz", "1.0.0"), + ])), + ); +} + +#[test] +fn resolving_backtrack_features() { + // test for cargo/issues/4347 + let mut bad = dep("bar"); + bad.set_features(vec!["bad".to_string()]); + + let reg = registry(vec![ + pkg!(("foo", "1.0.2") => [bad]), + pkg!(("foo", "1.0.1") => [dep("bar")]), + pkg!("bar"), + ]); + + let res = resolve(&pkg_id("root"), vec![dep_req("foo", "^1")], ®).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + ("foo", "1.0.1"), + ("bar", "1.0.0"), + ])), + ); +} + +#[test] +fn resolving_allows_multiple_compatible_versions() { + let reg = registry(vec![ + pkg!(("foo", "1.0.0")), + pkg!(("foo", "2.0.0")), + pkg!(("foo", "0.1.0")), + pkg!(("foo", "0.2.0")), + pkg!("bar" => ["d1", "d2", "d3", "d4"]), + pkg!("d1" => [dep_req("foo", "1")]), + pkg!("d2" => [dep_req("foo", "2")]), + pkg!("d3" => [dep_req("foo", "0.1")]), + pkg!("d4" => [dep_req("foo", "0.2")]), + ]); + + let res = resolve(&pkg_id("root"), vec![dep("bar")], ®).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + ("foo", "1.0.0"), + ("foo", "2.0.0"), + ("foo", "0.1.0"), + ("foo", "0.2.0"), + ("d1", "1.0.0"), + ("d2", "1.0.0"), + ("d3", "1.0.0"), + ("d4", "1.0.0"), + ("bar", "1.0.0"), + ])), + ); +} + +#[test] +fn resolving_with_deep_backtracking() { + let reg = registry(vec![ + pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]), + pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]), + pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"), + dep_req("other", "1")]), + pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]), + pkg!(("baz", "1.0.2") => [dep_req("other", "2")]), + pkg!(("baz", "1.0.1")), + pkg!(("dep_req", "1.0.0")), + pkg!(("dep_req", "2.0.0")), + ]); + + let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], ®).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + ("foo", "1.0.0"), + ("bar", "2.0.0"), + ("baz", "1.0.1"), + ])), + ); +} + +#[test] +fn resolving_with_sys_crates() { + // This is based on issues/4902 + // With `l` a normal library we get 2copies so everyone gets the newest compatible. + // But `l-sys` a library with a links attribute we make sure there is only one. 
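+    // The `pkg!` macro above sets the `links` attribute for any package
+    // whose name ends in "-sys", which is why a single copy of `l-sys` is
+    // enforced here while two semver-incompatible copies of `l` coexist.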
+ let reg = registry(vec![ + pkg!(("l-sys", "0.9.1")), + pkg!(("l-sys", "0.10.0")), + pkg!(("l", "0.9.1")), + pkg!(("l", "0.10.0")), + pkg!(("d", "1.0.0") => [dep_req("l-sys", ">=0.8.0, <=0.10.0"), dep_req("l", ">=0.8.0, <=0.10.0")]), + pkg!(("r", "1.0.0") => [dep_req("l-sys", "0.9"), dep_req("l", "0.9")]), + ]); + + let res = resolve( + &pkg_id("root"), + vec![dep_req("d", "1"), dep_req("r", "1")], + ®, + ).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + ("d", "1.0.0"), + ("r", "1.0.0"), + ("l-sys", "0.9.1"), + ("l", "0.9.1"), + ("l", "0.10.0"), + ])), + ); +} + +#[test] +fn resolving_with_constrained_sibling_backtrack_parent() { + // There is no point in considering all of the backtrack_trap{1,2} + // candidates since they can't change the result of failing to + // resolve 'constrained'. Cargo should (ideally) skip past them and resume + // resolution once the activation of the parent, 'bar', is rolled back. + // Note that the traps are slightly more constrained to make sure they + // get picked first. + let mut reglist = vec![ + pkg!(("foo", "1.0.0") => [dep_req("bar", "1.0"), + dep_req("constrained", "=1.0.0")]), + pkg!(("bar", "1.0.0") => [dep_req("backtrack_trap1", "1.0.2"), + dep_req("backtrack_trap2", "1.0.2"), + dep_req("constrained", "1.0.0")]), + pkg!(("constrained", "1.0.0")), + pkg!(("backtrack_trap1", "1.0.0")), + pkg!(("backtrack_trap2", "1.0.0")), + ]; + // Bump this to make the test harder - it adds more versions of bar that will + // fail to resolve, and more versions of the traps to consider. + const NUM_BARS_AND_TRAPS: usize = 50; // minimum 2 + for i in 1..NUM_BARS_AND_TRAPS { + let vsn = format!("1.0.{}", i); + reglist.push( + pkg!(("bar", vsn.clone()) => [dep_req("backtrack_trap1", "1.0.2"), + dep_req("backtrack_trap2", "1.0.2"), + dep_req("constrained", "1.0.1")]), + ); + reglist.push(pkg!(("backtrack_trap1", vsn.clone()))); + reglist.push(pkg!(("backtrack_trap2", vsn.clone()))); + reglist.push(pkg!(("constrained", vsn.clone()))); + } + let reg = registry(reglist); + + let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], ®).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + ("foo", "1.0.0"), + ("bar", "1.0.0"), + ("constrained", "1.0.0"), + ])), + ); +} + +#[test] +fn resolving_with_many_equivalent_backtracking() { + let mut reglist = Vec::new(); + + const DEPTH: usize = 200; + const BRANCHING_FACTOR: usize = 100; + + // Each level depends on the next but the last level does not exist. + // Without cashing we need to test every path to the last level O(BRANCHING_FACTOR ^ DEPTH) + // and this test will time out. With cashing we need to discover that none of these + // can be activated O(BRANCHING_FACTOR * DEPTH) + for l in 0..DEPTH { + let name = format!("level{}", l); + let next = format!("level{}", l + 1); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep_req(next.as_str(), "*")])); + } + } + + let reg = registry(reglist.clone()); + + let res = resolve(&pkg_id("root"), vec![dep_req("level0", "*")], ®); + + assert!(res.is_err()); + + // It is easy to write code that quickly returns an error. + // Lets make sure we can find a good answer if it is there. 
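+    // Adding one dependency-free "level0" release gives the resolver a path
+    // that terminates immediately, so a solution now exists without walking
+    // the exponential space of dead ends.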
+ reglist.push(pkg!(("level0", "1.0.0"))); + + let reg = registry(reglist.clone()); + + let res = resolve(&pkg_id("root"), vec![dep_req("level0", "*")], ®).unwrap(); + + assert_that( + &res, + contains(names(&[("root", "1.0.0"), ("level0", "1.0.0")])), + ); + + // Make sure we have not special case no candidates. + reglist.push(pkg!(("constrained", "1.1.0"))); + reglist.push(pkg!(("constrained", "1.0.0"))); + reglist.push( + pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep_req("constrained", "=1.0.0")]), + ); + + let reg = registry(reglist.clone()); + + let res = resolve( + &pkg_id("root"), + vec![dep_req("level0", "*"), dep_req("constrained", "*")], + ®, + ).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + ("level0", "1.0.0"), + ("constrained", "1.1.0"), + ])), + ); + + let reg = registry(reglist.clone()); + + let res = resolve( + &pkg_id("root"), + vec![dep_req("level0", "1.0.1"), dep_req("constrained", "*")], + ®, + ).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + (format!("level{}", DEPTH).as_str(), "1.0.0"), + ("constrained", "1.0.0"), + ])), + ); + + let reg = registry(reglist.clone()); + + let res = resolve( + &pkg_id("root"), + vec![dep_req("level0", "1.0.1"), dep_req("constrained", "1.1.0")], + ®, + ); + + assert!(res.is_err()); +} + +#[test] +fn resolving_with_deep_traps() { + let mut reglist = Vec::new(); + + const DEPTH: usize = 200; + const BRANCHING_FACTOR: usize = 100; + + // Each backtrack_trap depends on the next, and adds a backtrack frame. + // None of witch is going to help with `bad`. + for l in 0..DEPTH { + let name = format!("backtrack_trap{}", l); + let next = format!("backtrack_trap{}", l + 1); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep_req(next.as_str(), "*")])); + } + } + { + let name = format!("backtrack_trap{}", DEPTH); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!((name.as_str(), vsn.as_str()))); + } + } + { + // slightly less constrained to make sure `cloaking` gets picked last. + for i in 1..(BRANCHING_FACTOR + 10) { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!(("cloaking", vsn.as_str()) => [dep_req("bad", "1.0.1")])); + } + } + + let reg = registry(reglist.clone()); + + let res = resolve( + &pkg_id("root"), + vec![dep_req("backtrack_trap0", "*"), dep_req("cloaking", "*")], + ®, + ); + + assert!(res.is_err()); +} + +#[test] +fn resolving_with_constrained_cousins_backtrack() { + let mut reglist = Vec::new(); + + const DEPTH: usize = 100; + const BRANCHING_FACTOR: usize = 50; + + // Each backtrack_trap depends on the next. + // The last depends on a specific ver of constrained. + for l in 0..DEPTH { + let name = format!("backtrack_trap{}", l); + let next = format!("backtrack_trap{}", l + 1); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep_req(next.as_str(), "*")])); + } + } + { + let name = format!("backtrack_trap{}", DEPTH); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push( + pkg!((name.as_str(), vsn.as_str()) => [dep_req("constrained", ">=1.1.0, <=2.0.0")]), + ); + } + } + { + // slightly less constrained to make sure `constrained` gets picked last. 
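+        // (BRANCHING_FACTOR + 10 low versions here, versus BRANCHING_FACTOR - 1
+        // versions per trap, so `constrained` has the most candidates and is
+        // activated after every backtrack_trap.)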
+ for i in 0..(BRANCHING_FACTOR + 10) { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!(("constrained", vsn.as_str()))); + } + reglist.push(pkg!(("constrained", "1.1.0"))); + reglist.push(pkg!(("constrained", "2.0.0"))); + reglist.push(pkg!(("constrained", "2.0.1"))); + } + reglist.push(pkg!(("cloaking", "1.0.0") => [dep_req("constrained", "~1.0.0")])); + + let reg = registry(reglist.clone()); + + // `backtrack_trap0 = "*"` is a lot of ways of saying `constrained = ">=1.1.0, <=2.0.0"` + // but `constrained= "2.0.1"` is already picked. + // Only then to try and solve `constrained= "~1.0.0"` which is incompatible. + let res = resolve( + &pkg_id("root"), + vec![ + dep_req("backtrack_trap0", "*"), + dep_req("constrained", "2.0.1"), + dep_req("cloaking", "*"), + ], + ®, + ); + + assert!(res.is_err()); + + // Each level depends on the next but the last depends on incompatible deps. + // Let's make sure that we can cache that a dep has incompatible deps. + for l in 0..DEPTH { + let name = format!("level{}", l); + let next = format!("level{}", l + 1); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep_req(next.as_str(), "*")])); + } + } + reglist.push( + pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep_req("backtrack_trap0", "*"), + dep_req("cloaking", "*") + ]), + ); + + let reg = registry(reglist.clone()); + + let res = resolve( + &pkg_id("root"), + vec![dep_req("level0", "*"), dep_req("constrained", "2.0.1")], + ®, + ); + + assert!(res.is_err()); + + let res = resolve( + &pkg_id("root"), + vec![dep_req("level0", "*"), dep_req("constrained", "2.0.0")], + ®, + ).unwrap(); + + assert_that( + &res, + contains(names(&[("constrained", "2.0.0"), ("cloaking", "1.0.0")])), + ); +} + +#[test] +fn resolving_with_constrained_sibling_backtrack_activation() { + // It makes sense to resolve most-constrained deps first, but + // with that logic the backtrack traps here come between the two + // attempted resolutions of 'constrained'. When backtracking, + // cargo should skip past them and resume resolution once the + // number of activations for 'constrained' changes. + let mut reglist = vec![ + pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"), + dep_req("backtrack_trap1", "1.0"), + dep_req("backtrack_trap2", "1.0"), + dep_req("constrained", "<=1.0.60")]), + pkg!(("bar", "1.0.0") => [dep_req("constrained", ">=1.0.60")]), + ]; + // Bump these to make the test harder, but you'll also need to + // change the version constraints on `constrained` above. To correctly + // exercise Cargo, the relationship between the values is: + // NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn + // to make sure the traps are resolved between `constrained`. 
+ const NUM_TRAPS: usize = 45; // min 1 + const NUM_CONSTRAINED: usize = 100; // min 1 + for i in 0..NUM_TRAPS { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!(("backtrack_trap1", vsn.clone()))); + reglist.push(pkg!(("backtrack_trap2", vsn.clone()))); + } + for i in 0..NUM_CONSTRAINED { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!(("constrained", vsn.clone()))); + } + let reg = registry(reglist); + + let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], ®).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + ("foo", "1.0.0"), + ("bar", "1.0.0"), + ("constrained", "1.0.60"), + ])), + ); +} + +#[test] +fn resolving_with_constrained_sibling_transitive_dep_effects() { + // When backtracking due to a failed dependency, if Cargo is + // trying to be clever and skip irrelevant dependencies, care must + // be taken to not miss the transitive effects of alternatives. E.g. + // in the right-to-left resolution of the graph below, B may + // affect whether D is successfully resolved. + // + // A + // / | \ + // B C D + // | | + // C D + let reg = registry(vec![ + pkg!(("A", "1.0.0") => [dep_req("B", "1.0"), + dep_req("C", "1.0"), + dep_req("D", "1.0.100")]), + pkg!(("B", "1.0.0") => [dep_req("C", ">=1.0.0")]), + pkg!(("B", "1.0.1") => [dep_req("C", ">=1.0.1")]), + pkg!(("C", "1.0.0") => [dep_req("D", "1.0.0")]), + pkg!(("C", "1.0.1") => [dep_req("D", ">=1.0.1,<1.0.100")]), + pkg!(("C", "1.0.2") => [dep_req("D", ">=1.0.2,<1.0.100")]), + pkg!(("D", "1.0.0")), + pkg!(("D", "1.0.1")), + pkg!(("D", "1.0.2")), + pkg!(("D", "1.0.100")), + pkg!(("D", "1.0.101")), + pkg!(("D", "1.0.102")), + pkg!(("D", "1.0.103")), + pkg!(("D", "1.0.104")), + pkg!(("D", "1.0.105")), + ]); + + let res = resolve(&pkg_id("root"), vec![dep_req("A", "1")], ®).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("A", "1.0.0"), + ("B", "1.0.0"), + ("C", "1.0.0"), + ("D", "1.0.105"), + ])), + ); +} + +#[test] +fn resolving_but_no_exists() { + let reg = registry(vec![]); + + let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], ®); + assert!(res.is_err()); + + assert_eq!( + res.err().unwrap().to_string(), + "\ + no matching package named `foo` found\n\ + location searched: registry `http://example.com/`\n\ + required by package `root v1.0.0 (registry `http://example.com/`)`\ + " + ); +} + +#[test] +fn resolving_cycle() { + let reg = registry(vec![pkg!("foo" => ["foo"])]); + + let _ = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], ®); +} + +#[test] +fn hard_equality() { + let reg = registry(vec![ + pkg!(("foo", "1.0.1")), + pkg!(("foo", "1.0.0")), + pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]), + ]); + + let res = resolve( + &pkg_id("root"), + vec![dep_req("bar", "1"), dep_req("foo", "=1.0.0")], + ®, + ).unwrap(); + + assert_that( + &res, + contains(names(&[ + ("root", "1.0.0"), + ("foo", "1.0.0"), + ("bar", "1.0.0"), + ])), + ); +} diff --git a/tests/testsuite/run.rs b/tests/testsuite/run.rs new file mode 100644 index 000000000..742f37d04 --- /dev/null +++ b/tests/testsuite/run.rs @@ -0,0 +1,1137 @@ +use cargo::util::paths::dylib_path_envvar; +use cargotest::support::{execs, project, path2url}; +use hamcrest::{assert_that, existing_file}; + +#[test] +fn simple() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { println!("hello"); } + "#, + ) + .build(); + + assert_that( + p.cargo("run"), + execs() + .with_status(0) + .with_stderr(&format!( 
+ "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]foo[EXE]`", + dir = path2url(p.root()) + )) + .with_stdout("hello"), + ); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn simple_quiet() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { println!("hello"); } + "#, + ) + .build(); + + assert_that( + p.cargo("run -q"), + execs().with_status(0).with_stdout("hello"), + ); + + assert_that( + p.cargo("run --quiet"), + execs().with_status(0).with_stdout("hello"), + ); +} + +#[test] +fn simple_quiet_and_verbose() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { println!("hello"); } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("-q").arg("-v"), + execs() + .with_status(101) + .with_stderr("[ERROR] cannot set both --verbose and --quiet"), + ); +} + +#[test] +fn quiet_and_verbose_config() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + ".cargo/config", + r#" + [term] + verbose = true + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { println!("hello"); } + "#, + ) + .build(); + + assert_that(p.cargo("run").arg("-q"), execs().with_status(0)); +} + +#[test] +fn simple_with_args() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + assert_eq!(std::env::args().nth(1).unwrap(), "hello"); + assert_eq!(std::env::args().nth(2).unwrap(), "world"); + } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("hello").arg("world"), + execs().with_status(0), + ); +} + +#[test] +fn exit_code() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { std::process::exit(2); } + "#, + ) + .build(); + + let mut output = String::from( + "\ +[COMPILING] foo v0.0.1 (file[..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[..]` +", + ); + if !cfg!(unix) { + output.push_str( + "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit code: 2)", + ); + } + assert_that(p.cargo("run"), execs().with_status(2).with_stderr(output)); +} + +#[test] +fn exit_code_verbose() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { std::process::exit(2); } + "#, + ) + .build(); + + let mut output = String::from( + "\ +[COMPILING] foo v0.0.1 (file[..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[..]` +", + ); + if !cfg!(unix) { + output.push_str( + "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit code: 2)", + ); + } + + assert_that( + p.cargo("run").arg("-v"), + execs().with_status(2).with_stderr(output), + ); +} + +#[test] +fn no_main_file() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("run"), + execs().with_status(101).with_stderr( + "[ERROR] a bin target must be available \ + for `cargo run`\n", + ), + ); +} + +#[test] +fn too_many_bins() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "") + .file("src/bin/b.rs", "") + .build(); + + assert_that( + p.cargo("run"), + execs().with_status(101).with_stderr( + "[ERROR] `cargo run` requires that a project only \ + have one executable; use the `--bin` option \ + to specify which one to run\navailable binaries: [..]\n", + ), + ); +} + +#[test] +fn specify_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + "src/bin/a.rs", + r#" + #[allow(unused_extern_crates)] + extern crate foo; + fn main() { println!("hello a.rs"); } + "#, + ) + .file( + "src/bin/b.rs", + r#" + #[allow(unused_extern_crates)] + extern crate foo; + fn main() { println!("hello b.rs"); } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("--bin").arg("a").arg("-v"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] `rustc [..] src[/]lib.rs [..]` +[RUNNING] `rustc [..] src[/]bin[/]a.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]a[EXE]`", + dir = path2url(p.root()) + )) + .with_stdout("hello a.rs"), + ); + + assert_that( + p.cargo("run").arg("--bin").arg("b").arg("-v"), + execs() + .with_status(0) + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] src[/]bin[/]b.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[/]debug[/]b[EXE]`", + ) + .with_stdout("hello b.rs"), + ); +} + +#[test] +fn run_example() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + "examples/a.rs", + r#" + fn main() { println!("example"); } + "#, + ) + .file( + "src/bin/a.rs", + r#" + fn main() { println!("bin"); } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("--example").arg("a"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[/]debug[/]examples[/]a[EXE]`", + dir = path2url(p.root()) + )) + .with_stdout("example"), + ); +} + +#[test] +fn run_bins() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + "examples/a.rs", + r#" + fn main() { println!("example"); } + "#, + ) + .file( + "src/bin/a.rs", + r#" + fn main() { println!("bin"); } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("--bins"), + execs().with_status(1).with_stderr_contains( + "error: Found argument '--bins' which wasn't expected, or isn't valid in this context", + ), + ); +} + +#[test] +fn run_with_filename() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + "src/bin/a.rs", + r#" + extern crate foo; + fn main() { println!("hello a.rs"); } + "#, + ) + .file( + "examples/a.rs", + r#" + fn main() { println!("example"); } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("--bin").arg("bin.rs"), + execs() + .with_status(101) + .with_stderr("[ERROR] no bin target named `bin.rs`"), + ); + + assert_that( + p.cargo("run").arg("--bin").arg("a.rs"), + execs().with_status(101).with_stderr( + "\ +[ERROR] no bin target named `a.rs` + +Did you mean `a`?", + ), + ); + + assert_that( + p.cargo("run").arg("--example").arg("example.rs"), + execs() + .with_status(101) + .with_stderr("[ERROR] no example target named `example.rs`"), + ); + + assert_that( + p.cargo("run").arg("--example").arg("a.rs"), + execs().with_status(101).with_stderr( + "\ +[ERROR] no example target named `a.rs` + +Did you mean `a`?", + ), + ); +} + +#[test] +fn either_name_or_example() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/bin/a.rs", + r#" + fn main() { println!("hello a.rs"); } + "#, + ) + .file( + "examples/b.rs", + r#" + fn main() { println!("hello b.rs"); } + "#, + ) + .build(); + + assert_that( + p.cargo("run") + .arg("--bin") + .arg("a") + .arg("--example") + .arg("b"), + execs().with_status(101).with_stderr( + "[ERROR] `cargo run` can run at most one \ + executable, but multiple were \ + specified", + ), + ); +} + +#[test] +fn one_bin_multiple_examples() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + "src/bin/main.rs", + r#" + fn main() { println!("hello main.rs"); } + "#, + ) + .file( + "examples/a.rs", + r#" + fn main() { println!("hello a.rs"); } + "#, + ) + .file( + "examples/b.rs", + r#" + fn main() { println!("hello b.rs"); } + "#, + ) + .build(); + + assert_that( + p.cargo("run"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[/]debug[/]main[EXE]`", + dir = path2url(p.root()) + )) + .with_stdout("hello main.rs"), + ); +} + +#[test] +fn example_with_release_flag() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "*" + path = "bar" + "#, + ) + .file( + "examples/a.rs", + r#" + extern crate bar; + + fn main() { + if cfg!(debug_assertions) { + println!("slow1") + } else { + println!("fast1") + } + bar::baz(); + } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + pub fn baz() { + if cfg!(debug_assertions) { + println!("slow2") + } else { + println!("fast2") + } + } + "#, + ) + .build(); + + assert_that( + p.cargo("run") + .arg("-v") + .arg("--release") + .arg("--example") + .arg("a"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ({url}/bar) +[RUNNING] `rustc --crate-name bar bar[/]src[/]bar.rs --crate-type lib \ + --emit=dep-info,link \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]release[/]deps \ + -L dependency={dir}[/]target[/]release[/]deps` +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name a examples[/]a.rs --crate-type bin \ + --emit=dep-info,link \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]release[/]examples \ + -L dependency={dir}[/]target[/]release[/]deps \ + --extern bar={dir}[/]target[/]release[/]deps[/]libbar-[..].rlib` +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `target[/]release[/]examples[/]a[EXE]` +", + dir = p.root().display(), + url = path2url(p.root()), + )) + .with_stdout( + "\ +fast1 +fast2", + ), + ); + + assert_that( + p.cargo("run").arg("-v").arg("--example").arg("a"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ({url}/bar) +[RUNNING] `rustc --crate-name bar bar[/]src[/]bar.rs --crate-type lib \ + --emit=dep-info,link \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]debug[/]deps \ + -L dependency={dir}[/]target[/]debug[/]deps` +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name a examples[/]a.rs --crate-type bin \ + --emit=dep-info,link \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir {dir}[/]target[/]debug[/]examples \ + -L dependency={dir}[/]target[/]debug[/]deps \ + --extern bar={dir}[/]target[/]debug[/]deps[/]libbar-[..].rlib` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[/]debug[/]examples[/]a[EXE]` +", + dir = p.root().display(), + url = path2url(p.root()), + )) + .with_stdout( + "\ +slow1 +slow2", + ), + ); +} + +#[test] +fn run_dylib_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { bar::bar(); } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate-type = ["dylib"] + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + assert_that( + p.cargo("run").arg("hello").arg("world"), + execs().with_status(0), + ); +} + +#[test] +fn release_works() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { if cfg!(debug_assertions) { panic!() } } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("--release"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `target[/]release[/]foo[EXE]` +", + dir = path2url(p.root()), + )), + ); + assert_that(&p.release_bin("foo"), existing_file()); +} + +#[test] +fn run_bin_different_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "bar" + "#, + ) + .file( + "src/bar.rs", + r#" + fn main() { } + "#, + ) + .build(); + + assert_that(p.cargo("run"), execs().with_status(0)); +} + +#[test] +fn dashes_are_forwarded() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + let s: Vec = std::env::args().collect(); + assert_eq!(s[1], "a"); + assert_eq!(s[2], "--"); + assert_eq!(s[3], "b"); + } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("--").arg("a").arg("--").arg("b"), + execs().with_status(0), + ); +} + +#[test] +fn run_from_executable_folder() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { println!("hello"); } + "#, + ) + .build(); + + let cwd = p.root().join("target").join("debug"); + p.cargo("build").exec_with_output().unwrap(); + + assert_that( + p.cargo("run").cwd(cwd), + execs() + .with_status(0) + .with_stderr( + "\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\ + [RUNNING] `.[/]foo[EXE]`", + ) + .with_stdout("hello"), + ); +} + +#[test] +fn run_with_library_paths() { + let p = project("foo"); + + // Only link search directories within the target output directory are + // propagated through to dylib_path_envvar() (see #3366). 
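+    // The two directories below also stress awkward path shapes: a literal
+    // backslash and embedded '=' signs, which must survive the round trip
+    // through `cargo:rustc-link-search` output and the dynamic-library
+    // search-path variable returned by dylib_path_envvar().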
+ let mut dir1 = p.target_debug_dir(); + dir1.push("foo\\backslash"); + + let mut dir2 = p.target_debug_dir(); + dir2.push("dir=containing=equal=signs"); + + let p = p.file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ).file( + "build.rs", + &format!( + r##" + fn main() {{ + println!(r#"cargo:rustc-link-search=native={}"#); + println!(r#"cargo:rustc-link-search={}"#); + }} + "##, + dir1.display(), + dir2.display() + ), + ) + .file( + "src/main.rs", + &format!( + r##" + fn main() {{ + let search_path = std::env::var_os("{}").unwrap(); + let paths = std::env::split_paths(&search_path).collect::>(); + assert!(paths.contains(&r#"{}"#.into())); + assert!(paths.contains(&r#"{}"#.into())); + }} + "##, + dylib_path_envvar(), + dir1.display(), + dir2.display() + ), + ) + .build(); + + assert_that(p.cargo("run"), execs().with_status(0)); +} + +#[test] +fn library_paths_sorted_alphabetically() { + let p = project("foo"); + + let mut dir1 = p.target_debug_dir(); + dir1.push("zzzzzzz"); + + let mut dir2 = p.target_debug_dir(); + dir2.push("BBBBBBB"); + + let mut dir3 = p.target_debug_dir(); + dir3.push("aaaaaaa"); + + let p = p.file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ).file( + "build.rs", + &format!( + r##" + fn main() {{ + println!(r#"cargo:rustc-link-search=native={}"#); + println!(r#"cargo:rustc-link-search=native={}"#); + println!(r#"cargo:rustc-link-search=native={}"#); + }} + "##, + dir1.display(), + dir2.display(), + dir3.display() + ), + ) + .file( + "src/main.rs", + &format!( + r##" + fn main() {{ + let search_path = std::env::var_os("{}").unwrap(); + let paths = std::env::split_paths(&search_path).collect::>(); + // ASCII case-sensitive sort + assert_eq!("BBBBBBB", paths[0].file_name().unwrap().to_string_lossy()); + assert_eq!("aaaaaaa", paths[1].file_name().unwrap().to_string_lossy()); + assert_eq!("zzzzzzz", paths[2].file_name().unwrap().to_string_lossy()); + }} + "##, + dylib_path_envvar() + ), + ) + .build(); + + assert_that(p.cargo("run"), execs().with_status(0)); +} + +#[test] +fn fail_no_extra_verbose() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + std::process::exit(1); + } + "#, + ) + .build(); + + assert_that( + p.cargo("run").arg("-q"), + execs().with_status(1).with_stdout("").with_stderr(""), + ); +} + +#[test] +fn run_multiple_packages() { + let p = project("foo") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [workspace] + + [dependencies] + d1 = { path = "d1" } + d2 = { path = "d2" } + d3 = { path = "../d3" } # outside of the workspace + + [[bin]] + name = "foo" + "#, + ) + .file("foo/src/foo.rs", "fn main() { println!(\"foo\"); }") + .file( + "foo/d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d1" + "#, + ) + .file("foo/d1/src/lib.rs", "") + .file("foo/d1/src/main.rs", "fn main() { println!(\"d1\"); }") + .file( + "foo/d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d2" + "#, + ) + .file("foo/d2/src/main.rs", "fn main() { println!(\"d2\"); }") + .file( + "d3/Cargo.toml", + r#" + [package] + name = "d3" + version = "0.0.1" + authors = [] + "#, + ) + .file("d3/src/main.rs", "fn main() { println!(\"d2\"); }") + .build(); + + let cargo = || 
{
+ let mut process_builder = p.cargo("run");
+ process_builder.cwd(p.root().join("foo"));
+ process_builder
+ };
+
+ assert_that(
+ cargo().arg("-p").arg("d1"),
+ execs().with_status(0).with_stdout("d1"),
+ );
+
+ assert_that(
+ cargo().arg("-p").arg("d2").arg("--bin").arg("d2"),
+ execs().with_status(0).with_stdout("d2"),
+ );
+
+ assert_that(cargo(), execs().with_status(0).with_stdout("foo"));
+
+ assert_that(cargo().arg("-p").arg("d1").arg("-p").arg("d2"),
+ execs()
+ .with_status(1)
+ .with_stderr_contains("error: The argument '--package <SPEC>' was provided more than once, but cannot be used multiple times"));
+
+ assert_that(
+ cargo().arg("-p").arg("d3"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains("[ERROR] package `d3` is not a member of the workspace"),
+ );
+}
+
+#[test]
+fn explicit_bin_with_args() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ assert_eq!(std::env::args().nth(1).unwrap(), "hello");
+ assert_eq!(std::env::args().nth(2).unwrap(), "world");
+ }
+ "#,
+ )
+ .build();
+
+ assert_that(p.cargo("run --bin foo hello world"), execs().with_status(0));
+}
diff --git a/tests/testsuite/rustc.rs b/tests/testsuite/rustc.rs
new file mode 100644
index 000000000..dd7d51b50
--- /dev/null
+++ b/tests/testsuite/rustc.rs
@@ -0,0 +1,597 @@
+use cargotest::support::{execs, project};
+use hamcrest::assert_that;
+
+const CARGO_RUSTC_ERROR: &'static str =
+ "[ERROR] extra arguments to `rustc` can only be passed to one target, consider filtering
+the package by passing e.g. `--lib` or `--bin NAME` to specify a single target";
+
+#[test]
+fn build_lib_for_foo() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {}
+ "#,
+ )
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ assert_that(
+ p.cargo("rustc").arg("--lib").arg("-v"),
+ execs().with_status(0).with_stderr(format!(
+ "\
+[COMPILING] foo v0.0.1 ({url})
+[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
+ --emit=dep-info,link -C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency={dir}[/]target[/]debug[/]deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+ dir = p.root().display(),
+ url = p.url()
+ )),
+ );
+}
+
+#[test]
+fn lib() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {}
+ "#,
+ )
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ assert_that(
+ p.cargo("rustc")
+ .arg("--lib")
+ .arg("-v")
+ .arg("--")
+ .arg("-C")
+ .arg("debug-assertions=off"),
+ execs().with_status(0).with_stderr(format!(
+ "\
+[COMPILING] foo v0.0.1 ({url})
+[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
+ --emit=dep-info,link -C debuginfo=2 \
+ -C debug-assertions=off \
+ -C metadata=[..] \
+ --out-dir [..] \
+ -L dependency={dir}[/]target[/]debug[/]deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+", + dir = p.root().display(), + url = p.url() + )), + ) +} + +#[test] +fn build_main_and_allow_unstable_options() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("rustc -v --bin foo -- -C debug-assertions"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] {name} v{version} ({url}) +[RUNNING] `rustc --crate-name {name} src[/]lib.rs --crate-type lib \ + --emit=dep-info,link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps` +[RUNNING] `rustc --crate-name {name} src[/]main.rs --crate-type bin \ + --emit=dep-info,link -C debuginfo=2 \ + -C debug-assertions \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency={dir}[/]target[/]debug[/]deps \ + --extern {name}={dir}[/]target[/]debug[/]deps[/]lib{name}-[..].rlib` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.root().display(), + url = p.url(), + name = "foo", + version = "0.0.1" + )), + ); +} + +#[test] +fn fails_when_trying_to_build_main_and_lib_with_args() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("rustc") + .arg("-v") + .arg("--") + .arg("-C") + .arg("debug-assertions"), + execs().with_status(101).with_stderr(CARGO_RUSTC_ERROR), + ); +} + +#[test] +fn build_with_args_to_one_of_multiple_binaries() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/bin/foo.rs", + r#" + fn main() {} + "#, + ) + .file( + "src/bin/bar.rs", + r#" + fn main() {} + "#, + ) + .file( + "src/bin/baz.rs", + r#" + fn main() {} + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("rustc -v --bin bar -- -C debug-assertions"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib --emit=dep-info,link \ + -C debuginfo=2 -C metadata=[..] \ + --out-dir [..]` +[RUNNING] `rustc --crate-name bar src[/]bin[/]bar.rs --crate-type bin --emit=dep-info,link \ + -C debuginfo=2 -C debug-assertions [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + url = p.url() + )), + ); +} + +#[test] +fn fails_with_args_to_all_binaries() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/bin/foo.rs", + r#" + fn main() {} + "#, + ) + .file( + "src/bin/bar.rs", + r#" + fn main() {} + "#, + ) + .file( + "src/bin/baz.rs", + r#" + fn main() {} + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("rustc") + .arg("-v") + .arg("--") + .arg("-C") + .arg("debug-assertions"), + execs().with_status(101).with_stderr(CARGO_RUSTC_ERROR), + ); +} + +#[test] +fn build_with_args_to_one_of_multiple_tests() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("tests/foo.rs", r#" "#) + .file("tests/bar.rs", r#" "#) + .file("tests/baz.rs", r#" "#) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("rustc -v --test bar -- -C debug-assertions"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib --emit=dep-info,link \ + -C debuginfo=2 -C metadata=[..] \ + --out-dir [..]` +[RUNNING] `rustc --crate-name bar tests[/]bar.rs --emit=dep-info,link -C debuginfo=2 \ + -C debug-assertions [..]--test[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + url = p.url() + )), + ); +} + +#[test] +fn build_foo_with_bar_dependency() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { + bar::baz() + } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + + assert_that( + foo.cargo("rustc") + .arg("-v") + .arg("--") + .arg("-C") + .arg("debug-assertions"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] bar v0.1.0 ([..]) +[RUNNING] `[..] -C debuginfo=2 [..]` +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `[..] -C debuginfo=2 -C debug-assertions [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + url = foo.url() + )), + ); +} + +#[test] +fn build_only_bar_dependency() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { + bar::baz() + } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + + assert_that( + foo.cargo("rustc -v -p bar -- -C debug-assertions"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] bar v0.1.0 ([..]) +[RUNNING] `rustc --crate-name bar [..] --crate-type lib [..] -C debug-assertions [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+",
+ ),
+ );
+}
+
+#[test]
+fn targets_selected_default() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ assert_that(
+ p.cargo("rustc").arg("-v"),
+ execs().with_status(0)
+ // bin
+ .with_stderr_contains("\
+ [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
+ --emit=dep-info,link[..]")
+ // bench
+ .with_stderr_does_not_contain("\
+ [RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \
+ -C opt-level=3 --test [..]")
+ // unit test
+ .with_stderr_does_not_contain("\
+ [RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \
+ -C debuginfo=2 --test [..]"),
+ );
+}
+
+#[test]
+fn targets_selected_all() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+ assert_that(
+ p.cargo("rustc").arg("-v").arg("--all-targets"),
+ execs().with_status(0)
+ // bin
+ .with_stderr_contains("\
+ [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
+ --emit=dep-info,link[..]")
+ // bench
+ .with_stderr_contains("\
+ [RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \
+ -C opt-level=3 --test [..]")
+ // unit test
+ .with_stderr_contains("\
+ [RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \
+ -C debuginfo=2 --test [..]"),
+ );
+}
+
+#[test]
+fn fail_with_multiple_packages() {
+ let foo = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "../bar"
+
+ [dependencies.baz]
+ path = "../baz"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {}
+ "#,
+ )
+ .build();
+
+ let _bar = project("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(flag = "1") { println!("Yeah from bar!"); }
+ }
+ "#,
+ )
+ .build();
+
+ let _baz = project("baz")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ if cfg!(flag = "1") { println!("Yeah from baz!"); }
+ }
+ "#,
+ )
+ .build();
+
+ assert_that(
+ foo.cargo("rustc -v -p bar -p baz"),
+ execs().with_status(1).with_stderr_contains(
+ "\
+error: The argument '--package <SPEC>' was provided more than once, \
+ but cannot be used multiple times
+",
+ ),
+ );
+}
+
+#[test]
+fn rustc_with_other_profile() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dev-dependencies]
+ a = { path = "a" }
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
+ #[cfg(test)] extern crate a;
+
+ #[test]
+ fn foo() {}
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
+ [package]
+ name = "a"
+ version = "0.1.0"
+ authors = []
+ "#,
+ )
+ .file("a/src/lib.rs", "")
+ .build();
+
+ assert_that(
+ p.cargo("rustc").arg("--profile").arg("test"),
+ execs().with_status(0),
+ );
+}
diff --git a/tests/testsuite/rustdoc.rs b/tests/testsuite/rustdoc.rs
new file mode 100644
index 000000000..06e20c20e
--- /dev/null
+++ b/tests/testsuite/rustdoc.rs
@@ -0,0 +1,253 @@
+use cargotest::support::{execs, project};
+use hamcrest::assert_that;
+
+#[test]
+fn rustdoc_simple() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+
.file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("rustdoc").arg("-v"), + execs().with_status(0).with_stderr(format!( + "\ +[DOCUMENTING] foo v0.0.1 ({url}) +[RUNNING] `rustdoc --crate-name foo src[/]lib.rs \ + -o {dir}[/]target[/]doc \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.root().display(), + url = p.url() + )), + ); +} + +#[test] +fn rustdoc_args() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("rustdoc").arg("-v").arg("--").arg("--cfg=foo"), + execs().with_status(0).with_stderr(format!( + "\ +[DOCUMENTING] foo v0.0.1 ({url}) +[RUNNING] `rustdoc --crate-name foo src[/]lib.rs \ + -o {dir}[/]target[/]doc \ + --cfg=foo \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.root().display(), + url = p.url() + )), + ); +} + +#[test] +fn rustdoc_foo_with_bar_dependency() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + pub fn foo() {} + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + + assert_that( + foo.cargo("rustdoc").arg("-v").arg("--").arg("--cfg=foo"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] bar v0.0.1 ([..]) +[RUNNING] `rustc [..]bar[/]src[/]lib.rs [..]` +[DOCUMENTING] foo v0.0.1 ({url}) +[RUNNING] `rustdoc --crate-name foo src[/]lib.rs \ + -o {dir}[/]target[/]doc \ + --cfg=foo \ + -L dependency={dir}[/]target[/]debug[/]deps \ + --extern [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = foo.root().display(), + url = foo.url() + )), + ); +} + +#[test] +fn rustdoc_only_bar_dependency() { + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { + bar::baz() + } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn baz() {} + "#, + ) + .build(); + + assert_that( + foo.cargo("rustdoc") + .arg("-v") + .arg("-p") + .arg("bar") + .arg("--") + .arg("--cfg=foo"), + execs().with_status(0).with_stderr(format!( + "\ +[DOCUMENTING] bar v0.0.1 ([..]) +[RUNNING] `rustdoc --crate-name bar [..]bar[/]src[/]lib.rs \ + -o {dir}[/]target[/]doc \ + --cfg=foo \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dir = foo.root().display() + )), + ); +} + +#[test] +fn rustdoc_same_name_documents_lib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + assert_that( + p.cargo("rustdoc").arg("-v").arg("--").arg("--cfg=foo"), + execs().with_status(0).with_stderr(format!( + "\ +[DOCUMENTING] foo v0.0.1 ([..]) +[RUNNING] `rustdoc --crate-name foo src[/]lib.rs \ + -o {dir}[/]target[/]doc \ + --cfg=foo \ + -L dependency={dir}[/]target[/]debug[/]deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.root().display() + )), + ); +} + +#[test] +fn features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + quux = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("rustdoc --verbose --features quux"), + execs() + .with_status(0) + .with_stderr_contains("[..]feature=[..]quux[..]"), + ); +} diff --git a/tests/testsuite/rustdocflags.rs b/tests/testsuite/rustdocflags.rs new file mode 100644 index 000000000..3102d2408 --- /dev/null +++ b/tests/testsuite/rustdocflags.rs @@ -0,0 +1,163 @@ +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn parses_env() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg=foo[..]`"), + ); +} + +#[test] +fn parses_config() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + rustdocflags = ["--cfg", "foo"] + "#, + ) + .build(); + + assert_that( + p.cargo("doc").arg("-v"), + execs() + .with_status(0) + .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg foo[..]`"), + ); +} + +#[test] +fn bad_flags() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("doc").env("RUSTDOCFLAGS", "--bogus"), + execs().with_status(101), + ); +} + +#[test] +fn rerun() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo"), + execs().with_status(0), + ); + assert_that( + p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo"), + execs() + .with_status(0) + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"), + ); + assert_that( + p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=bar"), + execs().with_status(0).with_stderr( + "\ +[DOCUMENTING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn rustdocflags_passed_to_rustdoc_through_cargo_test() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file( + "src/lib.rs", + r#" + //! ``` + //! assert!(cfg!(do_not_choke)); + //! 
```
+ "#,
+ )
+ .build();
+
+ assert_that(
+ p.cargo("test")
+ .arg("--doc")
+ .env("RUSTDOCFLAGS", "--cfg do_not_choke"),
+ execs().with_status(0),
+ );
+}
+
+#[test]
+fn rustdocflags_passed_to_rustdoc_through_cargo_test_only_once() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ assert_that(
+ p.cargo("test")
+ .arg("--doc")
+ .env("RUSTDOCFLAGS", "--markdown-no-toc"),
+ execs().with_status(0),
+ );
+}
diff --git a/tests/testsuite/rustflags.rs b/tests/testsuite/rustflags.rs
new file mode 100644
index 000000000..1e417a24b
--- /dev/null
+++ b/tests/testsuite/rustflags.rs
@@ -0,0 +1,1621 @@
+use std::io::Write;
+use std::fs::{self, File};
+
+use cargotest::rustc_host;
+use cargotest::support::{execs, paths, project, project_in_home};
+use hamcrest::assert_that;
+
+#[test]
+fn env_rustflags_normal_source() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .file("tests/c.rs", "#[test] fn f() { }")
+ .file(
+ "benches/d.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+ )
+ .build();
+
+ // Use RUSTFLAGS to pass an argument that will generate an error
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "-Z bogus").arg("--lib"),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "-Z bogus").arg("--bin=a"),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("build")
+ .env("RUSTFLAGS", "-Z bogus")
+ .arg("--example=b"),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("test").env("RUSTFLAGS", "-Z bogus"),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("bench").env("RUSTFLAGS", "-Z bogus"),
+ execs().with_status(101),
+ );
+}
+
+#[test]
+fn env_rustflags_build_script() {
+ // RUSTFLAGS should be passed to rustc for build scripts
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { }
+ #[cfg(not(foo))]
+ fn main() { }
+ "#,
+ )
+ .build();
+
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_status(0),
+ );
+}
+
+#[test]
+fn env_rustflags_build_script_dep() {
+ // RUSTFLAGS should be passed to rustc for build scripts
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let foo = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+
+ [build-dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { }
+ "#,
+ )
+ .build();
+ let _bar = project("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ fn bar() { }
+ #[cfg(not(foo))]
+ fn bar() { }
+ "#,
+ )
+ .build();
+
+ assert_that(
+ foo.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_status(0),
+ );
+}
+
+#[test]
+fn env_rustflags_plugin() {
+ // RUSTFLAGS should be passed to rustc for plugins
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
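+ // (Note: the duplicate `fn main` gated on #[cfg(not(foo))] below only
+ // compiles away when `--cfg foo` actually reaches rustc, so a successful
+ // build is the proof that RUSTFLAGS was applied.)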
+ let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + "#, + ) + .file( + "src/lib.rs", + r#" + fn main() { } + #[cfg(not(foo))] + fn main() { } + "#, + ) + .build(); + + assert_that( + p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0), + ); +} + +#[test] +fn env_rustflags_plugin_dep() { + // RUSTFLAGS should be passed to rustc for plugins + // when --target is not specified. + // In this test if --cfg foo is not passed the build will fail. + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/lib.rs", + r#" + fn foo() { } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + + [lib] + name = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(not(foo))] + fn bar() { } + "#, + ) + .build(); + + assert_that( + foo.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0), + ); +} + +#[test] +fn env_rustflags_normal_source_with_target() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file( + "benches/d.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#, + ) + .build(); + + let host = &rustc_host(); + + // Use RUSTFLAGS to pass an argument that will generate an error + assert_that( + p.cargo("build") + .env("RUSTFLAGS", "-Z bogus") + .arg("--lib") + .arg("--target") + .arg(host), + execs().with_status(101), + ); + assert_that( + p.cargo("build") + .env("RUSTFLAGS", "-Z bogus") + .arg("--bin=a") + .arg("--target") + .arg(host), + execs().with_status(101), + ); + assert_that( + p.cargo("build") + .env("RUSTFLAGS", "-Z bogus") + .arg("--example=b") + .arg("--target") + .arg(host), + execs().with_status(101), + ); + assert_that( + p.cargo("test") + .env("RUSTFLAGS", "-Z bogus") + .arg("--target") + .arg(host), + execs().with_status(101), + ); + assert_that( + p.cargo("bench") + .env("RUSTFLAGS", "-Z bogus") + .arg("--target") + .arg(host), + execs().with_status(101), + ); +} + +#[test] +fn env_rustflags_build_script_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { } + #[cfg(foo)] + fn main() { } + "#, + ) + .build(); + + let host = rustc_host(); + assert_that( + p.cargo("build") + .env("RUSTFLAGS", "--cfg foo") + .arg("--target") + .arg(host), + execs().with_status(0), + ); +} + +#[test] +fn env_rustflags_build_script_dep_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. 
+ let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + + [build-dependencies.bar] + path = "../bar" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + "#, + ) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(foo)] + fn bar() { } + "#, + ) + .build(); + + let host = rustc_host(); + assert_that( + foo.cargo("build") + .env("RUSTFLAGS", "--cfg foo") + .arg("--target") + .arg(host), + execs().with_status(0), + ); +} + +#[test] +fn env_rustflags_plugin_with_target() { + // RUSTFLAGS should not be passed to rustc for plugins + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + "#, + ) + .file( + "src/lib.rs", + r#" + fn main() { } + #[cfg(foo)] + fn main() { } + "#, + ) + .build(); + + let host = rustc_host(); + assert_that( + p.cargo("build") + .env("RUSTFLAGS", "--cfg foo") + .arg("--target") + .arg(host), + execs().with_status(0), + ); +} + +#[test] +fn env_rustflags_plugin_dep_with_target() { + // RUSTFLAGS should not be passed to rustc for plugins + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/lib.rs", + r#" + fn foo() { } + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + + [lib] + name = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(foo)] + fn bar() { } + "#, + ) + .build(); + + let host = rustc_host(); + assert_that( + foo.cargo("build") + .env("RUSTFLAGS", "--cfg foo") + .arg("--target") + .arg(host), + execs().with_status(0), + ); +} + +#[test] +fn env_rustflags_recompile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + // Setting RUSTFLAGS forces a recompile + assert_that( + p.cargo("build").env("RUSTFLAGS", "-Z bogus"), + execs().with_status(101), + ); +} + +#[test] +fn env_rustflags_recompile2() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0), + ); + // Setting RUSTFLAGS forces a recompile + assert_that( + p.cargo("build").env("RUSTFLAGS", "-Z bogus"), + execs().with_status(101), + ); +} + +#[test] +fn env_rustflags_no_recompile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0), + ); + assert_that( + p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_stdout("").with_status(0), + ); +} + +#[test] +fn build_rustflags_normal_source() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + 
.file("src/lib.rs", "")
+ .file("src/bin/a.rs", "fn main() {}")
+ .file("examples/b.rs", "fn main() {}")
+ .file("tests/c.rs", "#[test] fn f() { }")
+ .file(
+ "benches/d.rs",
+ r#"
+ #![feature(test)]
+ extern crate test;
+ #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["-Z", "bogus"]
+ "#,
+ )
+ .build();
+
+ assert_that(p.cargo("build").arg("--lib"), execs().with_status(101));
+ assert_that(p.cargo("build").arg("--bin=a"), execs().with_status(101));
+ assert_that(
+ p.cargo("build").arg("--example=b"),
+ execs().with_status(101),
+ );
+ assert_that(p.cargo("test"), execs().with_status(101));
+ assert_that(p.cargo("bench"), execs().with_status(101));
+}
+
+#[test]
+fn build_rustflags_build_script() {
+ // RUSTFLAGS should be passed to rustc for build scripts
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { }
+ #[cfg(not(foo))]
+ fn main() { }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+
+ assert_that(p.cargo("build"), execs().with_status(0));
+}
+
+#[test]
+fn build_rustflags_build_script_dep() {
+ // RUSTFLAGS should be passed to rustc for build scripts
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let foo = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ build = "build.rs"
+
+ [build-dependencies.bar]
+ path = "../bar"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() { }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+ let _bar = project("bar")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ fn bar() { }
+ #[cfg(not(foo))]
+ fn bar() { }
+ "#,
+ )
+ .build();
+
+ assert_that(foo.cargo("build"), execs().with_status(0));
+}
+
+#[test]
+fn build_rustflags_plugin() {
+ // RUSTFLAGS should be passed to rustc for plugins
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [lib]
+ name = "foo"
+ plugin = true
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ fn main() { }
+ #[cfg(not(foo))]
+ fn main() { }
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
+ [build]
+ rustflags = ["--cfg", "foo"]
+ "#,
+ )
+ .build();
+
+ assert_that(p.cargo("build"), execs().with_status(0));
+}
+
+#[test]
+fn build_rustflags_plugin_dep() {
+ // RUSTFLAGS should be passed to rustc for plugins
+ // when --target is not specified.
+ // In this test if --cfg foo is not passed the build will fail.
+ let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/lib.rs", + r#" + fn foo() { } + "#, + ) + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + + [lib] + name = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(not(foo))] + fn bar() { } + "#, + ) + .build(); + + assert_that(foo.cargo("build"), execs().with_status(0)); +} + +#[test] +fn build_rustflags_normal_source_with_target() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file( + "benches/d.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#, + ) + .file( + ".cargo/config", + r#" + [build] + rustflags = ["-Z", "bogus"] + "#, + ) + .build(); + + let ref host = rustc_host(); + + // Use RUSTFLAGS to pass an argument that will generate an error + assert_that( + p.cargo("build").arg("--lib").arg("--target").arg(host), + execs().with_status(101), + ); + assert_that( + p.cargo("build").arg("--bin=a").arg("--target").arg(host), + execs().with_status(101), + ); + assert_that( + p.cargo("build") + .arg("--example=b") + .arg("--target") + .arg(host), + execs().with_status(101), + ); + assert_that( + p.cargo("test").arg("--target").arg(host), + execs().with_status(101), + ); + assert_that( + p.cargo("bench").arg("--target").arg(host), + execs().with_status(101), + ); +} + +#[test] +fn build_rustflags_build_script_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { } + #[cfg(foo)] + fn main() { } + "#, + ) + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + + let host = rustc_host(); + assert_that( + p.cargo("build").arg("--target").arg(host), + execs().with_status(0), + ); +} + +#[test] +fn build_rustflags_build_script_dep_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. 
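+ // (The duplicate in `bar` below is gated on #[cfg(foo)], the inverse of
+ // the no-target tests: it only compiles in, and fails the build, if the
+ // configured rustflags leak into a --target build.)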
+ let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + + [build-dependencies.bar] + path = "../bar" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { } + "#, + ) + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + "#, + ) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(foo)] + fn bar() { } + "#, + ) + .build(); + + let host = rustc_host(); + assert_that( + foo.cargo("build").arg("--target").arg(host), + execs().with_status(0), + ); +} + +#[test] +fn build_rustflags_plugin_with_target() { + // RUSTFLAGS should not be passed to rustc for plugins + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + "#, + ) + .file( + "src/lib.rs", + r#" + fn main() { } + #[cfg(foo)] + fn main() { } + "#, + ) + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + + let host = rustc_host(); + assert_that( + p.cargo("build").arg("--target").arg(host), + execs().with_status(0), + ); +} + +#[test] +fn build_rustflags_plugin_dep_with_target() { + // RUSTFLAGS should not be passed to rustc for plugins + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/lib.rs", + r#" + fn foo() { } + "#, + ) + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + let _bar = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + + [lib] + name = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(foo)] + fn bar() { } + "#, + ) + .build(); + + let host = rustc_host(); + assert_that( + foo.cargo("build").arg("--target").arg(host), + execs().with_status(0), + ); +} + +#[test] +fn build_rustflags_recompile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + // Setting RUSTFLAGS forces a recompile + let config = r#" + [build] + rustflags = ["-Z", "bogus"] + "#; + let config_file = paths::root().join("foo/.cargo/config"); + fs::create_dir_all(config_file.parent().unwrap()).unwrap(); + let mut config_file = File::create(config_file).unwrap(); + config_file.write_all(config.as_bytes()).unwrap(); + + assert_that(p.cargo("build"), execs().with_status(101)); +} + +#[test] +fn build_rustflags_recompile2() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0), + ); + + // Setting RUSTFLAGS forces a recompile + let config = r#" + [build] + rustflags = ["-Z", "bogus"] + "#; + let config_file = paths::root().join("foo/.cargo/config"); + fs::create_dir_all(config_file.parent().unwrap()).unwrap(); + let mut config_file = File::create(config_file).unwrap(); + 
config_file.write_all(config.as_bytes()).unwrap(); + + assert_that(p.cargo("build"), execs().with_status(101)); +} + +#[test] +fn build_rustflags_no_recompile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + + assert_that( + p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_status(0), + ); + assert_that( + p.cargo("build").env("RUSTFLAGS", "--cfg foo"), + execs().with_stdout("").with_status(0), + ); +} + +#[test] +fn build_rustflags_with_home_config() { + // We need a config file inside the home directory + let home = paths::home(); + let home_config = home.join(".cargo"); + fs::create_dir(&home_config).unwrap(); + File::create(&home_config.join("config")) + .unwrap() + .write_all( + br#" + [build] + rustflags = ["-Cllvm-args=-x86-asm-syntax=intel"] + "#, + ) + .unwrap(); + + // And we need the project to be inside the home directory + // so the walking process finds the home project twice. + let p = project_in_home("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); +} + +#[test] +fn target_rustflags_normal_source() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file( + "benches/d.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#, + ) + .file( + ".cargo/config", + &format!( + " + [target.{}] + rustflags = [\"-Z\", \"bogus\"] + ", + rustc_host() + ), + ) + .build(); + + assert_that(p.cargo("build").arg("--lib"), execs().with_status(101)); + assert_that(p.cargo("build").arg("--bin=a"), execs().with_status(101)); + assert_that( + p.cargo("build").arg("--example=b"), + execs().with_status(101), + ); + assert_that(p.cargo("test"), execs().with_status(101)); + assert_that(p.cargo("bench"), execs().with_status(101)); +} + +// target.{}.rustflags takes precedence over build.rustflags +#[test] +fn target_rustflags_precedence() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + &format!( + " + [build] + rustflags = [\"--cfg\", \"foo\"] + + [target.{}] + rustflags = [\"-Z\", \"bogus\"] + ", + rustc_host() + ), + ) + .build(); + + assert_that(p.cargo("build").arg("--lib"), execs().with_status(101)); + assert_that(p.cargo("build").arg("--bin=a"), execs().with_status(101)); + assert_that( + p.cargo("build").arg("--example=b"), + execs().with_status(101), + ); + assert_that(p.cargo("test"), execs().with_status(101)); + assert_that(p.cargo("bench"), execs().with_status(101)); +} + +#[test] +fn cfg_rustflags_normal_source() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "pub fn t() {}") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file( + ".cargo/config", + &format!( + r#" + [target.'cfg({})'] + rustflags = ["--cfg", "bar"] + "#, + if rustc_host().contains("-windows-") { + "windows" + } else { + 
"not(windows)" + } + ), + ) + .build(); + + assert_that( + p.cargo("build").arg("--lib").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("build").arg("--bin=a").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("build").arg("--example=b").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("test").arg("--no-run").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("bench").arg("--no-run").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] release [optimized] target(s) in [..] +", + ), + ); +} + +// target.'cfg(...)'.rustflags takes precedence over build.rustflags +#[test] +fn cfg_rustflags_precedence() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "pub fn t() {}") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file( + ".cargo/config", + &format!( + r#" + [build] + rustflags = ["--cfg", "foo"] + + [target.'cfg({})'] + rustflags = ["--cfg", "bar"] + "#, + if rustc_host().contains("-windows-") { + "windows" + } else { + "not(windows)" + } + ), + ) + .build(); + + assert_that( + p.cargo("build").arg("--lib").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("build").arg("--bin=a").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("build").arg("--example=b").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("test").arg("--no-run").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("bench").arg("--no-run").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] release [optimized] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn target_rustflags_string_and_array_form1() { + let p1 = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + + assert_that( + p1.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + let p2 = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + rustflags = "--cfg foo" + "#, + ) + .build(); + + assert_that( + p2.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn target_rustflags_string_and_array_form2() { + let p1 = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + rustflags = ["--cfg", "foo"] + "#, + rustc_host() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p1.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + let p2 = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + rustflags = "--cfg foo" + "#, + rustc_host() + ), + ) + .file("src/lib.rs", "") + .build(); + + assert_that( + p2.cargo("build").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn two_matching_in_config() { + let p1 = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file( + ".cargo/config", + r#" + [target.'cfg(unix)'] + rustflags = ["--cfg", 'foo="a"'] + [target.'cfg(windows)'] + rustflags = ["--cfg", 'foo="a"'] + [target.'cfg(target_pointer_width = "32")'] + rustflags = ["--cfg", 'foo="b"'] + [target.'cfg(target_pointer_width = "64")'] + rustflags = ["--cfg", 'foo="b"'] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + if cfg!(foo = "a") { + println!("a"); + } else if cfg!(foo = "b") { + println!("b"); + } else { + panic!() + } + } + "#, + ) + .build(); + + assert_that(p1.cargo("run"), execs().with_status(0)); + assert_that( + p1.cargo("build"), + execs().with_status(0).with_stderr("[FINISHED] [..]"), + ); +} diff --git a/tests/testsuite/search.rs b/tests/testsuite/search.rs new file mode 100644 index 000000000..a0904da28 --- /dev/null +++ b/tests/testsuite/search.rs @@ -0,0 +1,341 @@ +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::PathBuf; + +use cargo::util::ProcessBuilder; +use cargotest; +use cargotest::support::execs; +use cargotest::support::git::repo; +use cargotest::support::paths; +use hamcrest::assert_that; +use url::Url; + +fn registry_path() -> PathBuf { + paths::root().join("registry") +} +fn registry() -> Url { + Url::from_file_path(&*registry_path()).ok().unwrap() +} +fn api_path() -> PathBuf { + paths::root().join("api") +} +fn api() -> Url { + Url::from_file_path(&*api_path()).ok().unwrap() +} + +fn setup() { + let config = paths::root().join(".cargo/config"); + fs::create_dir_all(config.parent().unwrap()).unwrap(); + fs::create_dir_all(&api_path().join("api/v1")).unwrap(); + + let _ = repo(®istry_path()) + .file( + "config.json", + &format!( + r#"{{ + "dl": "{0}", + "api": "{0}" + }}"#, + api() + ), + ) + .build(); +} + +fn cargo_process(s: &str) -> ProcessBuilder { + let mut b = cargotest::cargo_process(); + b.arg(s); + b +} + +#[test] +fn simple() { + setup(); + + let contents = r#"{ + "crates": [{ + "created_at": "2014-11-16T20:17:35Z", + "description": "Design by contract style assertions for Rust", + "documentation": null, + "downloads": 2, + "homepage": null, + "id": "hoare", + "keywords": [], + "license": null, + "links": { + "owners": "/api/v1/crates/hoare/owners", + "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies", + "version_downloads": "/api/v1/crates/hoare/downloads", + "versions": "/api/v1/crates/hoare/versions" + }, + "max_version": "0.1.1", + "name": "hoare", + "repository": "https://github.com/nick29581/libhoare", + "updated_at": "2014-11-20T21:49:21Z", + "versions": null + }], + "meta": { + "total": 1 + } + }"#; + let base = api_path().join("api/v1/crates"); + + // Older versions of curl don't peel off query parameters when looking for + // filenames, so just make both files. + // + // On windows, though, `?` is an invalid character, but we always build curl + // from source there anyway! 
+ File::create(&base) + .unwrap() + .write_all(contents.as_bytes()) + .unwrap(); + if !cfg!(windows) { + File::create(&base.with_file_name("crates?q=postgres&per_page=10")) + .unwrap() + .write_all(contents.as_bytes()) + .unwrap(); + } + + assert_that( + cargo_process("search") + .arg("postgres") + .arg("--index") + .arg(registry().to_string()), + execs().with_status(0).with_stdout_contains( + "hoare = \"0.1.1\" # Design by contract style assertions for Rust", + ), + ); +} + +// TODO: Deprecated +// remove once it has been decided '--host' can be safely removed +#[test] +fn simple_with_host() { + setup(); + + let contents = r#"{ + "crates": [{ + "created_at": "2014-11-16T20:17:35Z", + "description": "Design by contract style assertions for Rust", + "documentation": null, + "downloads": 2, + "homepage": null, + "id": "hoare", + "keywords": [], + "license": null, + "links": { + "owners": "/api/v1/crates/hoare/owners", + "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies", + "version_downloads": "/api/v1/crates/hoare/downloads", + "versions": "/api/v1/crates/hoare/versions" + }, + "max_version": "0.1.1", + "name": "hoare", + "repository": "https://github.com/nick29581/libhoare", + "updated_at": "2014-11-20T21:49:21Z", + "versions": null + }], + "meta": { + "total": 1 + } + }"#; + let base = api_path().join("api/v1/crates"); + + // Older versions of curl don't peel off query parameters when looking for + // filenames, so just make both files. + // + // On windows, though, `?` is an invalid character, but we always build curl + // from source there anyway! + File::create(&base) + .unwrap() + .write_all(contents.as_bytes()) + .unwrap(); + if !cfg!(windows) { + File::create(&base.with_file_name("crates?q=postgres&per_page=10")) + .unwrap() + .write_all(contents.as_bytes()) + .unwrap(); + } + + assert_that( + cargo_process("search") + .arg("postgres") + .arg("--host") + .arg(registry().to_string()), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[WARNING] The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +deprecated. The flag is being renamed to 'index', as the flag +wants the location of the index. Please use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning. 
+[UPDATING] registry `{reg}` +", + reg = registry() + )) + .with_stdout_contains( + "hoare = \"0.1.1\" # Design by contract style assertions for Rust", + ), + ); +} + +// TODO: Deprecated +// remove once it has been decided '--host' can be safely removed +#[test] +fn simple_with_index_and_host() { + setup(); + + let contents = r#"{ + "crates": [{ + "created_at": "2014-11-16T20:17:35Z", + "description": "Design by contract style assertions for Rust", + "documentation": null, + "downloads": 2, + "homepage": null, + "id": "hoare", + "keywords": [], + "license": null, + "links": { + "owners": "/api/v1/crates/hoare/owners", + "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies", + "version_downloads": "/api/v1/crates/hoare/downloads", + "versions": "/api/v1/crates/hoare/versions" + }, + "max_version": "0.1.1", + "name": "hoare", + "repository": "https://github.com/nick29581/libhoare", + "updated_at": "2014-11-20T21:49:21Z", + "versions": null + }], + "meta": { + "total": 1 + } + }"#; + let base = api_path().join("api/v1/crates"); + + // Older versions of curl don't peel off query parameters when looking for + // filenames, so just make both files. + // + // On windows, though, `?` is an invalid character, but we always build curl + // from source there anyway! + File::create(&base) + .unwrap() + .write_all(contents.as_bytes()) + .unwrap(); + if !cfg!(windows) { + File::create(&base.with_file_name("crates?q=postgres&per_page=10")) + .unwrap() + .write_all(contents.as_bytes()) + .unwrap(); + } + + assert_that( + cargo_process("search") + .arg("postgres") + .arg("--index") + .arg(registry().to_string()) + .arg("--host") + .arg(registry().to_string()), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[WARNING] The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +deprecated. The flag is being renamed to 'index', as the flag +wants the location of the index. Please use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning. +[UPDATING] registry `{reg}` +", + reg = registry() + )) + .with_stdout_contains( + "hoare = \"0.1.1\" # Design by contract style assertions for Rust", + ), + ); +} + +#[test] +fn multiple_query_params() { + setup(); + + let contents = r#"{ + "crates": [{ + "created_at": "2014-11-16T20:17:35Z", + "description": "Design by contract style assertions for Rust", + "documentation": null, + "downloads": 2, + "homepage": null, + "id": "hoare", + "keywords": [], + "license": null, + "links": { + "owners": "/api/v1/crates/hoare/owners", + "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies", + "version_downloads": "/api/v1/crates/hoare/downloads", + "versions": "/api/v1/crates/hoare/versions" + }, + "max_version": "0.1.1", + "name": "hoare", + "repository": "https://github.com/nick29581/libhoare", + "updated_at": "2014-11-20T21:49:21Z", + "versions": null + }], + "meta": { + "total": 1 + } + }"#; + let base = api_path().join("api/v1/crates"); + + // Older versions of curl don't peel off query parameters when looking for + // filenames, so just make both files. + // + // On windows, though, `?` is an invalid character, but we always build curl + // from source there anyway! 
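+ // Both search terms are expected to be folded into a single
+ // `q=postgres+sql` query, hence the fixture filename written below.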
+ File::create(&base)
+ .unwrap()
+ .write_all(contents.as_bytes())
+ .unwrap();
+ if !cfg!(windows) {
+ File::create(&base.with_file_name("crates?q=postgres+sql&per_page=10"))
+ .unwrap()
+ .write_all(contents.as_bytes())
+ .unwrap();
+ }
+
+ assert_that(
+ cargo_process("search")
+ .arg("postgres")
+ .arg("sql")
+ .arg("--index")
+ .arg(registry().to_string()),
+ execs().with_status(0).with_stdout_contains(
+ "hoare = \"0.1.1\" # Design by contract style assertions for Rust",
+ ),
+ );
+}
+
+#[test]
+fn help() {
+ assert_that(cargo_process("search").arg("-h"), execs().with_status(0));
+ assert_that(cargo_process("help").arg("search"), execs().with_status(0));
+ // Ensure that help output goes to stdout, not stderr.
+ assert_that(
+ cargo_process("search").arg("--help"),
+ execs().with_stderr(""),
+ );
+ assert_that(
+ cargo_process("search").arg("--help"),
+ execs().with_stdout_contains("[..] --frozen [..]"),
+ );
+}
diff --git a/tests/testsuite/small_fd_limits.rs b/tests/testsuite/small_fd_limits.rs
new file mode 100644
index 000000000..454660e0e
--- /dev/null
+++ b/tests/testsuite/small_fd_limits.rs
@@ -0,0 +1,118 @@
+use std::env;
+use std::ffi::OsStr;
+use std::path::PathBuf;
+use std::process::Command;
+
+use git2;
+use cargotest::support::{execs, project};
+use cargotest::support::registry::Package;
+use cargotest::support::paths;
+use cargotest::support::git;
+use hamcrest::assert_that;
+
+use url::Url;
+
+fn find_index() -> PathBuf {
+ let dir = paths::home().join(".cargo/registry/index");
+ dir.read_dir().unwrap().next().unwrap().unwrap().path()
+}
+
+fn run_test(path_env: Option<&OsStr>) {
+ const N: usize = 50;
+
+ let foo = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = "*"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+ Package::new("bar", "0.1.0").publish();
+
+ assert_that(foo.cargo("build"), execs().with_status(0));
+
+ let index = find_index();
+ let path = paths::home().join("tmp");
+ let url = Url::from_file_path(&path).unwrap().to_string();
+ let repo = git2::Repository::init(&path).unwrap();
+ let index = git2::Repository::open(&index).unwrap();
+ let mut cfg = repo.config().unwrap();
+ cfg.set_str("user.email", "foo@bar.com").unwrap();
+ cfg.set_str("user.name", "Foo Bar").unwrap();
+ let mut cfg = index.config().unwrap();
+ cfg.set_str("user.email", "foo@bar.com").unwrap();
+ cfg.set_str("user.name", "Foo Bar").unwrap();
+
+ for _ in 0..N {
+ git::commit(&repo);
+ index
+ .remote_anonymous(&url)
+ .unwrap()
+ .fetch(&["refs/heads/master:refs/remotes/foo/master"], None, None)
+ .unwrap();
+ }
+ drop((repo, index));
+ Package::new("bar", "0.1.1").publish();
+
+ let before = find_index()
+ .join(".git/objects/pack")
+ .read_dir()
+ .unwrap()
+ .count();
+ assert!(before > N);
+
+ let mut cmd = foo.cargo("update");
+ cmd.env("__CARGO_PACKFILE_LIMIT", "10");
+ if let Some(path) = path_env {
+ cmd.env("PATH", path);
+ }
+ cmd.env("RUST_LOG", "trace");
+ assert_that(cmd, execs().with_status(0));
+ let after = find_index()
+ .join(".git/objects/pack")
+ .read_dir()
+ .unwrap()
+ .count();
+ assert!(
+ after < before,
+ "packfiles before: {}\n\
+ packfiles after: {}",
+ before,
+ after
+ );
+}
+
+#[test]
+fn use_git_gc() {
+ if Command::new("git").arg("--version").output().is_err() {
+ return;
+ }
+ run_test(None);
+}
+
+#[test]
+// it looks like this test passes on some windows machines but not others,
+// notably not on AppVeyor's machines. Sounds like another bug for another day.
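+// Removing `git` from PATH below forces the index to be repacked without
+// shelling out to `git`, unlike `use_git_gc` above.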
+#[cfg_attr(windows, ignore)] +fn avoid_using_git() { + let path = env::var_os("PATH").unwrap_or_default(); + let mut paths = env::split_paths(&path).collect::>(); + let idx = paths + .iter() + .position(|p| p.join("git").exists() || p.join("git.exe").exists()); + match idx { + Some(i) => { + paths.remove(i); + } + None => return, + } + run_test(Some(&env::join_paths(&paths).unwrap())); +} diff --git a/tests/testsuite/test.rs b/tests/testsuite/test.rs new file mode 100644 index 000000000..608eb9faf --- /dev/null +++ b/tests/testsuite/test.rs @@ -0,0 +1,4027 @@ +use std::fs::File; +use std::io::prelude::*; +use std::str; + +use cargo; +use cargotest::{is_nightly, rustc_host, sleep_ms}; +use cargotest::support::{basic_bin_manifest, basic_lib_manifest, cargo_exe, execs, project}; +use cargotest::support::paths::CargoPathExt; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_file, is_not}; +use cargo::util::process; + +#[test] +fn cargo_test_simple() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[test] + fn test_hello() { + assert_eq!(hello(), "hello") + }"#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("hello\n"), + ); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.5.0 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", + p.url() + )) + .with_stdout_contains("test test_hello ... ok"), + ); +} + +#[test] +fn cargo_test_release() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + pub fn foo() { bar::bar(); } + + #[test] + fn test() { foo(); } + "#, + ) + .file( + "tests/test.rs", + r#" + extern crate foo; + + #[test] + fn test() { foo::foo(); } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + assert_that( + p.cargo("test").arg("-v").arg("--release"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[RUNNING] [..] -C opt-level=3 [..] +[COMPILING] foo v0.1.0 ({dir}) +[RUNNING] [..] -C opt-level=3 [..] +[RUNNING] [..] -C opt-level=3 [..] +[RUNNING] [..] -C opt-level=3 [..] +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE]` +[RUNNING] `[..]target[/]release[/]deps[/]test-[..][EXE]` +[DOCTEST] foo +[RUNNING] `rustdoc --test [..]lib.rs[..]`", + dir = p.url() + )) + .with_stdout_contains_n("test test ... 
ok", 2) + .with_stdout_contains("running 0 tests"), + ); +} + +#[test] +fn cargo_test_overflow_checks() { + if !is_nightly() { + return; + } + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.5.0" + authors = [] + + [[bin]] + name = "foo" + + [profile.release] + overflow-checks = true + "#, + ) + .file( + "src/foo.rs", + r#" + use std::panic; + pub fn main() { + let r = panic::catch_unwind(|| { + [1, i32::max_value()].iter().sum::(); + }); + assert!(r.is_err()); + }"#, + ) + .build(); + + assert_that(p.cargo("build").arg("--release"), execs().with_status(0)); + assert_that(&p.release_bin("foo"), existing_file()); + + assert_that( + process(&p.release_bin("foo")), + execs().with_status(0).with_stdout(""), + ); +} + +#[test] +fn cargo_test_verbose() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn main() {} + #[test] fn test_hello() {} + "#, + ) + .build(); + + assert_that( + p.cargo("test").arg("-v").arg("hello"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.5.0 ({url}) +[RUNNING] `rustc [..] src[/]main.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target[/]debug[/]deps[/]foo-[..][EXE] hello`", + url = p.url() + )) + .with_stdout_contains("test test_hello ... ok"), + ); +} + +#[test] +fn many_similar_names() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + " + pub fn foo() {} + #[test] fn lib_test() {} + ", + ) + .file( + "src/main.rs", + " + extern crate foo; + fn main() {} + #[test] fn bin_test() { foo::foo() } + ", + ) + .file( + "tests/foo.rs", + r#" + extern crate foo; + #[test] fn test_test() { foo::foo() } + "#, + ) + .build(); + + let output = p.cargo("test").arg("-v").exec_with_output().unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); + assert!( + output.contains("test bin_test"), + "bin_test missing\n{}", + output + ); + assert!( + output.contains("test lib_test"), + "lib_test missing\n{}", + output + ); + assert!( + output.contains("test test_test"), + "test_test missing\n{}", + output + ); +} + +#[test] +fn cargo_test_failing_test_in_bin() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[test] + fn test_hello() { + assert_eq!(hello(), "nope") + }"#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("hello\n"), + ); + + assert_that( + p.cargo("test"), + execs() + .with_stderr(format!( + "\ +[COMPILING] foo v0.5.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[ERROR] test failed, to rerun pass '--bin foo'", + url = p.url() + )) + .with_stdout_contains( + " +running 1 test +test test_hello ... 
FAILED + +failures: + +---- test_hello stdout ---- +thread 'test_hello' panicked at 'assertion failed:[..]", + ) + .with_stdout_contains("[..]`(left == right)`[..]") + .with_stdout_contains("[..]left: `\"hello\"`,[..]") + .with_stdout_contains("[..]right: `\"nope\"`[..]") + .with_stdout_contains("[..]src[/]main.rs:12[..]") + .with_stdout_contains( + "\ +failures: + test_hello +", + ) + .with_status(101), + ); +} + +#[test] +fn cargo_test_failing_test_in_test() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + pub fn main() { + println!("hello"); + }"#, + ) + .file( + "tests/footest.rs", + r#" + #[test] + fn test_hello() { + assert!(false) + }"#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + assert_that( + process(&p.bin("foo")), + execs().with_status(0).with_stdout("hello\n"), + ); + + assert_that( + p.cargo("test"), + execs() + .with_stderr(format!( + "\ +[COMPILING] foo v0.5.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]footest-[..][EXE] +[ERROR] test failed, to rerun pass '--test footest'", + url = p.url() + )) + .with_stdout_contains("running 0 tests") + .with_stdout_contains( + "\ +running 1 test +test test_hello ... FAILED + +failures: + +---- test_hello stdout ---- +thread 'test_hello' panicked at 'assertion failed: false', \ + tests[/]footest.rs:4[..] +", + ) + .with_stdout_contains( + "\ +failures: + test_hello +", + ) + .with_status(101), + ); +} + +#[test] +fn cargo_test_failing_test_in_lib() { + let p = project("foo") + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file( + "src/lib.rs", + r#" + #[test] + fn test_hello() { + assert!(false) + }"#, + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_stderr(format!( + "\ +[COMPILING] foo v0.5.0 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[ERROR] test failed, to rerun pass '--lib'", + url = p.url() + )) + .with_stdout_contains( + "\ +test test_hello ... FAILED + +failures: + +---- test_hello stdout ---- +thread 'test_hello' panicked at 'assertion failed: false', \ + src[/]lib.rs:4[..] +", + ) + .with_stdout_contains( + "\ +failures: + test_hello +", + ) + .with_status(101), + ); +} + +#[test] +fn test_with_lib_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "baz" + path = "src/main.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + /// + /// ```rust + /// extern crate foo; + /// fn main() { + /// println!("{:?}", foo::foo()); + /// } + /// ``` + /// + pub fn foo(){} + #[test] fn lib_test() {} + "#, + ) + .file( + "src/main.rs", + " + #[allow(unused_extern_crates)] + extern crate foo; + + fn main() {} + + #[test] + fn bin_test() {} + ", + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]baz-[..][EXE] +[DOCTEST] foo", + p.url() + )) + .with_stdout_contains("test lib_test ... ok") + .with_stdout_contains("test bin_test ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 3), + ); +} + +#[test] +fn test_with_deep_lib_dep() { + let p = project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = "../foo" + "#, + ) + .file( + "src/lib.rs", + " + #[cfg(test)] + extern crate foo; + /// ``` + /// bar::bar(); + /// ``` + pub fn bar() {} + + #[test] + fn bar_test() { + foo::foo(); + } + ", + ) + .build(); + let _p2 = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + " + pub fn foo() {} + + #[test] + fn foo_test() {} + ", + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ([..]) +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[..] +[DOCTEST] bar", + dir = p.url() + )) + .with_stdout_contains("test bar_test ... ok") + .with_stdout_contains_n("test [..] ... ok", 2), + ); +} + +#[test] +fn external_test_explicit() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[test]] + name = "test" + path = "src/test.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn get_hello() -> &'static str { "Hello" } + + #[test] + fn internal_test() {} + "#, + ) + .file( + "src/test.rs", + r#" + extern crate foo; + + #[test] + fn external_test() { assert_eq!(foo::get_hello(), "Hello") } + "#, + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]test-[..][EXE] +[DOCTEST] foo", + p.url() + )) + .with_stdout_contains("test internal_test ... ok") + .with_stdout_contains("test external_test ... ok") + .with_stdout_contains("running 0 tests"), + ); +} + +#[test] +fn external_test_named_test() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[test]] + name = "test" + "#, + ) + .file("src/lib.rs", "") + .file( + "tests/test.rs", + r#" + #[test] + fn foo() { } + "#, + ) + .build(); + + assert_that(p.cargo("test"), execs().with_status(0)) +} + +#[test] +fn external_test_implicit() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn get_hello() -> &'static str { "Hello" } + + #[test] + fn internal_test() {} + "#, + ) + .file( + "tests/external.rs", + r#" + extern crate foo; + + #[test] + fn external_test() { assert_eq!(foo::get_hello(), "Hello") } + "#, + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]external-[..][EXE] +[DOCTEST] foo", + p.url() + )) + .with_stdout_contains("test internal_test ... ok") + .with_stdout_contains("test external_test ... 
ok") + .with_stdout_contains("running 0 tests"), + ); +} + +#[test] +fn dont_run_examples() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + "#, + ) + .file( + "examples/dont-run-me-i-will-fail.rs", + r#" + fn main() { panic!("Examples should not be run by 'cargo test'"); } + "#, + ) + .build(); + assert_that(p.cargo("test"), execs().with_status(0)); +} + +#[test] +fn pass_through_command_line() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + " + #[test] fn foo() {} + #[test] fn bar() {} + ", + ) + .build(); + + assert_that( + p.cargo("test").arg("bar"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo", + dir = p.url() + )) + .with_stdout_contains("test bar ... ok") + .with_stdout_contains("running 0 tests"), + ); + + assert_that( + p.cargo("test").arg("foo"), + execs() + .with_status(0) + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("test foo ... ok") + .with_stdout_contains("running 0 tests"), + ); +} + +// Regression test for running cargo-test twice with +// tests in an rlib +#[test] +fn cargo_test_twice() { + let p = project("test_twice") + .file("Cargo.toml", &basic_lib_manifest("test_twice")) + .file( + "src/test_twice.rs", + r#" + #![crate_type = "rlib"] + + #[test] + fn dummy_test() { } + "#, + ) + .build(); + + p.cargo("build"); + + for _ in 0..2 { + assert_that(p.cargo("test"), execs().with_status(0)); + } +} + +#[test] +fn lib_bin_same_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + [[bin]] + name = "foo" + "#, + ) + .file( + "src/lib.rs", + " + #[test] fn lib_test() {} + ", + ) + .file( + "src/main.rs", + " + #[allow(unused_extern_crates)] + extern crate foo; + + #[test] + fn bin_test() {} + ", + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo", + p.url() + )) + .with_stdout_contains_n("test [..] ... ok", 2) + .with_stdout_contains("running 0 tests"), + ); +} + +#[test] +fn lib_with_standard_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + " + /// ``` + /// syntax::foo(); + /// ``` + pub fn foo() {} + + #[test] + fn foo_test() {} + ", + ) + .file( + "tests/test.rs", + " + extern crate syntax; + + #[test] + fn test() { syntax::foo() } + ", + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]test-[..][EXE] +[DOCTEST] syntax", + dir = p.url() + )) + .with_stdout_contains("test foo_test ... ok") + .with_stdout_contains("test test ... 
ok") + .with_stdout_contains_n("test [..] ... ok", 3), + ); +} + +#[test] +fn lib_with_standard_name2() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + name = "syntax" + test = false + doctest = false + "#, + ) + .file( + "src/lib.rs", + " + pub fn foo() {} + ", + ) + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]", + dir = p.url() + )) + .with_stdout_contains("test test ... ok"), + ); +} + +#[test] +fn lib_without_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + "#, + ) + .file( + "src/lib.rs", + " + pub fn foo() {} + ", + ) + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] syntax v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]", + dir = p.url() + )) + .with_stdout_contains("test test ... ok"), + ); +} + +#[test] +fn bin_without_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[bin]] + path = "src/main.rs" + "#, + ) + .file( + "src/lib.rs", + " + pub fn foo() {} + ", + ) + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .build(); + + assert_that( + p.cargo("test"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + binary target bin.name is required", + ), + ); +} + +#[test] +fn bench_without_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[bench]] + path = "src/bench.rs" + "#, + ) + .file( + "src/lib.rs", + " + pub fn foo() {} + ", + ) + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .file( + "src/bench.rs", + " + #![feature(test)] + extern crate syntax; + extern crate test; + + #[bench] + fn external_bench(_b: &mut test::Bencher) {} + ", + ) + .build(); + + assert_that( + p.cargo("test"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + benchmark target bench.name is required", + ), + ); +} + +#[test] +fn test_without_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[test]] + path = "src/test.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn foo() {} + pub fn get_hello() -> &'static str { "Hello" } + "#, + ) + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .file( + "src/test.rs", + r#" + extern crate syntax; + + #[test] + fn external_test() { assert_eq!(syntax::get_hello(), "Hello") } + 
"#, + ) + .build(); + + assert_that( + p.cargo("test"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + test target test.name is required", + ), + ); +} + +#[test] +fn example_without_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[example]] + path = "examples/example.rs" + "#, + ) + .file( + "src/lib.rs", + " + pub fn foo() {} + ", + ) + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .file( + "examples/example.rs", + r#" + extern crate syntax; + + fn main() { + println!("example1"); + } + "#, + ) + .build(); + + assert_that( + p.cargo("test"), + execs().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + example target example.name is required", + ), + ); +} + +#[test] +fn bin_there_for_integration() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + " + fn main() { std::process::exit(101); } + #[test] fn main_test() {} + ", + ) + .file( + "tests/foo.rs", + r#" + use std::process::Command; + #[test] + fn test_test() { + let status = Command::new("target/debug/foo").status().unwrap(); + assert_eq!(status.code(), Some(101)); + } + "#, + ) + .build(); + + let output = p.cargo("test").arg("-v").exec_with_output().unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); + assert!( + output.contains("main_test ... ok"), + "no main_test\n{}", + output + ); + assert!( + output.contains("test_test ... ok"), + "no test_test\n{}", + output + ); +} + +#[test] +fn test_dylib() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar as the_bar; + + pub fn bar() { the_bar::baz(); } + + #[test] + fn foo() { bar(); } + "#, + ) + .file( + "tests/test.rs", + r#" + extern crate foo as the_foo; + + #[test] + fn foo() { the_foo::bar(); } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate_type = ["dylib"] + "#, + ) + .file( + "bar/src/lib.rs", + " + pub fn baz() {} + ", + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ({dir}/bar) +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]", + dir = p.url() + )) + .with_stdout_contains_n("test foo ... ok", 2), + ); + + p.root().move_into_the_past(); + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]", + ) + .with_stdout_contains_n("test foo ... 
ok", 2), + ); +} + +#[test] +fn test_twice_with_build_cmd() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("build.rs", "fn main() {}") + .file( + "src/lib.rs", + " + #[test] + fn foo() {} + ", + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo", + dir = p.url() + )) + .with_stdout_contains("test foo ... ok") + .with_stdout_contains("running 0 tests"), + ); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("test foo ... ok") + .with_stdout_contains("running 0 tests"), + ); +} + +#[test] +fn test_then_build() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + " + #[test] + fn foo() {} + ", + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[DOCTEST] foo", + dir = p.url() + )) + .with_stdout_contains("test foo ... ok") + .with_stdout_contains("running 0 tests"), + ); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn test_no_run() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + " + #[test] + fn foo() { panic!() } + ", + ) + .build(); + + assert_that( + p.cargo("test").arg("--no-run"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); +} + +#[test] +fn test_run_specific_bin_target() { + let prj = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="bin1" + path="src/bin1.rs" + + [[bin]] + name="bin2" + path="src/bin2.rs" + "#, + ) + .file("src/bin1.rs", "#[test] fn test1() { }") + .file("src/bin2.rs", "#[test] fn test2() { }") + .build(); + + assert_that( + prj.cargo("test").arg("--bin").arg("bin2"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]bin2-[..][EXE]", + dir = prj.url() + )) + .with_stdout_contains("test test2 ... 
ok"), + ); +} + +#[test] +fn test_run_implicit_bin_target() { + let prj = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="mybin" + path="src/mybin.rs" + "#, + ) + .file( + "src/mybin.rs", + "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file( + "examples/myexm.rs", + "#[test] fn test_in_exm() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .build(); + + assert_that( + prj.cargo("test").arg("--bins"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]mybin-[..][EXE]", + dir = prj.url() + )) + .with_stdout_contains("test test_in_bin ... ok"), + ); +} + +#[test] +fn test_run_specific_test_target() { + let prj = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/bin/a.rs", "fn main() { }") + .file("src/bin/b.rs", "#[test] fn test_b() { } fn main() { }") + .file("tests/a.rs", "#[test] fn test_a() { }") + .file("tests/b.rs", "#[test] fn test_b() { }") + .build(); + + assert_that( + prj.cargo("test").arg("--test").arg("b"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]b-[..][EXE]", + dir = prj.url() + )) + .with_stdout_contains("test test_b ... ok"), + ); +} + +#[test] +fn test_run_implicit_test_target() { + let prj = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="mybin" + path="src/mybin.rs" + "#, + ) + .file( + "src/mybin.rs", + "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file( + "examples/myexm.rs", + "#[test] fn test_in_exm() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .build(); + + assert_that( + prj.cargo("test").arg("--tests"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]mybin-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]mytest-[..][EXE] +[RUNNING] target[/]debug[/]examples[/]myexm-[..][EXE]", + dir = prj.url() + )) + .with_stdout_contains("test test_in_test ... ok"), + ); +} + +#[test] +fn test_run_implicit_bench_target() { + let prj = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="mybin" + path="src/mybin.rs" + "#, + ) + .file( + "src/mybin.rs", + "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file( + "examples/myexm.rs", + "#[test] fn test_in_exm() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .build(); + + assert_that( + prj.cargo("test").arg("--benches"), + execs() + .with_status(0) + .with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target[/]debug[/]deps[/]mybin-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]mybench-[..][EXE]", + dir = prj.url() + )) + .with_stdout_contains("test test_in_bench ... ok"), + ); +} + +#[test] +fn test_run_implicit_example_target() { + let prj = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="mybin" + path="src/mybin.rs" + "#, + ) + .file( + "src/mybin.rs", + "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file( + "examples/myexm.rs", + "#[test] fn test_in_exm() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .build(); + + assert_that( + prj.cargo("test").arg("--examples"), + execs().with_status(0).with_stderr(format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]examples[/]myexm-[..][EXE]", + dir = prj.url() + )), + ); +} + +#[test] +fn test_no_harness() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "foo" + test = false + + [[test]] + name = "bar" + path = "foo.rs" + harness = false + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("foo.rs", "fn main() {}") + .build(); + + assert_that( + p.cargo("test").arg("--").arg("--nocapture"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]bar-[..][EXE] +", + dir = p.url() + )), + ); +} + +#[test] +fn selective_testing() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [lib] + name = "foo" + doctest = false + "#, + ) + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [lib] + name = "d1" + doctest = false + "#, + ) + .file("d1/src/lib.rs", "") + .file( + "d1/src/main.rs", + "#[allow(unused_extern_crates)] extern crate d1; fn main() {}", + ) + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [lib] + name = "d2" + doctest = false + "#, + ) + .file("d2/src/lib.rs", "") + .file( + "d2/src/main.rs", + "#[allow(unused_extern_crates)] extern crate d2; fn main() {}", + ); + let p = p.build(); + + println!("d1"); + assert_that( + p.cargo("test").arg("-p").arg("d1"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] d1 v0.0.1 ({dir}/d1) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]", + dir = p.url() + )) + .with_stdout_contains_n("running 0 tests", 2), + ); + + println!("d2"); + assert_that( + p.cargo("test").arg("-p").arg("d2"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] d2 v0.0.1 ({dir}/d2) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]", + dir = p.url() + )) + .with_stdout_contains_n("running 0 tests", 2), + ); + + println!("whole"); + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", + dir = p.url() + )) + .with_stdout_contains("running 0 tests"), + ); +} + +#[test] +fn almost_cyclic_but_not_quite() { + let p = project("a") + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dev-dependencies.b] + path = "b" + [dev-dependencies.c] + path = "c" + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(test)] extern crate b; + #[cfg(test)] extern crate c; + "#, + ) + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies.a] + path = ".." + "#, + ) + .file( + "b/src/lib.rs", + r#" + #[allow(unused_extern_crates)] + extern crate a; + "#, + ) + .file( + "c/Cargo.toml", + r#" + [package] + name = "c" + version = "0.0.1" + authors = [] + "#, + ) + .file("c/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(p.cargo("test"), execs().with_status(0)); +} + +#[test] +fn build_then_selective_test() { + let p = project("a") + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies.b] + path = "b" + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate b;", + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate b; + #[allow(unused_extern_crates)] + extern crate a; + fn main() {} + "#, + ) + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + p.root().move_into_the_past(); + assert_that(p.cargo("test").arg("-p").arg("b"), execs().with_status(0)); +} + +#[test] +fn example_dev_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + "#, + ) + .file( + "examples/e1.rs", + r#" + extern crate bar; + fn main() { } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + // make sure this file takes awhile to compile + macro_rules! f0( () => (1) ); + macro_rules! f1( () => ({(f0!()) + (f0!())}) ); + macro_rules! f2( () => ({(f1!()) + (f1!())}) ); + macro_rules! f3( () => ({(f2!()) + (f2!())}) ); + macro_rules! f4( () => ({(f3!()) + (f3!())}) ); + macro_rules! f5( () => ({(f4!()) + (f4!())}) ); + macro_rules! f6( () => ({(f5!()) + (f5!())}) ); + macro_rules! f7( () => ({(f6!()) + (f6!())}) ); + macro_rules! 
f8( () => ({(f7!()) + (f7!())}) ); + pub fn bar() { + f8!(); + } + "#, + ) + .build(); + assert_that(p.cargo("test"), execs().with_status(0)); + assert_that( + p.cargo("run") + .arg("--example") + .arg("e1") + .arg("--release") + .arg("-v"), + execs().with_status(0), + ); +} + +#[test] +fn selective_testing_with_docs() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// not valid rust + /// ``` + pub fn foo() {} + "#, + ) + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [lib] + name = "d1" + path = "d1.rs" + "#, + ) + .file("d1/d1.rs", ""); + let p = p.build(); + + assert_that( + p.cargo("test").arg("-p").arg("d1"), + execs() + .with_status(0) + .with_stderr(&format!( + "\ +[COMPILING] d1 v0.0.1 ({dir}/d1) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]d1[..][EXE] +[DOCTEST] d1", + dir = p.url() + )) + .with_stdout_contains_n("running 0 tests", 2), + ); +} + +#[test] +fn example_bin_same_name() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) + .file("examples/foo.rs", r#"fn main() { println!("example"); }"#) + .build(); + + assert_that( + p.cargo("test").arg("--no-run").arg("-v"), + execs().with_status(0).with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({dir}) +[RUNNING] `rustc [..]` +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url() + )), + ); + + assert_that(&p.bin("foo"), is_not(existing_file())); + assert_that(&p.bin("examples/foo"), existing_file()); + + assert_that( + p.process(&p.bin("examples/foo")), + execs().with_status(0).with_stdout("example\n"), + ); + + assert_that( + p.cargo("run"), + execs() + .with_status(0) + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] [..]", + ) + .with_stdout("bin"), + ); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn test_with_example_twice() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) + .file("examples/foo.rs", r#"fn main() { println!("example"); }"#) + .build(); + + println!("first"); + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); + assert_that(&p.bin("examples/foo"), existing_file()); + println!("second"); + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); + assert_that(&p.bin("examples/foo"), existing_file()); +} + +#[test] +fn example_with_dev_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + test = false + doctest = false + + [dev-dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "examples/ex.rs", + "#[allow(unused_extern_crates)] extern crate a; fn main() {}", + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("test").arg("-v"), + execs().with_status(0).with_stderr( + "\ +[..] +[..] +[..] +[..] +[RUNNING] `rustc --crate-name ex [..] 
--extern a=[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn bin_is_preserved() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .build(); + + assert_that(p.cargo("build").arg("-v"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + + println!("testing"); + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); +} + +#[test] +fn bad_example() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", ""); + let p = p.build(); + + assert_that( + p.cargo("run").arg("--example").arg("foo"), + execs() + .with_status(101) + .with_stderr("[ERROR] no example target named `foo`"), + ); + assert_that( + p.cargo("run").arg("--bin").arg("foo"), + execs() + .with_status(101) + .with_stderr("[ERROR] no bin target named `foo`"), + ); +} + +#[test] +fn doctest_feature() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + [features] + bar = [] + "#, + ) + .file( + "src/lib.rs", + r#" + /// ```rust + /// assert_eq!(foo::foo(), 1); + /// ``` + #[cfg(feature = "bar")] + pub fn foo() -> i32 { 1 } + "#, + ) + .build(); + + assert_that( + p.cargo("test").arg("--features").arg("bar"), + execs() + .with_status(0) + .with_stderr( + "\ +[COMPILING] foo [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("running 0 tests") + .with_stdout_contains("test [..] ... ok"), + ); +} + +#[test] +fn dashes_to_underscores() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo-bar" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// assert_eq!(foo_bar::foo(), 1); + /// ``` + pub fn foo() -> i32 { 1 } + "#, + ) + .build(); + + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); +} + +#[test] +fn doctest_dev_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies] + b = { path = "b" } + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// extern crate b; + /// ``` + pub fn foo() {} + "#, + ) + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); +} + +#[test] +fn filter_no_doc_tests() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// extern crate b; + /// ``` + pub fn foo() {} + "#, + ) + .file("tests/foo.rs", "") + .build(); + + assert_that( + p.cargo("test").arg("--test=foo"), + execs() + .with_status(0) + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target[/]debug[/]deps[/]foo[..][EXE]", + ) + .with_stdout_contains("running 0 tests"), + ); +} + +#[test] +fn dylib_doctest() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate-type = ["rlib", "dylib"] + test = false + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// foo::foo(); + /// ``` + pub fn foo() {} + "#, + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[DOCTEST] foo", + ) + .with_stdout_contains("test [..] ... ok"), + ); +} + +#[test] +fn dylib_doctest2() { + // can't doctest dylibs as they're statically linked together + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate-type = ["dylib"] + test = false + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// foo::foo(); + /// ``` + pub fn foo() {} + "#, + ) + .build(); + + assert_that(p.cargo("test"), execs().with_status(0).with_stdout("")); +} + +#[test] +fn cyclic_dev_dep_doc_test() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies] + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + r#" + //! ``` + //! extern crate bar; + //! ``` + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = { path = ".." } + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[allow(unused_extern_crates)] + extern crate foo; + "#, + ) + .build(); + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[COMPILING] bar v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("running 0 tests") + .with_stdout_contains("test [..] ... 
ok"), + ); +} + +#[test] +fn dev_dep_with_build_script() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file("examples/foo.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("bar/src/lib.rs", "") + .file("bar/build.rs", "fn main() {}") + .build(); + assert_that(p.cargo("test"), execs().with_status(0)); +} + +#[test] +fn no_fail_fast() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn add_one(x: i32) -> i32{ + x + 1 + } + + /// ```rust + /// use foo::sub_one; + /// assert_eq!(sub_one(101), 100); + /// ``` + pub fn sub_one(x: i32) -> i32{ + x - 1 + } + "#, + ) + .file( + "tests/test_add_one.rs", + r#" + extern crate foo; + use foo::*; + + #[test] + fn add_one_test() { + assert_eq!(add_one(1), 2); + } + + #[test] + fn fail_add_one_test() { + assert_eq!(add_one(1), 1); + } + "#, + ) + .file( + "tests/test_sub_one.rs", + r#" + extern crate foo; + use foo::*; + + #[test] + fn sub_one_test() { + assert_eq!(sub_one(1), 0); + } + "#, + ) + .build(); + assert_that( + p.cargo("test").arg("--no-fail-fast"), + execs() + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE] +[RUNNING] target[/]debug[/]deps[/]test_add_one-[..][EXE]", + ) + .with_stdout_contains("running 0 tests") + .with_stderr_contains( + "\ +[RUNNING] target[/]debug[/]deps[/]test_sub_one-[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("test result: FAILED. [..]") + .with_stdout_contains("test sub_one_test ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 3), + ); +} + +#[test] +fn test_multiple_packages() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [lib] + name = "foo" + doctest = false + "#, + ) + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [lib] + name = "d1" + doctest = false + "#, + ) + .file("d1/src/lib.rs", "") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [lib] + name = "d2" + doctest = false + "#, + ) + .file("d2/src/lib.rs", ""); + let p = p.build(); + + assert_that( + p.cargo("test").arg("-p").arg("d1").arg("-p").arg("d2"), + execs() + .with_status(0) + .with_stderr_contains("[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]") + .with_stderr_contains("[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]") + .with_stdout_contains_n("running 0 tests", 2), + ); +} + +#[test] +fn bin_does_not_rebuild_tests() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("tests/foo.rs", ""); + let p = p.build(); + + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); + + sleep_ms(1000); + File::create(&p.root().join("src/main.rs")) + .unwrap() + .write_all(b"fn main() { 3; }") + .unwrap(); + + assert_that( + p.cargo("test").arg("-v").arg("--no-run"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] src[/]main.rs [..]` +[RUNNING] `rustc [..] src[/]main.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn selective_test_wonky_profile() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [profile.release] + opt-level = 2 + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", ""); + let p = p.build(); + + assert_that( + p.cargo("test -v --no-run --release -p foo -p a"), + execs().with_status(0), + ); +} + +#[test] +fn selective_test_optional_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a", optional = true } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", ""); + let p = p.build(); + + assert_that( + p.cargo("test") + .arg("-v") + .arg("--no-run") + .arg("--features") + .arg("a") + .arg("-p") + .arg("a"), + execs().with_status(0).with_stderr( + "\ +[COMPILING] a v0.0.1 ([..]) +[RUNNING] `rustc [..] a[/]src[/]lib.rs [..]` +[RUNNING] `rustc [..] a[/]src[/]lib.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn only_test_docs() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/lib.rs", + r#" + #[test] + fn foo() { + let a: u32 = "hello"; + } + + /// ``` + /// foo::bar(); + /// println!("ok"); + /// ``` + pub fn bar() { + } + "#, + ) + .file("tests/foo.rs", "this is not rust"); + let p = p.build(); + + assert_that( + p.cargo("test").arg("--doc"), + execs() + .with_status(0) + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[DOCTEST] foo", + ) + .with_stdout_contains("test [..] ... ok"), + ); +} + +#[test] +fn test_panic_abort_with_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [profile.dev] + panic = 'abort' + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + + #[test] + fn foo() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); +} + +#[test] +fn cfg_test_even_with_no_harness() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + harness = false + doctest = false + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(test)] + fn main() { + println!("hello!"); + } + "#, + ) + .build(); + assert_that( + p.cargo("test").arg("-v"), + execs().with_status(0).with_stdout("hello!\n").with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `[..]` +", + ), + ); +} + +#[test] +fn panic_abort_multiple() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + + [profile.release] + panic = 'abort' + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate a;", + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + assert_that( + p.cargo("test --release -v -p foo -p a"), + execs().with_status(0), + ); +} + +#[test] +fn pass_correct_cfgs_flags_to_rustdoc() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [features] + default = ["feature_a/default"] + nightly = ["feature_a/nightly"] + + [dependencies.feature_a] + path = "libs/feature_a" + default-features = false + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(test)] + mod tests { + #[test] + fn it_works() { + assert!(true); + } + } + "#, + ) + .file( + "libs/feature_a/Cargo.toml", + r#" + [package] + name = "feature_a" + version = "0.1.0" + authors = [] + + [features] + default = ["mock_serde_codegen"] + nightly = ["mock_serde_derive"] + + [dependencies] + mock_serde_derive = { path = "../mock_serde_derive", optional = true } + + [build-dependencies] + mock_serde_codegen = { path = "../mock_serde_codegen", optional = true } + "#, + ) + .file( + "libs/feature_a/src/lib.rs", + r#" + #[cfg(feature = "mock_serde_derive")] + const MSG: &'static str = "This is safe"; + + #[cfg(feature = "mock_serde_codegen")] + const MSG: &'static str = "This is risky"; + + pub fn get() -> &'static str { + MSG + } + "#, + ) + .file( + "libs/mock_serde_derive/Cargo.toml", + r#" + [package] + name = "mock_serde_derive" + version = "0.1.0" + authors = [] + "#, + ) + .file("libs/mock_serde_derive/src/lib.rs", "") + .file( + "libs/mock_serde_codegen/Cargo.toml", + r#" + [package] + name = "mock_serde_codegen" + version = "0.1.0" + authors = [] + "#, + ) + .file("libs/mock_serde_codegen/src/lib.rs", ""); + let p = p.build(); + + assert_that( + p.cargo("test") + .arg("--package") + .arg("feature_a") + .arg("--verbose"), + execs().with_status(0).with_stderr_contains( + "\ +[DOCTEST] feature_a +[RUNNING] `rustdoc --test [..]mock_serde_codegen[..]`", + ), + ); + + assert_that( + p.cargo("test").arg("--verbose"), + execs().with_status(0).with_stderr_contains( + "\ +[DOCTEST] foo +[RUNNING] `rustdoc --test [..]feature_a[..]`", + ), + ); +} + +#[test] +fn test_release_ignore_panic() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + + [profile.test] + panic = 'abort' + [profile.release] + panic = 'abort' + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate a;", + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", ""); + let p = p.build(); + println!("test"); + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); + println!("bench"); + assert_that(p.cargo("bench").arg("-v"), execs().with_status(0)); +} + +#[test] +fn test_many_with_features() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + + [features] + foo = [] + + [workspace] + "#, + ) + .file("src/lib.rs", "") + .file( + 
"a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + assert_that( + p.cargo("test -v -p a -p foo --features foo"), + execs().with_status(0), + ); +} + +#[test] +fn test_all_workspace() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file( + "src/main.rs", + r#" + #[test] + fn foo_test() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[test] + fn bar_test() {} + "#, + ) + .build(); + + assert_that( + p.cargo("test").arg("--all"), + execs() + .with_status(0) + .with_stdout_contains("test foo_test ... ok") + .with_stdout_contains("test bar_test ... ok"), + ); +} + +#[test] +fn test_all_exclude() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [workspace] + members = ["bar", "baz"] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[test] + pub fn bar() {} + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.1.0" + "#, + ) + .file( + "baz/src/lib.rs", + r#" + #[test] + pub fn baz() { + assert!(false); + } + "#, + ) + .build(); + + assert_that( + p.cargo("test").arg("--all").arg("--exclude").arg("baz"), + execs().with_status(0).with_stdout_contains( + "running 1 test +test bar ... ok", + ), + ); +} + +#[test] +fn test_all_virtual_manifest() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + "#, + ) + .file( + "a/src/lib.rs", + r#" + #[test] + fn a() {} + "#, + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.1.0" + "#, + ) + .file( + "b/src/lib.rs", + r#" + #[test] + fn b() {} + "#, + ) + .build(); + + assert_that( + p.cargo("test").arg("--all"), + execs() + .with_status(0) + .with_stdout_contains("test a ... ok") + .with_stdout_contains("test b ... ok"), + ); +} + +#[test] +fn test_virtual_manifest_all_implied() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + "#, + ) + .file( + "a/src/lib.rs", + r#" + #[test] + fn a() {} + "#, + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.1.0" + "#, + ) + .file( + "b/src/lib.rs", + r#" + #[test] + fn b() {} + "#, + ) + .build(); + + assert_that( + p.cargo("test"), + execs() + .with_status(0) + .with_stdout_contains("test a ... ok") + .with_stdout_contains("test b ... ok"), + ); +} + +#[test] +fn test_all_member_dependency_same_name() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + + [dependencies] + a = "0.1.0" + "#, + ) + .file( + "a/src/lib.rs", + r#" + #[test] + fn a() {} + "#, + ) + .build(); + + Package::new("a", "0.1.0").publish(); + + assert_that( + p.cargo("test").arg("--all"), + execs().with_status(0).with_stdout_contains("test a ... 
ok"), + ); +} + +#[test] +fn doctest_only_with_dev_dep() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + + [dev-dependencies] + b = { path = "b" } + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// extern crate b; + /// + /// b::b(); + /// ``` + pub fn a() {} + "#, + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.1.0" + "#, + ) + .file( + "b/src/lib.rs", + r#" + pub fn b() {} + "#, + ) + .build(); + + assert_that( + p.cargo("test").arg("--doc").arg("-v"), + execs().with_status(0), + ); +} + +#[test] +fn test_many_targets() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file( + "src/bin/a.rs", + r#" + fn main() {} + #[test] fn bin_a() {} + "#, + ) + .file( + "src/bin/b.rs", + r#" + fn main() {} + #[test] fn bin_b() {} + "#, + ) + .file( + "src/bin/c.rs", + r#" + fn main() {} + #[test] fn bin_c() { panic!(); } + "#, + ) + .file( + "examples/a.rs", + r#" + fn main() {} + #[test] fn example_a() {} + "#, + ) + .file( + "examples/b.rs", + r#" + fn main() {} + #[test] fn example_b() {} + "#, + ) + .file( + "examples/c.rs", + r#" + #[test] fn example_c() { panic!(); } + "#, + ) + .file( + "tests/a.rs", + r#" + #[test] fn test_a() {} + "#, + ) + .file( + "tests/b.rs", + r#" + #[test] fn test_b() {} + "#, + ) + .file( + "tests/c.rs", + r#" + does not compile + "#, + ) + .build(); + + assert_that( + p.cargo("test") + .arg("--verbose") + .arg("--bin") + .arg("a") + .arg("--bin") + .arg("b") + .arg("--example") + .arg("a") + .arg("--example") + .arg("b") + .arg("--test") + .arg("a") + .arg("--test") + .arg("b"), + execs() + .with_status(0) + .with_stdout_contains("test bin_a ... ok") + .with_stdout_contains("test bin_b ... ok") + .with_stdout_contains("test test_a ... ok") + .with_stdout_contains("test test_b ... ok") + .with_stderr_contains("[RUNNING] `rustc --crate-name a examples[/]a.rs [..]`") + .with_stderr_contains("[RUNNING] `rustc --crate-name b examples[/]b.rs [..]`"), + ) +} + +#[test] +fn doctest_and_registry() { + let p = project("workspace") + .file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + + [dependencies] + b = { path = "b" } + c = { path = "c" } + + [workspace] + "#, + ) + .file("src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.1.0" + "#, + ) + .file( + "b/src/lib.rs", + " + /// ``` + /// b::foo(); + /// ``` + pub fn foo() {} + ", + ) + .file( + "c/Cargo.toml", + r#" + [project] + name = "c" + version = "0.1.0" + + [dependencies] + b = "0.1" + "#, + ) + .file("c/src/lib.rs", "") + .build(); + + Package::new("b", "0.1.0").publish(); + + assert_that( + p.cargo("test").arg("--all").arg("-v"), + execs().with_status(0), + ); +} + +#[test] +fn cargo_test_env() { + let src = format!( + r#" + #![crate_type = "rlib"] + + #[test] + fn env_test() {{ + use std::env; + println!("{{}}", env::var("{}").unwrap()); + }} + "#, + cargo::CARGO_ENV + ); + + let p = project("env_test") + .file("Cargo.toml", &basic_lib_manifest("env_test")) + .file("src/lib.rs", &src) + .build(); + + let mut pr = p.cargo("test"); + let cargo = cargo_exe().canonicalize().unwrap(); + assert_that( + pr.args(&["--lib", "--", "--nocapture"]), + execs().with_status(0).with_stdout_contains(format!( + "\ +{} +test env_test ... 
ok +", + cargo.to_str().unwrap() + )), + ); +} + +#[test] +fn test_order() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + "#, + ) + .file( + "src/lib.rs", + r#" + #[test] fn test_lib() {} + "#, + ) + .file( + "tests/a.rs", + r#" + #[test] fn test_a() {} + "#, + ) + .file( + "tests/z.rs", + r#" + #[test] fn test_z() {} + "#, + ) + .build(); + + assert_that( + p.cargo("test").arg("--all"), + execs().with_status(0).with_stdout_contains( + " +running 1 test +test test_lib ... ok + +test result: ok. [..] + + +running 1 test +test test_a ... ok + +test result: ok. [..] + + +running 1 test +test test_z ... ok + +test result: ok. [..] +", + ), + ); +} + +#[test] +fn cyclic_dev() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dev-dependencies] + foo = { path = "." } + "#, + ) + .file( + "src/lib.rs", + r#" + #[test] fn test_lib() {} + "#, + ) + .file( + "tests/foo.rs", + r#" + extern crate foo; + "#, + ) + .build(); + + assert_that(p.cargo("test").arg("--all"), execs().with_status(0)); +} + +#[test] +fn publish_a_crate_without_tests() { + Package::new("testless", "0.1.0") + .file("Cargo.toml", r#" + [project] + name = "testless" + version = "0.1.0" + exclude = ["tests/*"] + + [[test]] + name = "a_test" + "#) + .file("src/lib.rs", "") + + // In real life, the package will have a test, + // which would be excluded from .crate file by the + // `exclude` field. Our test harness does not honor + // exclude though, so let's just not add the file! + // .file("tests/a_test.rs", "") + + .publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + testless = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("test"), execs().with_status(0)); + assert_that( + p.cargo("test").arg("--package").arg("testless"), + execs().with_status(0), + ); +} + +#[test] +fn find_dependency_of_proc_macro_dependency_with_target() { + let workspace = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["root", "proc_macro_dep"] + "#, + ) + .file( + "root/Cargo.toml", + r#" + [project] + name = "root" + version = "0.1.0" + authors = [] + + [dependencies] + proc_macro_dep = { path = "../proc_macro_dep" } + "#, + ) + .file( + "root/src/lib.rs", + r#" + #[macro_use] + extern crate proc_macro_dep; + + #[derive(Noop)] + pub struct X; + "#, + ) + .file( + "proc_macro_dep/Cargo.toml", + r#" + [project] + name = "proc_macro_dep" + version = "0.1.0" + authors = [] + + [lib] + proc-macro = true + + [dependencies] + bar = "^0.1" + "#, + ) + .file( + "proc_macro_dep/src/lib.rs", + r#" + extern crate bar; + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Noop)] + pub fn noop(_input: TokenStream) -> TokenStream { + "".parse().unwrap() + } + "#, + ) + .build(); + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0") + .dep("foo", "0.1") + .file("src/lib.rs", "extern crate foo;") + .publish(); + assert_that( + workspace + .cargo("test") + .arg("--all") + .arg("--target") + .arg(rustc_host()), + execs().with_status(0), + ); +} + +#[test] +fn test_hint_not_masked_by_doctest() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// assert_eq!(1, 1); + /// ``` + pub fn this_works() {} + "#, + ) + .file( + "tests/integ.rs", + r#" + #[test] + fn 
this_fails() { + panic!(); + } + "#, + ) + .build(); + assert_that( + p.cargo("test").arg("--no-fail-fast"), + execs() + .with_status(101) + .with_stdout_contains("test this_fails ... FAILED") + .with_stdout_contains("[..]this_works (line [..]ok") + .with_stderr_contains( + "[ERROR] test failed, to rerun pass \ + '--test integ'", + ), + ); +} + +#[test] +fn test_hint_workspace() { + let workspace = project("workspace") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + "#, + ) + .file( + "a/src/lib.rs", + r#" + #[test] + fn t1() {} + "#, + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.1.0" + "#, + ) + .file( + "b/src/lib.rs", + r#" + #[test] + fn t1() {assert!(false)} + "#, + ) + .build(); + + assert_that( + workspace.cargo("test"), + execs() + .with_stderr_contains("[ERROR] test failed, to rerun pass '-p b --lib'") + .with_status(101), + ); +} diff --git a/tests/testsuite/tool_paths.rs b/tests/testsuite/tool_paths.rs new file mode 100644 index 000000000..3fe4b55ec --- /dev/null +++ b/tests/testsuite/tool_paths.rs @@ -0,0 +1,256 @@ +use cargotest::rustc_host; +use cargotest::support::{execs, project, path2url}; +use hamcrest::assert_that; + +#[test] +fn pathless_tools() { + let target = rustc_host(); + + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + ar = "nonexistent-ar" + linker = "nonexistent-linker" + "#, + target + ), + ) + .build(); + + assert_that( + foo.cargo("build").arg("--verbose"), + execs().with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + url = foo.url() + )), + ) +} + +#[test] +fn absolute_tools() { + let target = rustc_host(); + + // Escaped as they appear within a TOML config file + let config = if cfg!(windows) { + ( + r#"C:\\bogus\\nonexistent-ar"#, + r#"C:\\bogus\\nonexistent-linker"#, + ) + } else { + (r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#) + }; + + let foo = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{target}] + ar = "{ar}" + linker = "{linker}" + "#, + target = target, + ar = config.0, + linker = config.1 + ), + ) + .build(); + + let output = if cfg!(windows) { + ( + r#"C:\bogus\nonexistent-ar"#, + r#"C:\bogus\nonexistent-linker"#, + ) + } else { + (r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#) + }; + + assert_that( + foo.cargo("build").arg("--verbose"), + execs().with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc [..] -C ar={ar} -C linker={linker} [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + url = foo.url(), + ar = output.0, + linker = output.1 + )), + ) +} + +#[test] +fn relative_tools() { + let target = rustc_host(); + + // Escaped as they appear within a TOML config file + let config = if cfg!(windows) { + (r#".\\nonexistent-ar"#, r#".\\tools\\nonexistent-linker"#) + } else { + (r#"./nonexistent-ar"#, r#"./tools/nonexistent-linker"#) + }; + + // Funky directory structure to test that relative tool paths are made absolute + // by reference to the `.cargo/..` directory and not to (for example) the CWD. + let origin = project("origin") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + "#, + ) + .file("foo/src/lib.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{target}] + ar = "{ar}" + linker = "{linker}" + "#, + target = target, + ar = config.0, + linker = config.1 + ), + ) + .build(); + + let foo_path = origin.root().join("foo"); + let foo_url = path2url(foo_path.clone()); + let prefix = origin.root().into_os_string().into_string().unwrap(); + let output = if cfg!(windows) { + ( + format!(r#"{}\.\nonexistent-ar"#, prefix), + format!(r#"{}\.\tools\nonexistent-linker"#, prefix), + ) + } else { + ( + format!(r#"{}/./nonexistent-ar"#, prefix), + format!(r#"{}/./tools/nonexistent-linker"#, prefix), + ) + }; + + assert_that( + origin.cargo("build").cwd(foo_path).arg("--verbose"), + execs().with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc [..] -C ar={ar} -C linker={linker} [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + url = foo_url, + ar = output.0, + linker = output.1 + )), + ) +} + +#[test] +fn custom_runner() { + let target = rustc_host(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("tests/test.rs", "") + .file("benches/bench.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + runner = "nonexistent-runner -r" + "#, + target + ), + ) + .build(); + + assert_that( + p.cargo("run").args(&["--", "--param"]), + execs().with_stderr_contains(&format!( + "\ +[COMPILING] foo v0.0.1 ({url}) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `nonexistent-runner -r target[/]debug[/]foo[EXE] --param` +", + url = p.url() + )), + ); + + assert_that( + p.cargo("test") + .args(&["--test", "test", "--verbose", "--", "--param"]), + execs().with_stderr_contains(&format!( + "\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `nonexistent-runner -r [..][/]target[/]debug[/]deps[/]test-[..][EXE] --param` +", + url = p.url() + )), + ); + + assert_that( + p.cargo("bench") + .args(&["--bench", "bench", "--verbose", "--", "--param"]), + execs().with_stderr_contains(&format!( + "\ +[COMPILING] foo v0.0.1 ({url}) +[RUNNING] `rustc [..]` +[RUNNING] `rustc [..]` +[FINISHED] release [optimized] target(s) in [..] 
+[RUNNING] `nonexistent-runner -r [..][/]target[/]release[/]deps[/]bench-[..][EXE] --param --bench`
+",
+            url = p.url()
+        )),
+    );
+}
diff --git a/tests/testsuite/update.rs b/tests/testsuite/update.rs
new file mode 100644
index 000000000..053f0888d
--- /dev/null
+++ b/tests/testsuite/update.rs
@@ -0,0 +1,415 @@
+use std::fs::File;
+use std::io::prelude::*;
+
+use cargotest::support::{execs, project};
+use cargotest::support::registry::Package;
+use hamcrest::assert_that;
+
+#[test]
+fn minor_update_two_places() {
+    Package::new("log", "0.1.0").publish();
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            log = "0.1"
+            foo = { path = "foo" }
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            log = "0.1"
+        "#,
+        )
+        .file("foo/src/lib.rs", "")
+        .build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    Package::new("log", "0.1.1").publish();
+
+    File::create(p.root().join("foo/Cargo.toml"))
+        .unwrap()
+        .write_all(
+            br#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            log = "0.1.1"
+        "#,
+        )
+        .unwrap();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+}
+
+#[test]
+fn transitive_minor_update() {
+    Package::new("log", "0.1.0").publish();
+    Package::new("serde", "0.1.0").dep("log", "0.1").publish();
+
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            serde = "0.1"
+            log = "0.1"
+            foo = { path = "foo" }
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            serde = "0.1"
+        "#,
+        )
+        .file("foo/src/lib.rs", "")
+        .build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+
+    Package::new("log", "0.1.1").publish();
+    Package::new("serde", "0.1.1").dep("log", "0.1.1").publish();
+
+    // Note that `serde` isn't actually updated here! The default behavior of
+    // `update` right now is to satisfy an update as conservatively as
+    // possible. In this case we previously locked the dependency graph to
+    // `log 0.1.0`, but nothing on the command line says we're allowed to
+    // update that. As a result, updating `serde` here shouldn't move it to
+    // `serde 0.1.1`, as that would also force an update to `log 0.1.1`.
+    //
+    // Also note that this is probably counterintuitive and weird. We may wish
+    // to change this one day.
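+    //
+    // Concretely: `serde 0.1.1` (published above) depends on `log 0.1.1`,
+    // but `log` stays locked at 0.1.0, so the resolver keeps `serde 0.1.0`.
+    // The expected stderr below therefore contains only the registry
+    // refresh, with no `[UPDATING] serde [..]` line.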
+ assert_that( + p.cargo("update").arg("-p").arg("serde"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +", + ), + ); +} + +#[test] +fn conservative() { + Package::new("log", "0.1.0").publish(); + Package::new("serde", "0.1.0").dep("log", "0.1").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.1" + log = "0.1" + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + Package::new("log", "0.1.1").publish(); + Package::new("serde", "0.1.1").dep("log", "0.1").publish(); + + assert_that( + p.cargo("update").arg("-p").arg("serde"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[UPDATING] serde v0.1.0 -> v0.1.1 +", + ), + ); +} + +#[test] +fn update_via_new_dep() { + Package::new("log", "0.1.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1" + # foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + Package::new("log", "0.1.1").publish(); + + p.uncomment_root_manifest(); + assert_that( + p.cargo("build").env("RUST_LOG", "cargo=trace"), + execs().with_status(0), + ); +} + +#[test] +fn update_via_new_member() { + Package::new("log", "0.1.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [workspace] + # members = [ "foo" ] + + [dependencies] + log = "0.1" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + Package::new("log", "0.1.1").publish(); + + p.uncomment_root_manifest(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn add_dep_deep_new_requirement() { + Package::new("log", "0.1.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1" + # bar = "0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + Package::new("log", "0.1.1").publish(); + Package::new("bar", "0.1.0").dep("log", "0.1.1").publish(); + + p.uncomment_root_manifest(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn everything_real_deep() { + Package::new("log", "0.1.0").publish(); + Package::new("foo", "0.1.0").dep("log", "0.1").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + # bar = "0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + Package::new("log", "0.1.1").publish(); + Package::new("bar", "0.1.0").dep("log", "0.1.1").publish(); + + 
p.uncomment_root_manifest(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn change_package_version() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "a-foo" + version = "0.2.0-alpha" + authors = [] + + [dependencies] + bar = { path = "bar", version = "0.2.0-alpha" } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.2.0-alpha" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "") + .file( + "Cargo.lock", + r#" + [[package]] + name = "foo" + version = "0.2.0" + dependencies = ["bar 0.2.0"] + + [[package]] + name = "bar" + version = "0.2.0" + "#, + ) + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn update_precise() { + Package::new("log", "0.1.0").publish(); + Package::new("serde", "0.1.0").publish(); + Package::new("serde", "0.2.1").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.2" + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + Package::new("serde", "0.2.0").publish(); + + assert_that( + p.cargo("update") + .arg("-p") + .arg("serde:0.2.1") + .arg("--precise") + .arg("0.2.0"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[UPDATING] serde v0.2.1 -> v0.2.0 +", + ), + ); +} diff --git a/tests/testsuite/verify_project.rs b/tests/testsuite/verify_project.rs new file mode 100644 index 000000000..926e467ab --- /dev/null +++ b/tests/testsuite/verify_project.rs @@ -0,0 +1,57 @@ +use cargotest::support::{basic_bin_manifest, execs, main_file, project}; +use hamcrest::assert_that; + +fn verify_project_success_output() -> String { + r#"{"success":"true"}"#.into() +} + +#[test] +fn cargo_verify_project_path_to_cargo_toml_relative() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("verify-project") + .arg("--manifest-path") + .arg("foo/Cargo.toml") + .cwd(p.root().parent().unwrap()), + execs() + .with_status(0) + .with_stdout(verify_project_success_output()), + ); +} + +#[test] +fn cargo_verify_project_path_to_cargo_toml_absolute() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("verify-project") + .arg("--manifest-path") + .arg(p.root().join("Cargo.toml")) + .cwd(p.root().parent().unwrap()), + execs() + .with_status(0) + .with_stdout(verify_project_success_output()), + ); +} + +#[test] +fn cargo_verify_project_cwd() { + let p = project("foo") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + assert_that( + p.cargo("verify-project").cwd(p.root()), + execs() + .with_status(0) + .with_stdout(verify_project_success_output()), + ); +} diff --git a/tests/testsuite/version.rs b/tests/testsuite/version.rs new file mode 100644 index 000000000..a78a1a32d --- /dev/null +++ b/tests/testsuite/version.rs @@ -0,0 +1,51 @@ +use cargo; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn simple() { + let p = 
project("foo").build(); + + assert_that( + p.cargo("version"), + execs() + .with_status(0) + .with_stdout(&format!("{}\n", cargo::version())), + ); + + assert_that( + p.cargo("--version"), + execs() + .with_status(0) + .with_stdout(&format!("{}\n", cargo::version())), + ); +} + +#[test] +#[cfg_attr(target_os = "windows", ignore)] +fn version_works_without_rustc() { + let p = project("foo").build(); + assert_that(p.cargo("version").env("PATH", ""), execs().with_status(0)); +} + +#[test] +fn version_works_with_bad_config() { + let p = project("foo") + .file(".cargo/config", "this is not toml") + .build(); + assert_that(p.cargo("version"), execs().with_status(0)); +} + +#[test] +fn version_works_with_bad_target_dir() { + let p = project("foo") + .file( + ".cargo/config", + r#" + [build] + target-dir = 4 + "#, + ) + .build(); + assert_that(p.cargo("version"), execs().with_status(0)); +} diff --git a/tests/testsuite/warn_on_failure.rs b/tests/testsuite/warn_on_failure.rs new file mode 100644 index 000000000..31f79fd54 --- /dev/null +++ b/tests/testsuite/warn_on_failure.rs @@ -0,0 +1,111 @@ +use cargotest::support::{execs, project, Project}; +use cargotest::support::registry::Package; +use hamcrest::assert_that; + +static WARNING1: &'static str = "Hello! I'm a warning. :)"; +static WARNING2: &'static str = "And one more!"; + +fn make_lib(lib_src: &str) { + Package::new("foo", "0.0.1") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + build = "build.rs" + "#, + ) + .file( + "build.rs", + &format!( + r#" + fn main() {{ + use std::io::Write; + println!("cargo:warning={{}}", "{}"); + println!("hidden stdout"); + write!(&mut ::std::io::stderr(), "hidden stderr"); + println!("cargo:warning={{}}", "{}"); + }} + "#, + WARNING1, WARNING2 + ), + ) + .file("src/lib.rs", &format!("fn f() {{ {} }}", lib_src)) + .publish(); +} + +fn make_upstream(main_src: &str) -> Project { + project("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#, + ) + .file("src/main.rs", &format!("fn main() {{ {} }}", main_src)) + .build() +} + +#[test] +fn no_warning_on_success() { + make_lib(""); + let upstream = make_upstream(""); + assert_that( + upstream.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] foo v0.0.1 ([..]) +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn no_warning_on_bin_failure() { + make_lib(""); + let upstream = make_upstream("hi()"); + assert_that( + upstream.cargo("build"), + execs() + .with_status(101) + .with_stdout_does_not_contain("hidden stdout") + .with_stderr_does_not_contain("hidden stderr") + .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING1)) + .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING2)) + .with_stderr_contains("[UPDATING] registry `[..]`") + .with_stderr_contains("[DOWNLOADING] foo v0.0.1 ([..])") + .with_stderr_contains("[COMPILING] foo v0.0.1") + .with_stderr_contains("[COMPILING] bar v0.0.1 ([..])"), + ); +} + +#[test] +fn warning_on_lib_failure() { + make_lib("err()"); + let upstream = make_upstream(""); + assert_that( + upstream.cargo("build"), + execs() + .with_status(101) + .with_stdout_does_not_contain("hidden stdout") + .with_stderr_does_not_contain("hidden stderr") + .with_stderr_does_not_contain("[COMPILING] bar v0.0.1 ([..])") + .with_stderr_contains("[UPDATING] registry `[..]`") + .with_stderr_contains("[DOWNLOADING] foo v0.0.1 ([..])") + .with_stderr_contains("[COMPILING] foo v0.0.1") + .with_stderr_contains(&format!("[WARNING] {}", WARNING1)) + .with_stderr_contains(&format!("[WARNING] {}", WARNING2)), + ); +} diff --git a/tests/testsuite/workspaces.rs b/tests/testsuite/workspaces.rs new file mode 100644 index 000000000..191c518f3 --- /dev/null +++ b/tests/testsuite/workspaces.rs @@ -0,0 +1,2340 @@ +use std::env; +use std::fs::{self, File}; +use std::io::{Read, Write}; + +use cargotest::sleep_ms; +use cargotest::support::{execs, git, project}; +use cargotest::support::registry::Package; +use hamcrest::{assert_that, existing_dir, existing_file, is_not}; + +#[test] +fn simple_explicit() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), is_not(existing_file())); + + assert_that( + p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0), + ); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn simple_explicit_default_members() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + default-members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." 
+ "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.bin("foo"), is_not(existing_file())); +} + +#[test] +fn inferred_root() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), is_not(existing_file())); + + assert_that( + p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0), + ); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn inferred_path_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .file("bar/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), is_not(existing_file())); + + assert_that( + p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0), + ); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn transitive_path_dep() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = { path = "../baz" } + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .file("bar/src/lib.rs", "") + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + "#, + ) + .file("baz/src/main.rs", "fn main() {}") + .file("baz/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), is_not(existing_file())); + assert_that(&p.bin("baz"), is_not(existing_file())); + + assert_that( + p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0), + ); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.bin("baz"), is_not(existing_file())); + + assert_that( + p.cargo("build").cwd(p.root().join("baz")), + execs().with_status(0), + ); + assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.bin("baz"), existing_file()); + + assert_that(&p.root().join("Cargo.lock"), existing_file()); + 
assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); + assert_that(&p.root().join("baz/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn parent_pointer_works() { + let p = project("foo") + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "../bar" } + + [workspace] + "#, + ) + .file("foo/src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = "../foo" + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .file("bar/src/lib.rs", ""); + let p = p.build(); + + assert_that( + p.cargo("build").cwd(p.root().join("foo")), + execs().with_status(0), + ); + assert_that( + p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0), + ); + assert_that(&p.root().join("foo/Cargo.lock"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn same_names_in_workspace() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = ".." + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: two packages named `foo` in this workspace: +- [..]Cargo.toml +- [..]Cargo.toml +", + ), + ); +} + +#[test] +fn parent_doesnt_point_to_child() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that( + p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(101).with_stderr( + "\ +error: current package believes it's in a workspace when it's not: +current: [..]Cargo.toml +workspace: [..]Cargo.toml + +this may be fixable [..] +", + ), + ); +} + +#[test] +fn invalid_parent_pointer() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to read `[..]Cargo.toml` + +Caused by: + [..] +", + ), + ); +} + +#[test] +fn invalid_members() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["foo"] + "#, + ) + .file("src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: failed to read `[..]Cargo.toml` + +Caused by: + [..] 
+", + ), + ); +} + +#[test] +fn bare_workspace_ok() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn two_roots() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [workspace] + members = [".."] + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: multiple workspace roots found in the same workspace: + [..] + [..] +", + ), + ); +} + +#[test] +fn workspace_isnt_root() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that( + p.cargo("build"), + execs() + .with_status(101) + .with_stderr("error: root of a workspace inferred but wasn't a root: [..]"), + ); +} + +#[test] +fn dangling_member() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = "../baz" + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + workspace = "../baz" + "#, + ) + .file("baz/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: package `[..]` is a member of the wrong workspace +expected: [..] +actual: [..] +", + ), + ); +} + +#[test] +fn cycle() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." 
+ "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(101)); +} + +#[test] +fn share_dependencies() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "0.1" + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "< 0.1.5" + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + Package::new("dep1", "0.1.3").publish(); + Package::new("dep1", "0.1.8").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] dep1 v0.1.3 ([..]) +[COMPILING] dep1 v0.1.3 +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); +} + +#[test] +fn fetch_fetches_all() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "*" + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + Package::new("dep1", "0.1.3").publish(); + + assert_that( + p.cargo("fetch"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[DOWNLOADING] dep1 v0.1.3 ([..]) +", + ), + ); +} + +#[test] +fn lock_works_for_everyone() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + dep2 = "0.1" + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "0.1" + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + Package::new("dep1", "0.1.0").publish(); + Package::new("dep2", "0.1.0").publish(); + + assert_that( + p.cargo("generate-lockfile"), + execs() + .with_status(0) + .with_stderr("[UPDATING] registry `[..]`"), + ); + + Package::new("dep1", "0.1.1").publish(); + Package::new("dep2", "0.1.1").publish(); + + assert_that( + p.cargo("build"), + execs().with_status(0).with_stderr( + "\ +[DOWNLOADING] dep2 v0.1.0 ([..]) +[COMPILING] dep2 v0.1.0 +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ), + ); + + assert_that( + p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0).with_stderr( + "\ +[DOWNLOADING] dep1 v0.1.0 ([..]) +[COMPILING] dep1 v0.1.0 +[COMPILING] bar v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ), + ); +} + +#[test] +fn virtual_works() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + assert_that( + p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(0), + ); + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn explicit_package_argument_works_with_virtual_manifest() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + assert_that( + p.cargo("build").cwd(p.root()).args(&["--package", "bar"]), + execs().with_status(0), + ); + assert_that(&p.root().join("Cargo.lock"), existing_file()); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file())); +} + +#[test] +fn virtual_misconfigure() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + assert_that( + p.cargo("build").cwd(p.root().join("bar")), + execs().with_status(101).with_stderr( + "\ +error: current package believes it's in a workspace when it's not: +current: [..]bar[..]Cargo.toml +workspace: [..]Cargo.toml + +this may be fixable by adding `bar` to the `workspace.members` array of the \ +manifest located at: [..] +", + ), + ); +} + +#[test] +fn virtual_build_all_implied() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn virtual_default_members() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + default-members = ["bar"] + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .file("baz/src/main.rs", "fn main() {}"); + let p = p.build(); + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that(&p.bin("bar"), existing_file()); + assert_that(&p.bin("baz"), is_not(existing_file())); +} + +#[test] +fn virtual_default_member_is_not_a_member() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + default-members = ["something-else"] + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: package `[..]something-else` is listed in workspace’s default-members \ +but is not a member. 
+", + ), + ); +} + +#[test] +fn virtual_build_no_members() { + let p = project("foo").file( + "Cargo.toml", + r#" + [workspace] + "#, + ); + let p = p.build(); + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: manifest path `[..]` contains no package: The manifest is virtual, \ +and the workspace has no members. +", + ), + ); +} + +#[test] +fn include_virtual() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "") + .file( + "bar/Cargo.toml", + r#" + [workspace] + "#, + ); + let p = p.build(); + assert_that( + p.cargo("build"), + execs().with_status(101).with_stderr( + "\ +error: multiple workspace roots found in the same workspace: + [..] + [..] +", + ), + ); +} + +#[test] +fn members_include_path_deps() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["p1"] + + [dependencies] + p3 = { path = "p3" } + "#, + ) + .file("src/lib.rs", "") + .file( + "p1/Cargo.toml", + r#" + [project] + name = "p1" + version = "0.1.0" + authors = [] + + [dependencies] + p2 = { path = "../p2" } + "#, + ) + .file("p1/src/lib.rs", "") + .file( + "p2/Cargo.toml", + r#" + [project] + name = "p2" + version = "0.1.0" + authors = [] + "#, + ) + .file("p2/src/lib.rs", "") + .file( + "p3/Cargo.toml", + r#" + [project] + name = "p3" + version = "0.1.0" + authors = [] + "#, + ) + .file("p3/src/lib.rs", ""); + let p = p.build(); + + assert_that( + p.cargo("build").cwd(p.root().join("p1")), + execs().with_status(0), + ); + assert_that( + p.cargo("build").cwd(p.root().join("p2")), + execs().with_status(0), + ); + assert_that( + p.cargo("build").cwd(p.root().join("p3")), + execs().with_status(0), + ); + assert_that(p.cargo("build"), execs().with_status(0)); + + assert_that(&p.root().join("target"), existing_dir()); + assert_that(&p.root().join("p1/target"), is_not(existing_dir())); + assert_that(&p.root().join("p2/target"), is_not(existing_dir())); + assert_that(&p.root().join("p3/target"), is_not(existing_dir())); +} + +#[test] +fn new_warns_you_this_will_not_work() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + "#, + ) + .file("src/lib.rs", ""); + let p = p.build(); + + assert_that( + p.cargo("new").arg("--lib").arg("bar").env("USER", "foo"), + execs().with_status(0).with_stderr( + "\ +warning: compiling this new crate may not work due to invalid workspace \ +configuration + +current package believes it's in a workspace when it's not: +current: [..] +workspace: [..] + +this may be fixable by ensuring that this crate is depended on by the workspace \ +root: [..] 
+[CREATED] library `bar` project +", + ), + ); +} + +#[test] +fn lock_doesnt_change_depending_on_crate() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ['baz'] + + [dependencies] + foo = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("baz/src/lib.rs", ""); + let p = p.build(); + + Package::new("foo", "1.0.0").publish(); + Package::new("bar", "1.0.0").publish(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + let mut lockfile = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lockfile)); + + assert_that( + p.cargo("build").cwd(p.root().join("baz")), + execs().with_status(0), + ); + + let mut lockfile2 = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lockfile2)); + + assert_eq!(lockfile, lockfile2); +} + +#[test] +fn rebuild_please() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ['lib', 'bin'] + "#, + ) + .file( + "lib/Cargo.toml", + r#" + [package] + name = "lib" + version = "0.1.0" + "#, + ) + .file( + "lib/src/lib.rs", + r#" + pub fn foo() -> u32 { 0 } + "#, + ) + .file( + "bin/Cargo.toml", + r#" + [package] + name = "bin" + version = "0.1.0" + + [dependencies] + lib = { path = "../lib" } + "#, + ) + .file( + "bin/src/main.rs", + r#" + extern crate lib; + + fn main() { + assert_eq!(lib::foo(), 0); + } + "#, + ); + let p = p.build(); + + assert_that( + p.cargo("run").cwd(p.root().join("bin")), + execs().with_status(0), + ); + + sleep_ms(1000); + + t!(t!(File::create(p.root().join("lib/src/lib.rs"))).write_all( + br#" + pub fn foo() -> u32 { 1 } + "# + )); + + assert_that( + p.cargo("build").cwd(p.root().join("lib")), + execs().with_status(0), + ); + + assert_that( + p.cargo("run").cwd(p.root().join("bin")), + execs().with_status(101), + ); +} + +#[test] +fn workspace_in_git() { + let git_project = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + "#, + ) + .file("foo/src/lib.rs", "") + }).unwrap(); + let p = project("foo") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "lib" + version = "0.1.0" + + [dependencies.foo] + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/lib.rs", + r#" + pub fn foo() -> u32 { 0 } + "#, + ); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn lockfile_can_specify_nonexistant_members() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + authors = [] + "#, + ) + .file("a/src/main.rs", "fn main() {}") + .file( + "Cargo.lock", + r#" + [[package]] + name = "a" + version = "0.1.0" + + [[package]] + name = "b" + version = "0.1.0" + "#, + ); + + let p = p.build(); + + assert_that( + p.cargo("build").cwd(p.root().join("a")), + execs().with_status(0), + ); +} + +#[test] +fn you_cannot_generate_lockfile_for_empty_workspaces() { + let p = project("foo") + .file( + "Cargo.toml", + r#" + [workspace] + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = 
p.build();
+
+    assert_that(
+        p.cargo("update"),
+        execs()
+            .with_status(101)
+            .with_stderr("error: you can't generate a lockfile for an empty workspace."),
+    );
+}
+
+#[test]
+fn workspace_with_transitive_dev_deps() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = ["mbrubeck@example.com"]
+
+            [dependencies.bar]
+            path = "bar"
+
+            [workspace]
+        "#,
+        )
+        .file("src/main.rs", r#"fn main() {}"#)
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = ["mbrubeck@example.com"]
+
+            [dev-dependencies.baz]
+            path = "../baz"
+        "#,
+        )
+        .file(
+            "bar/src/lib.rs",
+            r#"
+            pub fn init() {}
+
+            #[cfg(test)]
+            #[test]
+            fn test() {
+                extern crate baz;
+                baz::do_stuff();
+            }
+        "#,
+        )
+        .file(
+            "baz/Cargo.toml",
+            r#"
+            [project]
+            name = "baz"
+            version = "0.5.0"
+            authors = ["mbrubeck@example.com"]
+        "#,
+        )
+        .file("baz/src/lib.rs", r#"pub fn do_stuff() {}"#);
+    let p = p.build();
+
+    assert_that(p.cargo("test").args(&["-p", "bar"]), execs().with_status(0));
+}
+
+#[test]
+fn error_if_parent_cargo_toml_is_invalid() {
+    let p = project("foo")
+        .file("Cargo.toml", "Totally not a TOML file")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    assert_that(
+        p.cargo("build").cwd(p.root().join("bar")),
+        execs()
+            .with_status(101)
+            .with_stderr_contains("[ERROR] failed to parse manifest at `[..]`"),
+    );
+}
+
+#[test]
+fn relative_path_for_member_works() {
+    let p = project("foo")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["../bar"]
+        "#,
+        )
+        .file("foo/src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = "../foo"
+        "#,
+        )
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    assert_that(
+        p.cargo("build").cwd(p.root().join("foo")),
+        execs().with_status(0),
+    );
+    assert_that(
+        p.cargo("build").cwd(p.root().join("bar")),
+        execs().with_status(0),
+    );
+}
+
+#[test]
+fn relative_path_for_root_works() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+
+            [dependencies]
+            subproj = { path = "./subproj" }
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            "subproj/Cargo.toml",
+            r#"
+            [project]
+            name = "subproj"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("subproj/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    assert_that(
+        p.cargo("build")
+            .cwd(p.root())
+            .arg("--manifest-path")
+            .arg("./Cargo.toml"),
+        execs().with_status(0),
+    );
+
+    assert_that(
+        p.cargo("build")
+            .cwd(p.root().join("subproj"))
+            .arg("--manifest-path")
+            .arg("../Cargo.toml"),
+        execs().with_status(0),
+    );
+}
+
+#[test]
+fn path_dep_outside_workspace_is_not_member() {
+    let p = project("foo")
+        .file(
+            "ws/Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = { path = "../foo" }
+
+            [workspace]
+        "#,
+        )
+        .file("ws/src/lib.rs", r"extern crate foo;")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("foo/src/lib.rs", "");
+    let p = p.build();
+
+    assert_that(
+        p.cargo("build").cwd(p.root().join("ws")),
+        execs().with_status(0),
+    );
+}
+
+#[test]
+fn test_in_and_out_of_workspace() {
+    let p = project("foo")
+        .file(
+            "ws/Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = { path = "../foo" }
+
+            [workspace]
+            members = [ "../bar" ]
+        "#,
+        )
+        .file(
+            "ws/src/lib.rs",
+            r"extern crate foo; pub fn f() { foo::f() }",
+        )
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "../bar" }
+        "#,
+        )
+        .file(
+            "foo/src/lib.rs",
+            "extern crate bar; pub fn f() { bar::f() }",
+        )
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            workspace = "../ws"
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("bar/src/lib.rs", "pub fn f() { }");
+    let p = p.build();
+
+    assert_that(
+        p.cargo("build").cwd(p.root().join("ws")),
+        execs().with_status(0),
+    );
+
+    assert_that(&p.root().join("ws/Cargo.lock"), existing_file());
+    assert_that(&p.root().join("ws/target"), existing_dir());
+    assert_that(&p.root().join("foo/Cargo.lock"), is_not(existing_file()));
+    assert_that(&p.root().join("foo/target"), is_not(existing_dir()));
+    assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+    assert_that(&p.root().join("bar/target"), is_not(existing_dir()));
+
+    assert_that(
+        p.cargo("build").cwd(p.root().join("foo")),
+        execs().with_status(0),
+    );
+    assert_that(&p.root().join("foo/Cargo.lock"), existing_file());
+    assert_that(&p.root().join("foo/target"), existing_dir());
+    assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+    assert_that(&p.root().join("bar/target"), is_not(existing_dir()));
+}
+
+#[test]
+fn test_path_dependency_under_member() {
+    let p = project("foo")
+        .file(
+            "ws/Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = { path = "../foo" }
+
+            [workspace]
+        "#,
+        )
+        .file(
+            "ws/src/lib.rs",
+            r"extern crate foo; pub fn f() { foo::f() }",
+        )
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+            workspace = "../ws"
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "./bar" }
+        "#,
+        )
+        .file(
+            "foo/src/lib.rs",
+            "extern crate bar; pub fn f() { bar::f() }",
+        )
+        .file(
+            "foo/bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("foo/bar/src/lib.rs", "pub fn f() { }");
+    let p = p.build();
+
+    assert_that(
+        p.cargo("build").cwd(p.root().join("ws")),
+        execs().with_status(0),
+    );
+
+    assert_that(
+        &p.root().join("foo/bar/Cargo.lock"),
+        is_not(existing_file()),
+    );
+    assert_that(&p.root().join("foo/bar/target"), is_not(existing_dir()));
+
+    assert_that(
+        p.cargo("build").cwd(p.root().join("foo/bar")),
+        execs().with_status(0),
+    );
+
+    assert_that(
+        &p.root().join("foo/bar/Cargo.lock"),
+        is_not(existing_file()),
+    );
+    assert_that(&p.root().join("foo/bar/target"), is_not(existing_dir()));
+}
+
+#[test]
+fn excluded_simple() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            exclude = ["foo"]
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("foo/src/lib.rs", "");
+    let p = p.build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(&p.root().join("target"), existing_dir());
+    assert_that(
+        p.cargo("build").cwd(p.root().join("foo")),
+        execs().with_status(0),
+    );
+    assert_that(&p.root().join("foo/target"), existing_dir());
+}
+
+#[test]
+fn exclude_members_preferred() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["foo/bar"]
+            exclude = ["foo"]
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("foo/src/lib.rs", "")
+        .file(
+            "foo/bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("foo/bar/src/lib.rs", "");
+    let p = p.build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(&p.root().join("target"), existing_dir());
+    assert_that(
+        p.cargo("build").cwd(p.root().join("foo")),
+        execs().with_status(0),
+    );
+    assert_that(&p.root().join("foo/target"), existing_dir());
+    assert_that(
+        p.cargo("build").cwd(p.root().join("foo/bar")),
+        execs().with_status(0),
+    );
+    assert_that(&p.root().join("foo/bar/target"), is_not(existing_dir()));
+}
+
+#[test]
+fn exclude_but_also_depend() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "foo/bar" }
+
+            [workspace]
+            exclude = ["foo"]
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("foo/src/lib.rs", "")
+        .file(
+            "foo/bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#,
+        )
+        .file("foo/bar/src/lib.rs", "");
+    let p = p.build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(&p.root().join("target"), existing_dir());
+    assert_that(
+        p.cargo("build").cwd(p.root().join("foo")),
+        execs().with_status(0),
+    );
+    assert_that(&p.root().join("foo/target"), existing_dir());
+    assert_that(
+        p.cargo("build").cwd(p.root().join("foo/bar")),
+        execs().with_status(0),
+    );
+    assert_that(&p.root().join("foo/bar/target"), existing_dir());
+}
+
+#[test]
+fn glob_syntax() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["crates/*"]
+            exclude = ["crates/qux"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("crates/bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/bar/src/main.rs", "fn main() {}")
+        .file("crates/baz/Cargo.toml", r#"
+            [project]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/baz/src/main.rs", "fn main() {}")
+        .file("crates/qux/Cargo.toml", r#"
+            [project]
+            name = "qux"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("crates/qux/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), is_not(existing_file()));
+    assert_that(&p.bin("baz"), is_not(existing_file()));
+
+    assert_that(
+        p.cargo("build").cwd(p.root().join("crates/bar")),
+        execs().with_status(0),
+    );
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), existing_file());
+
+    assert_that(
+        p.cargo("build").cwd(p.root().join("crates/baz")),
+        execs().with_status(0),
+    );
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("baz"), existing_file());
+
+    assert_that(
+        p.cargo("build").cwd(p.root().join("crates/qux")),
+        execs().with_status(0),
+    );
+    assert_that(&p.bin("qux"), is_not(existing_file()));
+
+    assert_that(&p.root().join("Cargo.lock"), existing_file());
+    assert_that(
+        &p.root().join("crates/bar/Cargo.lock"),
+        is_not(existing_file()),
+    );
+    assert_that(
+        &p.root().join("crates/baz/Cargo.lock"),
+        is_not(existing_file()),
+    );
+    assert_that(&p.root().join("crates/qux/Cargo.lock"), existing_file());
+}
+
+/*FIXME: This fails because of how workspace.exclude and workspace.members currently interact.
+#[test]
+fn glob_syntax_2() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["crates/b*"]
+            exclude = ["crates/q*"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("crates/bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/bar/src/main.rs", "fn main() {}")
+        .file("crates/baz/Cargo.toml", r#"
+            [project]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/baz/src/main.rs", "fn main() {}")
+        .file("crates/qux/Cargo.toml", r#"
+            [project]
+            name = "qux"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("crates/qux/src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), is_not(existing_file()));
+    assert_that(&p.bin("baz"), is_not(existing_file()));
+
+    assert_that(p.cargo("build").cwd(p.root().join("crates/bar")),
+                execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), existing_file());
+
+    assert_that(p.cargo("build").cwd(p.root().join("crates/baz")),
+                execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("baz"), existing_file());
+
+    assert_that(p.cargo("build").cwd(p.root().join("crates/qux")),
+                execs().with_status(0));
+    assert_that(&p.bin("qux"), is_not(existing_file()));
+
+    assert_that(&p.root().join("Cargo.lock"), existing_file());
+    assert_that(&p.root().join("crates/bar/Cargo.lock"), is_not(existing_file()));
+    assert_that(&p.root().join("crates/baz/Cargo.lock"), is_not(existing_file()));
+    assert_that(&p.root().join("crates/qux/Cargo.lock"), existing_file());
+}
+*/
+
+#[test]
+fn glob_syntax_invalid_members() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["crates/*"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("crates/bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    assert_that(
+        p.cargo("build"),
+        execs().with_status(101).with_stderr(
+            "\
+error: failed to read `[..]Cargo.toml`
+
+Caused by:
+  [..]
+",
+        ),
+    );
+}
+
+/// This is a freshness test for feature use with workspaces.
+///
+/// feat_lib is used by caller1 and caller2, but with different features enabled.
+/// This test ensures that alternating between building caller1 and caller2
+/// doesn't force a recompile of feat_lib.
+///
+/// Ideally, once we solve https://github.com/rust-lang/cargo/issues/3620,
+/// a single cargo build at the top level will be enough.
+#[test]
+fn dep_used_with_separate_features() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["feat_lib", "caller1", "caller2"]
+        "#,
+        )
+        .file(
+            "feat_lib/Cargo.toml",
+            r#"
+            [project]
+            name = "feat_lib"
+            version = "0.1.0"
+            authors = []
+
+            [features]
+            myfeature = []
+        "#,
+        )
+        .file("feat_lib/src/lib.rs", "")
+        .file(
+            "caller1/Cargo.toml",
+            r#"
+            [project]
+            name = "caller1"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            feat_lib = { path = "../feat_lib" }
+        "#,
+        )
+        .file("caller1/src/main.rs", "fn main() {}")
+        .file("caller1/src/lib.rs", "")
+        .file(
+            "caller2/Cargo.toml",
+            r#"
+            [project]
+            name = "caller2"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            feat_lib = { path = "../feat_lib", features = ["myfeature"] }
+            caller1 = { path = "../caller1" }
+        "#,
+        )
+        .file("caller2/src/main.rs", "fn main() {}")
+        .file("caller2/src/lib.rs", "");
+    let p = p.build();
+
+    // Build the entire workspace.
+    assert_that(
+        p.cargo("build").arg("--all"),
+        execs().with_status(0).with_stderr(
+            "\
+[..]Compiling feat_lib v0.1.0 ([..])
+[..]Compiling caller1 v0.1.0 ([..])
+[..]Compiling caller2 v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ),
+    );
+    assert_that(&p.bin("caller1"), existing_file());
+    assert_that(&p.bin("caller2"), existing_file());
+
+    // Building caller1 should build the dep library. Because the features
+    // are different from those used by the full workspace build, it rebuilds.
+    // Ideally, once we solve https://github.com/rust-lang/cargo/issues/3620,
+    // a single cargo build at the top level will be enough.
+    assert_that(
+        p.cargo("build").cwd(p.root().join("caller1")),
+        execs().with_status(0).with_stderr(
+            "\
+[..]Compiling feat_lib v0.1.0 ([..])
+[..]Compiling caller1 v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ),
+    );
+
+    // Alternate building caller2/caller1 a few times, just to make sure
+    // features are being built separately. Should not rebuild anything.
+    assert_that(
+        p.cargo("build").cwd(p.root().join("caller2")),
+        execs()
+            .with_status(0)
+            .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"),
+    );
+    assert_that(
+        p.cargo("build").cwd(p.root().join("caller1")),
+        execs()
+            .with_status(0)
+            .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"),
+    );
+    assert_that(
+        p.cargo("build").cwd(p.root().join("caller2")),
+        execs()
+            .with_status(0)
+            .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"),
+    );
+}
+
+#[test]
+fn dont_recurse_out_of_cargo_home() {
+    let git_project = git::new("dep", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+                [package]
+                name = "dep"
+                version = "0.1.0"
+            "#,
+            )
+            .file("src/lib.rs", "")
+            .file(
+                "build.rs",
+                r#"
+                use std::env;
+                use std::path::Path;
+                use std::process::{self, Command};
+
+                fn main() {
+                    let cargo = env::var_os("CARGO").unwrap();
+                    let cargo_manifest_dir = env::var_os("CARGO_MANIFEST_DIR").unwrap();
+                    let output = Command::new(cargo)
+                        .args(&["metadata", "--format-version", "1", "--manifest-path"])
+                        .arg(&Path::new(&cargo_manifest_dir).join("Cargo.toml"))
+                        .output()
+                        .unwrap();
+                    if !output.status.success() {
+                        eprintln!("{}", String::from_utf8(output.stderr).unwrap());
+                        process::exit(1);
+                    }
+                }
+            "#,
+            )
+    }).unwrap();
+    let p = project("lib")
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+                [package]
+                name = "lib"
+                version = "0.1.0"
+
+                [dependencies.dep]
+                git = "{}"
+
+                [workspace]
+            "#,
+                git_project.url()
+            ),
+        )
+        .file("src/lib.rs", "");
+    let p = p.build();
+
+    assert_that(
+        p.cargo("build").env("CARGO_HOME", p.root().join(".cargo")),
+        execs().with_status(0),
+    );
+}
+
+/*FIXME: This fails because of how workspace.exclude and workspace.members currently interact.
+#[test] +fn include_and_exclude() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["foo"] + exclude = ["foo/bar"] + "#) + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("foo/src/lib.rs", "") + .file("foo/bar/Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("foo/bar/src/lib.rs", ""); + p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("foo")), + execs().with_status(0)); + assert_that(&p.root().join("target"), existing_dir()); + assert_that(&p.root().join("foo/target"), is_not(existing_dir())); + assert_that(p.cargo("build").cwd(p.root().join("foo/bar")), + execs().with_status(0)); + assert_that(&p.root().join("foo/bar/target"), existing_dir()); +} +*/ + +#[test] +fn cargo_home_at_root_works() { + let p = project("lib") + .file( + "Cargo.toml", + r#" + [package] + name = "lib" + version = "0.1.0" + + [workspace] + members = ["a"] + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.1.0" + "#, + ) + .file("a/src/lib.rs", ""); + let p = p.build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + assert_that( + p.cargo("build").arg("--frozen").env("CARGO_HOME", p.root()), + execs().with_status(0), + ); +} + +#[test] +fn relative_rustc() { + let p = project("the_exe") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + "#, + ) + .file( + "src/main.rs", + r#" + use std::process::Command; + use std::env; + + fn main() { + let mut cmd = Command::new("rustc"); + for arg in env::args_os().skip(1) { + cmd.arg(arg); + } + std::process::exit(cmd.status().unwrap().code().unwrap()); + } + "#, + ) + .build(); + assert_that(p.cargo("build"), execs().with_status(0)); + + let src = p.root() + .join("target/debug/foo") + .with_extension(env::consts::EXE_EXTENSION); + + Package::new("a", "0.1.0").publish(); + + let p = project("lib") + .file( + "Cargo.toml", + r#" + [package] + name = "lib" + version = "0.1.0" + + [dependencies] + a = "0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + fs::copy(&src, p.root().join(src.file_name().unwrap())).unwrap(); + + let file = format!("./foo{}", env::consts::EXE_SUFFIX); + assert_that(p.cargo("build").env("RUSTC", &file), execs().with_status(0)); +}
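+
+// Editor's hedged sketch, not part of the upstream suite: a minimal
+// illustration of the freshness behavior that `dep_used_with_separate_features`
+// above relies on. It uses only helpers already present in this file
+// (`project`, `assert_that`, `execs`); the test name and scenario are
+// hypothetical, added purely as an example.
+#[test]
+fn second_build_of_unchanged_workspace_is_fresh() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    // The first build compiles the crate.
+    assert_that(p.cargo("build"), execs().with_status(0));
+
+    // Nothing changed, so the second build should print only the
+    // [FINISHED] line, matching the fresh-build assertions made for the
+    // caller1/caller2 alternation above.
+    assert_that(
+        p.cargo("build"),
+        execs()
+            .with_status(0)
+            .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"),
+    );
+}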