use self::types::{ActivateError, ActivateResult, Candidate, ConflictReason, DepsFrame, GraphNode};
use self::types::{RcVecIter, RegistryQueryer};
-pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve, encodable_package_id};
+pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
pub use self::encode::{Metadata, WorkspaceResolve};
pub use self::resolve::{Deps, DepsNotReplaced, Resolve};
pub use self::types::Method;
);
check_cycles(&resolve, &cx.activations)?;
+ check_duplicate_pkgs(&resolve)?;
trace!("resolved: {:?}", resolve);
// If we have a shell, emit warnings about required deps used as feature.
Ok(())
}
}
+
+/// Collects the names of packages that appear more than once in the
+/// resolve graph under the same encodable package id.
+///
+/// Each duplicated package name is returned once, in arbitrary order
+/// (the intermediate `HashSet` deduplicates repeats).
+fn get_duplicate_pkgs(resolve: &Resolve) -> Vec<&'static str> {
+    let mut unique_pkg_ids = HashSet::new();
+    let mut result = HashSet::new();
+    for pkg_id in resolve.iter() {
+        let encodable_pkg_id = encode::encodable_package_id(pkg_id);
+        // `insert` returns false when the id was already present,
+        // i.e. this package id is a duplicate.
+        if !unique_pkg_ids.insert(encodable_pkg_id) {
+            result.insert(pkg_id.name().as_str());
+        }
+    }
+    result.into_iter().collect()
+}
+
+/// Fails with an error naming the offending packages when the resolve
+/// graph contains duplicate package ids; succeeds otherwise.
+fn check_duplicate_pkgs(resolve: &Resolve) -> CargoResult<()> {
+    let dup_names = get_duplicate_pkgs(resolve);
+    if !dup_names.is_empty() {
+        bail!(
+            "dependencies contain duplicate package(s) in the \
+             same namespace from the same source: {}",
+            dup_names.join(", ")
+        );
+    }
+    Ok(())
+}
-use std::collections::HashSet;
use std::io::prelude::*;
use toml;
-use core::resolver::WorkspaceResolve;
use core::{resolver, Resolve, Workspace};
-use util::errors::{CargoResult, CargoResultExt, Internal};
-use util::toml as cargo_toml;
+use core::resolver::WorkspaceResolve;
use util::Filesystem;
+use util::errors::{CargoResult, CargoResultExt};
+use util::toml as cargo_toml;
pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
if !ws.root().join("Cargo.lock").exists() {
let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?;
let v: resolver::EncodableResolve = resolve.try_into()?;
Ok(Some(v.into_resolve(ws)?))
- })().chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
+ })()
+ .chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
Ok(resolve)
}
-fn duplicate_pkgs(resolve: &Resolve) -> Vec<&'static str> {
- let mut unique_names = HashSet::new();
- let mut result = HashSet::new();
- for pkg_id in resolve.iter() {
- let mut encodable_pkd_id = resolver::encodable_package_id(pkg_id);
- if !unique_names.insert(encodable_pkd_id) {
- result.insert(pkg_id.name().as_str());
- }
- }
- result.into_iter().collect()
-}
-
-fn check_duplicate_pkgs(resolve: &Resolve) -> Result<(), Internal> {
- let names = duplicate_pkgs(resolve);
- if names.is_empty() {
- Ok(())
- } else {
- Err(Internal::new(format_err!(
- "dependencies contain duplicate package(s): {}",
- names.join(", ")
- )))
- }
-}
-
pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> {
// Load the original lockfile if it exists.
let ws_root = Filesystem::new(ws.root().to_path_buf());
Ok(s)
});
- check_duplicate_pkgs(resolve).chain_err(|| format!("failed to generate lock file"))?;
-
let toml = toml::Value::try_from(WorkspaceResolve { ws, resolve }).unwrap();
let mut out = String::new();
use std::io::prelude::*;
use cargotest::support::registry::Package;
-use cargotest::support::{execs, project, ProjectBuilder, paths};
+use cargotest::support::{execs, paths, project, ProjectBuilder};
use cargotest::ChannelChanger;
use hamcrest::{assert_that, existing_file, is_not};
.build();
// should fail due to a duplicate package `common` in the lockfile
- assert_that(b.cargo("build"), execs().with_status(101));
+ assert_that(
+ b.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "[..]dependencies contain duplicate package(s) in the \
+ same namespace from the same source: common",
+ ),
+ );
}