From b04c79a0e5a19b535b4bef8854aea3066e9e3373 Mon Sep 17 00:00:00 2001
From: boxdot
Date: Sun, 3 Jun 2018 22:55:50 +0200
Subject: [PATCH] Move duplicate pkgs check to resolve builder.

---
 src/cargo/core/resolver/mod.rs       | 28 +++++++++++++++++++++-
 src/cargo/ops/lockfile.rs            | 36 ++++------------------------
 tests/testsuite/generate_lockfile.rs | 10 ++++++--
 3 files changed, 40 insertions(+), 34 deletions(-)

diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs
index f60560240..db3ed57c0 100644
--- a/src/cargo/core/resolver/mod.rs
+++ b/src/cargo/core/resolver/mod.rs
@@ -65,7 +65,7 @@ use self::context::{Activations, Context};
 use self::types::{ActivateError, ActivateResult, Candidate, ConflictReason, DepsFrame, GraphNode};
 use self::types::{RcVecIter, RegistryQueryer};
 
-pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve, encodable_package_id};
+pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
 pub use self::encode::{Metadata, WorkspaceResolve};
 pub use self::resolve::{Deps, DepsNotReplaced, Resolve};
 pub use self::types::Method;
@@ -140,6 +140,7 @@ pub fn resolve(
     );
 
     check_cycles(&resolve, &cx.activations)?;
+    check_duplicate_pkgs(&resolve)?;
     trace!("resolved: {:?}", resolve);
 
     // If we have a shell, emit warnings about required deps used as feature.
@@ -1098,3 +1099,28 @@ fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()>
         Ok(())
     }
 }
+
+fn get_duplicate_pkgs(resolve: &Resolve) -> Vec<&'static str> {
+    let mut unique_pkg_ids = HashSet::new();
+    let mut result = HashSet::new();
+    for pkg_id in resolve.iter() {
+        let encodable_pkg_id = encode::encodable_package_id(pkg_id);
+        if !unique_pkg_ids.insert(encodable_pkg_id) {
+            result.insert(pkg_id.name().as_str());
+        }
+    }
+    result.into_iter().collect()
+}
+
+fn check_duplicate_pkgs(resolve: &Resolve) -> CargoResult<()> {
+    let names = get_duplicate_pkgs(resolve);
+    if names.is_empty() {
+        Ok(())
+    } else {
+        bail!(
+            "dependencies contain duplicate package(s) in the \
+             same namespace from the same source: {}",
+            names.join(", ")
+        )
+    }
+}
diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs
index 83bd90695..d90e5f408 100644
--- a/src/cargo/ops/lockfile.rs
+++ b/src/cargo/ops/lockfile.rs
@@ -1,13 +1,12 @@
-use std::collections::HashSet;
 use std::io::prelude::*;
 
 use toml;
 
-use core::resolver::WorkspaceResolve;
 use core::{resolver, Resolve, Workspace};
-use util::errors::{CargoResult, CargoResultExt, Internal};
-use util::toml as cargo_toml;
+use core::resolver::WorkspaceResolve;
 use util::Filesystem;
+use util::errors::{CargoResult, CargoResultExt};
+use util::toml as cargo_toml;
 
 pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
     if !ws.root().join("Cargo.lock").exists() {
@@ -26,34 +25,11 @@ pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
         let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?;
         let v: resolver::EncodableResolve = resolve.try_into()?;
         Ok(Some(v.into_resolve(ws)?))
-    })().chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
+    })()
+    .chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
     Ok(resolve)
 }
 
-fn duplicate_pkgs(resolve: &Resolve) -> Vec<&'static str> {
-    let mut unique_names = HashSet::new();
-    let mut result = HashSet::new();
-    for pkg_id in resolve.iter() {
-        let mut encodable_pkd_id = resolver::encodable_package_id(pkg_id);
-        if !unique_names.insert(encodable_pkd_id) {
-            result.insert(pkg_id.name().as_str());
-        }
-    }
-    result.into_iter().collect()
-}
-
-fn check_duplicate_pkgs(resolve: &Resolve) -> Result<(), Internal> {
-    let names = duplicate_pkgs(resolve);
-    if names.is_empty() {
-        Ok(())
-    } else {
-        Err(Internal::new(format_err!(
-            "dependencies contain duplicate package(s): {}",
-            names.join(", ")
-        )))
-    }
-}
-
 pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> {
     // Load the original lockfile if it exists.
     let ws_root = Filesystem::new(ws.root().to_path_buf());
@@ -64,8 +40,6 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()>
         Ok(s)
     });
 
-    check_duplicate_pkgs(resolve).chain_err(|| format!("failed to generate lock file"))?;
-
     let toml = toml::Value::try_from(WorkspaceResolve { ws, resolve }).unwrap();
 
     let mut out = String::new();
diff --git a/tests/testsuite/generate_lockfile.rs b/tests/testsuite/generate_lockfile.rs
index 398722a13..ec21fa110 100644
--- a/tests/testsuite/generate_lockfile.rs
+++ b/tests/testsuite/generate_lockfile.rs
@@ -2,7 +2,7 @@ use std::fs::{self, File};
 use std::io::prelude::*;
 
 use cargotest::support::registry::Package;
-use cargotest::support::{execs, project, ProjectBuilder, paths};
+use cargotest::support::{execs, paths, project, ProjectBuilder};
 use cargotest::ChannelChanger;
 use hamcrest::{assert_that, existing_file, is_not};
 
@@ -304,5 +304,11 @@ fn duplicate_entries_in_lockfile() {
         .build();
 
     // should fail due to a duplicate package `common` in the lockfile
-    assert_that(b.cargo("build"), execs().with_status(101));
+    assert_that(
+        b.cargo("build"),
+        execs().with_status(101).with_stderr_contains(
+            "[..]dependencies contain duplicate package(s) in the \
+             same namespace from the same source: common",
+        ),
+    );
 }
-- 
2.30.2