Move duplicate pkgs check to resolve builder.
authorboxdot <d@zerovolt.org>
Sun, 3 Jun 2018 20:55:50 +0000 (22:55 +0200)
committerboxdot <d@zerovolt.org>
Sun, 3 Jun 2018 20:55:50 +0000 (22:55 +0200)
src/cargo/core/resolver/mod.rs
src/cargo/ops/lockfile.rs
tests/testsuite/generate_lockfile.rs

index f60560240447e75782d5bf87118951d345d2820b..db3ed57c0984fa74d08b3d0dc86f9a96fec8a19a 100644 (file)
@@ -65,7 +65,7 @@ use self::context::{Activations, Context};
 use self::types::{ActivateError, ActivateResult, Candidate, ConflictReason, DepsFrame, GraphNode};
 use self::types::{RcVecIter, RegistryQueryer};
 
-pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve, encodable_package_id};
+pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
 pub use self::encode::{Metadata, WorkspaceResolve};
 pub use self::resolve::{Deps, DepsNotReplaced, Resolve};
 pub use self::types::Method;
@@ -140,6 +140,7 @@ pub fn resolve(
     );
 
     check_cycles(&resolve, &cx.activations)?;
+    check_duplicate_pkgs(&resolve)?;
     trace!("resolved: {:?}", resolve);
 
     // If we have a shell, emit warnings about required deps used as feature.
@@ -1098,3 +1099,28 @@ fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()>
         Ok(())
     }
 }
+
+fn get_duplicate_pkgs(resolve: &Resolve) -> Vec<&'static str> {
+    let mut unique_pkg_ids = HashSet::new();
+    let mut result = HashSet::new();
+    for pkg_id in resolve.iter() {
+        let encodable_pkg_id = encode::encodable_package_id(pkg_id);
+        if !unique_pkg_ids.insert(encodable_pkg_id) {
+            result.insert(pkg_id.name().as_str());
+        }
+    }
+    result.into_iter().collect()
+}
+
+fn check_duplicate_pkgs(resolve: &Resolve) -> CargoResult<()> {
+    let names = get_duplicate_pkgs(resolve);
+    if names.is_empty() {
+        Ok(())
+    } else {
+        bail!(
+            "dependencies contain duplicate package(s) in the \
+             same namespace from the same source: {}",
+            names.join(", ")
+        )
+    }
+}
index 83bd906950325d66b14f357f32287db0444df1e8..d90e5f4084868506d11882f310c05921e7ff8c84 100644 (file)
@@ -1,13 +1,12 @@
-use std::collections::HashSet;
 use std::io::prelude::*;
 
 use toml;
 
-use core::resolver::WorkspaceResolve;
 use core::{resolver, Resolve, Workspace};
-use util::errors::{CargoResult, CargoResultExt, Internal};
-use util::toml as cargo_toml;
+use core::resolver::WorkspaceResolve;
 use util::Filesystem;
+use util::errors::{CargoResult, CargoResultExt};
+use util::toml as cargo_toml;
 
 pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
     if !ws.root().join("Cargo.lock").exists() {
@@ -26,34 +25,11 @@ pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
             let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?;
             let v: resolver::EncodableResolve = resolve.try_into()?;
             Ok(Some(v.into_resolve(ws)?))
-        })().chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
+        })()
+            .chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
     Ok(resolve)
 }
 
-fn duplicate_pkgs(resolve: &Resolve) -> Vec<&'static str> {
-    let mut unique_names = HashSet::new();
-    let mut result = HashSet::new();
-    for pkg_id in resolve.iter() {
-        let mut encodable_pkd_id = resolver::encodable_package_id(pkg_id);
-        if !unique_names.insert(encodable_pkd_id) {
-            result.insert(pkg_id.name().as_str());
-        }
-    }
-    result.into_iter().collect()
-}
-
-fn check_duplicate_pkgs(resolve: &Resolve) -> Result<(), Internal> {
-    let names = duplicate_pkgs(resolve);
-    if names.is_empty() {
-        Ok(())
-    } else {
-        Err(Internal::new(format_err!(
-            "dependencies contain duplicate package(s): {}",
-            names.join(", ")
-        )))
-    }
-}
-
 pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> {
     // Load the original lockfile if it exists.
     let ws_root = Filesystem::new(ws.root().to_path_buf());
@@ -64,8 +40,6 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()>
         Ok(s)
     });
 
-    check_duplicate_pkgs(resolve).chain_err(|| format!("failed to generate lock file"))?;
-
     let toml = toml::Value::try_from(WorkspaceResolve { ws, resolve }).unwrap();
 
     let mut out = String::new();
index 398722a1333d72d82b0d8eb7cbf6314d071c0ebf..ec21fa1104b3b809b3764c009e4126f12d608c30 100644 (file)
@@ -2,7 +2,7 @@ use std::fs::{self, File};
 use std::io::prelude::*;
 
 use cargotest::support::registry::Package;
-use cargotest::support::{execs, project, ProjectBuilder, paths};
+use cargotest::support::{execs, paths, project, ProjectBuilder};
 use cargotest::ChannelChanger;
 use hamcrest::{assert_that, existing_file, is_not};
 
@@ -304,5 +304,11 @@ fn duplicate_entries_in_lockfile() {
         .build();
 
     // should fail due to a duplicate package `common` in the lockfile
-    assert_that(b.cargo("build"), execs().with_status(101));
+    assert_that(
+        b.cargo("build"),
+        execs().with_status(101).with_stderr_contains(
+            "[..]dependencies contain duplicate package(s) in the \
+             same namespace from the same source: common",
+        ),
+    );
 }