From c933673e7d5532b19f9e6cef5d05c0a749f9b543 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 2 Mar 2018 09:44:47 -0800 Subject: [PATCH] Try a lot harder to recover corrupt git repos We've received a lot of intermittent bug reports historically about corrupt git repositories. These inevitably happen as Cargo is ctrl-c'd or for whatever other reason, and to provide a better user experience Cargo strives to automatically handle these situations by blowing away the old checkout for a new update. This commit adds a new test which attempts to pathologically corrupt a git database and checkout in an attempt to expose bugs in Cargo. Sure enough there were some more locations that we needed to handle gracefully for corrupt git checkouts. Notable inclusions were: * The `fetch` operation in libgit2 would fail due to corrupt references. This starts by adding an explicit whitelist for classes of errors coming out of `fetch` to auto-retry by blowing away the repository. We need to be super careful here as network errors commonly come out of this function and we don't want to too aggressively re-clone. * After a `fetch` succeeded a repository could fail to actually resolve a git reference to the actual revision we want. This indicated that we indeed needed to blow everything away and re-clone entirely again. * When creating a checkout from a database the `reset` operation might fail due to a corrupt local database of the checkout itself. If this happens we needed to just blow it away and try again. There are likely more lurking situations where we need to re-clone but I figure we can discover those over time. 
--- .travis.yml | 2 +- src/cargo/ops/cargo_clean.rs | 7 +- src/cargo/ops/cargo_install.rs | 7 +- src/cargo/ops/cargo_package.rs | 4 +- src/cargo/ops/cargo_rustc/mod.rs | 9 +- src/cargo/ops/cargo_rustc/output_depinfo.rs | 18 +- src/cargo/sources/git/source.rs | 6 +- src/cargo/sources/git/utils.rs | 174 +++++++++++++------- src/cargo/util/flock.rs | 7 +- src/cargo/util/paths.rs | 73 +++++++- tests/testsuite/corrupt_git.rs | 161 ++++++++++++++++++ tests/testsuite/git.rs | 3 +- tests/testsuite/lib.rs | 1 + 13 files changed, 380 insertions(+), 92 deletions(-) create mode 100644 tests/testsuite/corrupt_git.rs diff --git a/.travis.yml b/.travis.yml index 6be079703..f03698190 100644 --- a/.travis.yml +++ b/.travis.yml @@ -34,7 +34,7 @@ matrix: script: - cargo test - cargo doc --no-deps - - (cd src/doc && mdbook build --no-create --dest-dir ../../target/doc) + - (cd src/doc && mdbook build --dest-dir ../../target/doc) exclude: - rust: stable diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index 1cf20df52..647aa1d36 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -5,6 +5,7 @@ use std::path::Path; use core::{Profiles, Workspace}; use util::Config; use util::errors::{CargoResult, CargoResultExt}; +use util::paths; use ops::{self, Context, BuildConfig, Kind, Unit}; pub struct CleanOptions<'a> { @@ -99,14 +100,14 @@ fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> { let m = fs::metadata(path); if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) { config.shell().verbose(|shell| {shell.status("Removing", path.display())})?; - fs::remove_dir_all(path).chain_err(|| { + paths::remove_dir_all(path).chain_err(|| { format_err!("could not remove build directory") })?; } else if m.is_ok() { config.shell().verbose(|shell| {shell.status("Removing", path.display())})?; - fs::remove_file(path).chain_err(|| { + paths::remove_file(path).chain_err(|| { format_err!("failed to remove build artifact") })?; } Ok(()) -} \ No newline at 
end of file +} diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index f3cbdac45..3acf20766 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -17,6 +17,7 @@ use sources::{GitSource, PathSource, SourceConfigMap}; use util::{Config, internal}; use util::{Filesystem, FileLock}; use util::errors::{CargoResult, CargoResultExt}; +use util::paths; #[derive(Deserialize, Serialize)] #[serde(untagged)] @@ -47,7 +48,7 @@ impl Transaction { impl Drop for Transaction { fn drop(&mut self) { for bin in self.bins.iter() { - let _ = fs::remove_file(bin); + let _ = paths::remove_file(bin); } } } @@ -319,7 +320,7 @@ fn install_one(root: &Filesystem, // Don't bother grabbing a lock as we're going to blow it all away // anyway. let target_dir = ws.target_dir().into_path_unlocked(); - fs::remove_dir_all(&target_dir)?; + paths::remove_dir_all(&target_dir)?; } Ok(()) @@ -667,7 +668,7 @@ pub fn uninstall_one(root: &Filesystem, write_crate_list(&crate_metadata, metadata)?; for bin in to_remove { config.shell().status("Removing", bin.display())?; - fs::remove_file(bin)?; + paths::remove_file(bin)?; } Ok(()) diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index 0ad54e6a7..b1fe6e767 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -304,8 +304,8 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult let f = GzDecoder::new(tar.file()); let dst = tar.parent().join(&format!("{}-{}", pkg.name(), pkg.version())); - if fs::metadata(&dst).is_ok() { - fs::remove_dir_all(&dst)?; + if dst.exists() { + paths::remove_dir_all(&dst)?; } let mut archive = Archive::new(f); archive.unpack(dst.parent().unwrap())?; diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs index 5a05d45a6..4cbdf2d25 100644 --- a/src/cargo/ops/cargo_rustc/mod.rs +++ b/src/cargo/ops/cargo_rustc/mod.rs @@ -15,6 +15,7 @@ use core::manifest::Lto; use 
core::shell::ColorChoice; use util::{self, ProcessBuilder, machine_message}; use util::{Config, internal, profile, join_paths}; +use util::paths; use util::errors::{CargoResult, CargoResultExt, Internal}; use util::Freshness; @@ -385,9 +386,7 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, if filename.extension() == Some(OsStr::new("rmeta")) { let dst = root.join(filename).with_extension("rlib"); if dst.exists() { - fs::remove_file(&dst).chain_err(|| { - format!("Could not remove file: {}.", dst.display()) - })?; + paths::remove_file(&dst)?; } } } @@ -541,9 +540,7 @@ fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, continue } if dst.exists() { - fs::remove_file(&dst).chain_err(|| { - format!("failed to remove: {}", dst.display()) - })?; + paths::remove_file(&dst)?; } let link_result = if src.is_dir() { diff --git a/src/cargo/ops/cargo_rustc/output_depinfo.rs b/src/cargo/ops/cargo_rustc/output_depinfo.rs index bfcebd5aa..f51440a9e 100644 --- a/src/cargo/ops/cargo_rustc/output_depinfo.rs +++ b/src/cargo/ops/cargo_rustc/output_depinfo.rs @@ -1,10 +1,11 @@ use std::collections::HashSet; -use std::io::{Write, BufWriter, ErrorKind}; -use std::fs::{self, File}; +use std::io::{Write, BufWriter}; +use std::fs::File; use std::path::{Path, PathBuf}; use ops::{Context, Unit}; use util::{CargoResult, internal}; +use util::paths; use ops::cargo_rustc::fingerprint; fn render_filename>(path: P, basedir: Option<&str>) -> CargoResult { @@ -81,13 +82,12 @@ pub fn output_depinfo<'a, 'b>(context: &mut Context<'a, 'b>, unit: &Unit<'a>) -> write!(outfile, " {}", render_filename(dep, basedir)?)?; } writeln!(outfile, "")?; - } else if let Err(err) = fs::remove_file(output_path) { - // dep-info generation failed, so delete output file. This will usually - // cause the build system to always rerun the build rule, which is correct - // if inefficient. - if err.kind() != ErrorKind::NotFound { - return Err(err.into()); - } + + // dep-info generation failed, so delete output file. 
This will + // usually cause the build system to always rerun the build + // rule, which is correct if inefficient. + } else if output_path.exists() { + paths::remove_file(output_path)?; } } } diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs index 6d3d878a6..aba45e531 100644 --- a/src/cargo/sources/git/source.rs +++ b/src/cargo/sources/git/source.rs @@ -6,7 +6,7 @@ use core::source::{Source, SourceId}; use core::GitReference; use core::{Package, PackageId, Summary, Registry, Dependency}; use util::Config; -use util::errors::{CargoResult, Internal}; +use util::errors::CargoResult; use util::hex::short_hash; use sources::PathSource; use sources::git::utils::{GitRemote, GitRevision}; @@ -170,9 +170,7 @@ impl<'cfg> Source for GitSource<'cfg> { trace!("updating git source `{:?}`", self.remote); - let repo = self.remote.checkout(&db_path, self.config)?; - let rev = repo.rev_for(&self.reference).map_err(Internal::new)?; - (repo, rev) + self.remote.checkout(&db_path, &self.reference, self.config)? 
} else { (self.remote.db_at(&db_path)?, actual_rev.unwrap()) }; diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs index fd834da50..da69dce28 100644 --- a/src/cargo/sources/git/utils.rs +++ b/src/cargo/sources/git/utils.rs @@ -12,6 +12,7 @@ use url::Url; use core::GitReference; use util::{ToUrl, internal, Config, network, Progress}; +use util::paths; use util::errors::{CargoResult, CargoResultExt, CargoError}; #[derive(PartialEq, Clone, Debug)] @@ -87,30 +88,40 @@ impl GitRemote { pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult { - let db = self.db_at(path)?; - db.rev_for(reference) + reference.resolve(&self.db_at(path)?.repo) } - pub fn checkout(&self, into: &Path, cargo_config: &Config) -> CargoResult { - let repo = match git2::Repository::open(into) { - Ok(mut repo) => { - self.fetch_into(&mut repo, cargo_config).chain_err(|| { - format!("failed to fetch into {}", into.display()) - })?; - repo + pub fn checkout(&self, + into: &Path, + reference: &GitReference, + cargo_config: &Config) + -> CargoResult<(GitDatabase, GitRevision)> + { + let mut repo_and_rev = None; + if let Ok(mut repo) = git2::Repository::open(into) { + self.fetch_into(&mut repo, cargo_config).chain_err(|| { + format!("failed to fetch into {}", into.display()) + })?; + if let Ok(rev) = reference.resolve(&repo) { + repo_and_rev = Some((repo, rev)); } - Err(..) => { - self.clone_into(into, cargo_config).chain_err(|| { + } + let (repo, rev) = match repo_and_rev { + Some(pair) => pair, + None => { + let repo = self.clone_into(into, cargo_config).chain_err(|| { format!("failed to clone into: {}", into.display()) - })? 
+ })?; + let rev = reference.resolve(&repo)?; + (repo, rev) } }; - Ok(GitDatabase { + Ok((GitDatabase { remote: self.clone(), path: into.to_path_buf(), repo, - }) + }, rev)) } pub fn db_at(&self, db_path: &Path) -> CargoResult { @@ -130,7 +141,7 @@ impl GitRemote { fn clone_into(&self, dst: &Path, cargo_config: &Config) -> CargoResult { if fs::metadata(&dst).is_ok() { - fs::remove_dir_all(dst)?; + paths::remove_dir_all(dst)?; } fs::create_dir_all(dst)?; let mut repo = git2::Repository::init_bare(dst)?; @@ -142,29 +153,51 @@ impl GitRemote { impl GitDatabase { pub fn copy_to(&self, rev: GitRevision, dest: &Path, cargo_config: &Config) -> CargoResult { - let checkout = match git2::Repository::open(dest) { - Ok(repo) => { - let mut checkout = GitCheckout::new(dest, self, rev, repo); - if !checkout.is_fresh() { - checkout.fetch(cargo_config)?; - checkout.reset(cargo_config)?; - assert!(checkout.is_fresh()); + let mut checkout = None; + if let Ok(repo) = git2::Repository::open(dest) { + let mut co = GitCheckout::new(dest, self, rev.clone(), repo); + if !co.is_fresh() { + // After a successful fetch operation do a sanity check to + // ensure we've got the object in our database to reset to. This + // can fail sometimes for corrupt repositories where the fetch + // operation succeeds but the object isn't actually there. + co.fetch(cargo_config)?; + if co.has_object() { + co.reset(cargo_config)?; + assert!(co.is_fresh()); + checkout = Some(co); } - checkout + } else { + checkout = Some(co); } - Err(..) 
=> GitCheckout::clone_into(dest, self, rev, cargo_config)?, + }; + let checkout = match checkout { + Some(c) => c, + None => GitCheckout::clone_into(dest, self, rev, cargo_config)?, }; checkout.update_submodules(cargo_config)?; Ok(checkout) } - pub fn rev_for(&self, reference: &GitReference) -> CargoResult { - let id = match *reference { + pub fn to_short_id(&self, revision: GitRevision) -> CargoResult { + let obj = self.repo.find_object(revision.0, None)?; + Ok(GitShortID(obj.short_id()?)) + } + + pub fn has_ref(&self, reference: &str) -> CargoResult<()> { + self.repo.revparse_single(reference)?; + Ok(()) + } +} + +impl GitReference { + fn resolve(&self, repo: &git2::Repository) -> CargoResult { + let id = match *self { GitReference::Tag(ref s) => { (|| -> CargoResult { let refname = format!("refs/tags/{}", s); - let id = self.repo.refname_to_id(&refname)?; - let obj = self.repo.find_object(id, None)?; + let id = repo.refname_to_id(&refname)?; + let obj = repo.find_object(id, None)?; let obj = obj.peel(ObjectType::Commit)?; Ok(obj.id()) })().chain_err(|| { @@ -173,7 +206,7 @@ impl GitDatabase { } GitReference::Branch(ref s) => { (|| { - let b = self.repo.find_branch(s, git2::BranchType::Local)?; + let b = repo.find_branch(s, git2::BranchType::Local)?; b.get().target().ok_or_else(|| { format_err!("branch `{}` did not have a target", s) }) @@ -182,7 +215,7 @@ impl GitDatabase { })? 
} GitReference::Rev(ref s) => { - let obj = self.repo.revparse_single(s)?; + let obj = repo.revparse_single(s)?; match obj.as_tag() { Some(tag) => tag.target_id(), None => obj.id(), @@ -191,16 +224,6 @@ impl GitDatabase { }; Ok(GitRevision(id)) } - - pub fn to_short_id(&self, revision: GitRevision) -> CargoResult { - let obj = self.repo.find_object(revision.0, None)?; - Ok(GitShortID(obj.short_id()?)) - } - - pub fn has_ref(&self, reference: &str) -> CargoResult<()> { - self.repo.revparse_single(reference)?; - Ok(()) - } } impl<'a> GitCheckout<'a> { @@ -227,9 +250,7 @@ impl<'a> GitCheckout<'a> { format!("Couldn't mkdir {}", dirname.display()) })?; if into.exists() { - fs::remove_dir_all(into).chain_err(|| { - format!("Couldn't rmdir {}", into.display()) - })?; + paths::remove_dir_all(into)?; } // we're doing a local filesystem-to-filesystem clone so there should @@ -285,6 +306,10 @@ impl<'a> GitCheckout<'a> { Ok(()) } + fn has_object(&self) -> bool { + self.repo.find_object(self.revision.0, None).is_ok() + } + fn reset(&self, config: &Config) -> CargoResult<()> { // If we're interrupted while performing this reset (e.g. we die because // of a signal) Cargo needs to be sure to try to check out this repo @@ -295,7 +320,7 @@ impl<'a> GitCheckout<'a> { // ready to go. Hence if we start to do a reset, we make sure this file // *doesn't* exist, and then once we're done we create the file. let ok_file = self.location.join(".cargo-ok"); - let _ = fs::remove_file(&ok_file); + let _ = paths::remove_file(&ok_file); info!("reset {} to {}", self.repo.path().display(), self.revision); let object = self.repo.find_object(self.revision.0, None)?; reset(&self.repo, &object, config)?; @@ -351,7 +376,7 @@ impl<'a> GitCheckout<'a> { } Err(..) => { let path = parent.workdir().unwrap().join(child.path()); - let _ = fs::remove_dir_all(&path); + let _ = paths::remove_dir_all(&path); git2::Repository::init(&path)? 
} }; @@ -644,10 +669,40 @@ pub fn fetch(repo: &mut git2::Repository, maybe_gc_repo(repo)?; debug!("doing a fetch for {}", url); - with_fetch_options(&repo.config()?, url, config, &mut |mut opts| { - debug!("initiating fetch of {} from {}", refspec, url); - let mut remote = repo.remote_anonymous(url.as_str())?; - remote.fetch(&[refspec], Some(&mut opts), None)?; + let git_config = git2::Config::open_default()?; + with_fetch_options(&git_config, url, config, &mut |mut opts| { + // The `fetch` operation here may fail spuriously due to a corrupt + // repository. It could also fail, however, for a whole slew of other + // reasons (aka network related reasons). We want Cargo to automatically + // recover from corrupt repositories, but we don't want Cargo to stomp + // over other legitimate errors.o + // + // Consequently we save off the error of the `fetch` operation and if it + // looks like a "corrupt repo" error then we blow away the repo and try + // again. If it looks like any other kind of error, or if we've already + // blown away the repository, then we want to return the error as-is. + let mut repo_reinitialized = false; + loop { + debug!("initiating fetch of {} from {}", refspec, url); + let res = repo.remote_anonymous(url.as_str())? + .fetch(&[refspec], Some(&mut opts), None); + let err = match res { + Ok(()) => break, + Err(e) => e, + }; + debug!("fetch failed: {}", err); + + if !repo_reinitialized && err.class() == git2::ErrorClass::Reference { + repo_reinitialized = true; + debug!("looks like this is a corrupt repository, reinitializing \ + and trying again"); + if reinitialize(repo).is_ok() { + continue + } + } + + return Err(err.into()) + } Ok(()) }) } @@ -703,30 +758,33 @@ fn maybe_gc_repo(repo: &mut git2::Repository) -> CargoResult<()> { } // Alright all else failed, let's start over. 
- // + reinitialize(repo) +} + +fn reinitialize(repo: &mut git2::Repository) -> CargoResult<()> { // Here we want to drop the current repository object pointed to by `repo`, // so we initialize temporary repository in a sub-folder, blow away the // existing git folder, and then recreate the git repo. Finally we blow away // the `tmp` folder we allocated. let path = repo.path().to_path_buf(); + debug!("reinitializing git repo at {:?}", path); let tmp = path.join("tmp"); - mem::replace(repo, git2::Repository::init(&tmp)?); + let bare = !repo.path().ends_with(".git"); + *repo = git2::Repository::init(&tmp)?; for entry in path.read_dir()? { let entry = entry?; if entry.file_name().to_str() == Some("tmp") { continue } let path = entry.path(); - drop(fs::remove_file(&path).or_else(|_| fs::remove_dir_all(&path))); + drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path))); } - if repo.is_bare() { - mem::replace(repo, git2::Repository::init_bare(path)?); + if bare { + *repo = git2::Repository::init_bare(path)?; } else { - mem::replace(repo, git2::Repository::init(path)?); + *repo = git2::Repository::init(path)?; } - fs::remove_dir_all(&tmp).chain_err(|| { - format!("failed to remove {:?}", tmp) - })?; + paths::remove_dir_all(&tmp)?; Ok(()) } diff --git a/src/cargo/util/flock.rs b/src/cargo/util/flock.rs index 28e720637..a6238ece9 100644 --- a/src/cargo/util/flock.rs +++ b/src/cargo/util/flock.rs @@ -9,6 +9,7 @@ use fs2::{FileExt, lock_contended_error}; use libc; use util::Config; +use util::paths; use util::errors::{CargoResult, CargoResultExt, CargoError}; pub struct FileLock { @@ -49,7 +50,7 @@ impl FileLock { /// /// This can be useful if a directory is locked with a sentinel file but it /// needs to be cleared out as it may be corrupt. - pub fn remove_siblings(&self) -> io::Result<()> { + pub fn remove_siblings(&self) -> CargoResult<()> { let path = self.path(); for entry in path.parent().unwrap().read_dir()? 
{ let entry = entry?; @@ -58,9 +59,9 @@ impl FileLock { } let kind = entry.file_type()?; if kind.is_dir() { - fs::remove_dir_all(entry.path())?; + paths::remove_dir_all(entry.path())?; } else { - fs::remove_file(entry.path())?; + paths::remove_file(entry.path())?; } } Ok(()) diff --git a/src/cargo/util/paths.rs b/src/cargo/util/paths.rs index c2d290182..29f37d72d 100644 --- a/src/cargo/util/paths.rs +++ b/src/cargo/util/paths.rs @@ -1,7 +1,7 @@ use std::env; use std::ffi::{OsStr, OsString}; -use std::fs::File; -use std::fs::OpenOptions; +use std::fs::{self, File, OpenOptions}; +use std::io; use std::io::prelude::*; use std::path::{Path, PathBuf, Component}; @@ -188,3 +188,72 @@ impl<'a> Iterator for PathAncestors<'a> { } } } + +pub fn remove_dir_all>(p: P) -> CargoResult<()> { + _remove_dir_all(p.as_ref()) +} + +fn _remove_dir_all(p: &Path) -> CargoResult<()> { + if p.symlink_metadata()?.file_type().is_symlink() { + return remove_file(p) + } + let entries = p.read_dir().chain_err(|| { + format!("failed to read directory `{}`", p.display()) + })?; + for entry in entries { + let entry = entry?; + let path = entry.path(); + if entry.file_type()?.is_dir() { + remove_dir_all(&path)?; + } else { + remove_file(&path)?; + } + } + remove_dir(&p) +} + +pub fn remove_dir>(p: P) -> CargoResult<()> { + _remove_dir(p.as_ref()) +} + +fn _remove_dir(p: &Path) -> CargoResult<()> { + fs::remove_dir(p).chain_err(|| { + format!("failed to remove directory `{}`", p.display()) + })?; + Ok(()) +} + +pub fn remove_file>(p: P) -> CargoResult<()> { + _remove_file(p.as_ref()) +} + +fn _remove_file(p: &Path) -> CargoResult<()> { + let mut err = match fs::remove_file(p) { + Ok(()) => return Ok(()), + Err(e) => e, + }; + + if err.kind() == io::ErrorKind::PermissionDenied { + if set_not_readonly(p).unwrap_or(false) { + match fs::remove_file(p) { + Ok(()) => return Ok(()), + Err(e) => err = e, + } + } + } + + Err(err).chain_err(|| { + format!("failed to remove file `{}`", p.display()) + })?; + 
Ok(()) +} + +fn set_not_readonly(p: &Path) -> io::Result { + let mut perms = p.metadata()?.permissions(); + if !perms.readonly() { + return Ok(false) + } + perms.set_readonly(false); + fs::set_permissions(p, perms)?; + Ok(true) +} diff --git a/tests/testsuite/corrupt_git.rs b/tests/testsuite/corrupt_git.rs new file mode 100644 index 000000000..e5e9a633a --- /dev/null +++ b/tests/testsuite/corrupt_git.rs @@ -0,0 +1,161 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +use cargo::util::paths as cargopaths; +use cargotest::support::paths; +use cargotest::support::{git, project, execs}; +use hamcrest::assert_that; + +#[test] +fn deleting_database_files() { + let project = project("foo"); + let git_project = git::new("bar", |project| { + project + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", "") + }).unwrap(); + + let project = project + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = {{ git = '{}' }} + "#, git_project.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(project.cargo("build"), execs().with_status(0)); + + let mut files = Vec::new(); + find_files(&paths::home().join(".cargo/git/db"), &mut files); + assert!(files.len() > 0); + + let log = "cargo::sources::git=trace"; + for file in files { + if !file.exists() { + continue + } + println!("deleting {}", file.display()); + cargopaths::remove_file(&file).unwrap(); + assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"), + execs().with_status(0)); + + if !file.exists() { + continue + } + println!("truncating {}", file.display()); + make_writable(&file); + fs::OpenOptions::new() + .write(true) + .open(&file) + .unwrap() + .set_len(2) + .unwrap(); + assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"), + execs().with_status(0)); + } +} + +#[test] +fn deleting_checkout_files() { + let project = project("foo"); + let git_project = 
git::new("bar", |project| { + project + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + "#) + .file("src/lib.rs", "") + }).unwrap(); + + let project = project + .file("Cargo.toml", &format!(r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = {{ git = '{}' }} + "#, git_project.url())) + .file("src/lib.rs", "") + .build(); + + assert_that(project.cargo("build"), execs().with_status(0)); + + let dir = paths::home() + .join(".cargo/git/checkouts") + // get the first entry in the checkouts dir for the package's location + .read_dir() + .unwrap() + .next() + .unwrap() + .unwrap() + .path() + // get the first child of that checkout dir for our checkout + .read_dir() + .unwrap() + .next() + .unwrap() + .unwrap() + .path() + // and throw on .git to corrupt things + .join(".git"); + let mut files = Vec::new(); + find_files(&dir, &mut files); + assert!(files.len() > 0); + + let log = "cargo::sources::git=trace"; + for file in files { + if !file.exists() { + continue + } + println!("deleting {}", file.display()); + cargopaths::remove_file(&file).unwrap(); + assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"), + execs().with_status(0)); + + if !file.exists() { + continue + } + println!("truncating {}", file.display()); + make_writable(&file); + fs::OpenOptions::new() + .write(true) + .open(&file) + .unwrap() + .set_len(2) + .unwrap(); + assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"), + execs().with_status(0)); + } +} + +fn make_writable(path: &Path) { + let mut p = path.metadata().unwrap().permissions(); + p.set_readonly(false); + fs::set_permissions(path, p).unwrap(); +} + +fn find_files(path: &Path, dst: &mut Vec) { + for e in path.read_dir().unwrap() { + let e = e.unwrap(); + let path = e.path(); + if e.file_type().unwrap().is_dir() { + find_files(&path, dst); + } else { + dst.push(path); + } + } +} diff --git a/tests/testsuite/git.rs b/tests/testsuite/git.rs 
index 13bffe055..3f5c9dcb1 100644 --- a/tests/testsuite/git.rs +++ b/tests/testsuite/git.rs @@ -855,7 +855,8 @@ git = git_project.url(), dir = p.url()))); [UPDATING] git repository [..] [ERROR] Unable to update [..] -To learn more, run the command again with --verbose. +Caused by: + revspec '0.1.2' not found; [..] ")); // Specifying a precise rev to the old rev shouldn't actually update diff --git a/tests/testsuite/lib.rs b/tests/testsuite/lib.rs index 321364b47..9f66a61d4 100644 --- a/tests/testsuite/lib.rs +++ b/tests/testsuite/lib.rs @@ -38,6 +38,7 @@ mod check; mod clean; mod concurrent; mod config; +mod corrupt_git; mod cross_compile; mod cross_publish; mod death; -- 2.30.2