script:
- cargo test
- cargo doc --no-deps
- - (cd src/doc && mdbook build --no-create --dest-dir ../../target/doc)
+ - (cd src/doc && mdbook build --dest-dir ../../target/doc)
exclude:
- rust: stable
use core::{Profiles, Workspace};
use util::Config;
use util::errors::{CargoResult, CargoResultExt};
+use util::paths;
use ops::{self, Context, BuildConfig, Kind, Unit};
pub struct CleanOptions<'a> {
let m = fs::metadata(path);
if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
config.shell().verbose(|shell| {shell.status("Removing", path.display())})?;
- fs::remove_dir_all(path).chain_err(|| {
+ paths::remove_dir_all(path).chain_err(|| {
format_err!("could not remove build directory")
})?;
} else if m.is_ok() {
config.shell().verbose(|shell| {shell.status("Removing", path.display())})?;
- fs::remove_file(path).chain_err(|| {
+ paths::remove_file(path).chain_err(|| {
format_err!("failed to remove build artifact")
})?;
}
Ok(())
-}
\ No newline at end of file
+}
use util::{Config, internal};
use util::{Filesystem, FileLock};
use util::errors::{CargoResult, CargoResultExt};
+use util::paths;
#[derive(Deserialize, Serialize)]
#[serde(untagged)]
impl Drop for Transaction {
fn drop(&mut self) {
for bin in self.bins.iter() {
- let _ = fs::remove_file(bin);
+ let _ = paths::remove_file(bin);
}
}
}
// Don't bother grabbing a lock as we're going to blow it all away
// anyway.
let target_dir = ws.target_dir().into_path_unlocked();
- fs::remove_dir_all(&target_dir)?;
+ paths::remove_dir_all(&target_dir)?;
}
Ok(())
write_crate_list(&crate_metadata, metadata)?;
for bin in to_remove {
config.shell().status("Removing", bin.display())?;
- fs::remove_file(bin)?;
+ paths::remove_file(bin)?;
}
Ok(())
let f = GzDecoder::new(tar.file());
let dst = tar.parent().join(&format!("{}-{}", pkg.name(), pkg.version()));
- if fs::metadata(&dst).is_ok() {
- fs::remove_dir_all(&dst)?;
+ if dst.exists() {
+ paths::remove_dir_all(&dst)?;
}
let mut archive = Archive::new(f);
archive.unpack(dst.parent().unwrap())?;
use core::shell::ColorChoice;
use util::{self, ProcessBuilder, machine_message};
use util::{Config, internal, profile, join_paths};
+use util::paths;
use util::errors::{CargoResult, CargoResultExt, Internal};
use util::Freshness;
if filename.extension() == Some(OsStr::new("rmeta")) {
let dst = root.join(filename).with_extension("rlib");
if dst.exists() {
- fs::remove_file(&dst).chain_err(|| {
- format!("Could not remove file: {}.", dst.display())
- })?;
+ paths::remove_file(&dst)?;
}
}
}
continue
}
if dst.exists() {
- fs::remove_file(&dst).chain_err(|| {
- format!("failed to remove: {}", dst.display())
- })?;
+ paths::remove_file(&dst)?;
}
let link_result = if src.is_dir() {
use std::collections::HashSet;
-use std::io::{Write, BufWriter, ErrorKind};
-use std::fs::{self, File};
+use std::io::{Write, BufWriter};
+use std::fs::File;
use std::path::{Path, PathBuf};
use ops::{Context, Unit};
use util::{CargoResult, internal};
+use util::paths;
use ops::cargo_rustc::fingerprint;
fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResult<String> {
write!(outfile, " {}", render_filename(dep, basedir)?)?;
}
writeln!(outfile, "")?;
- } else if let Err(err) = fs::remove_file(output_path) {
- // dep-info generation failed, so delete output file. This will usually
- // cause the build system to always rerun the build rule, which is correct
- // if inefficient.
- if err.kind() != ErrorKind::NotFound {
- return Err(err.into());
- }
+
+ // dep-info generation failed, so delete output file. This will
+ // usually cause the build system to always rerun the build
+ // rule, which is correct if inefficient.
+ } else if output_path.exists() {
+ paths::remove_file(output_path)?;
}
}
}
use core::GitReference;
use core::{Package, PackageId, Summary, Registry, Dependency};
use util::Config;
-use util::errors::{CargoResult, Internal};
+use util::errors::CargoResult;
use util::hex::short_hash;
use sources::PathSource;
use sources::git::utils::{GitRemote, GitRevision};
trace!("updating git source `{:?}`", self.remote);
- let repo = self.remote.checkout(&db_path, self.config)?;
- let rev = repo.rev_for(&self.reference).map_err(Internal::new)?;
- (repo, rev)
+ self.remote.checkout(&db_path, &self.reference, self.config)?
} else {
(self.remote.db_at(&db_path)?, actual_rev.unwrap())
};
use core::GitReference;
use util::{ToUrl, internal, Config, network, Progress};
+use util::paths;
use util::errors::{CargoResult, CargoResultExt, CargoError};
#[derive(PartialEq, Clone, Debug)]
pub fn rev_for(&self, path: &Path, reference: &GitReference)
-> CargoResult<GitRevision> {
- let db = self.db_at(path)?;
- db.rev_for(reference)
+ reference.resolve(&self.db_at(path)?.repo)
}
- pub fn checkout(&self, into: &Path, cargo_config: &Config) -> CargoResult<GitDatabase> {
- let repo = match git2::Repository::open(into) {
- Ok(mut repo) => {
- self.fetch_into(&mut repo, cargo_config).chain_err(|| {
- format!("failed to fetch into {}", into.display())
- })?;
- repo
+ pub fn checkout(&self,
+ into: &Path,
+ reference: &GitReference,
+ cargo_config: &Config)
+ -> CargoResult<(GitDatabase, GitRevision)>
+ {
+ let mut repo_and_rev = None;
+ if let Ok(mut repo) = git2::Repository::open(into) {
+ self.fetch_into(&mut repo, cargo_config).chain_err(|| {
+ format!("failed to fetch into {}", into.display())
+ })?;
+ if let Ok(rev) = reference.resolve(&repo) {
+ repo_and_rev = Some((repo, rev));
}
- Err(..) => {
- self.clone_into(into, cargo_config).chain_err(|| {
+ }
+ let (repo, rev) = match repo_and_rev {
+ Some(pair) => pair,
+ None => {
+ let repo = self.clone_into(into, cargo_config).chain_err(|| {
format!("failed to clone into: {}", into.display())
- })?
+ })?;
+ let rev = reference.resolve(&repo)?;
+ (repo, rev)
}
};
- Ok(GitDatabase {
+ Ok((GitDatabase {
remote: self.clone(),
path: into.to_path_buf(),
repo,
- })
+ }, rev))
}
pub fn db_at(&self, db_path: &Path) -> CargoResult<GitDatabase> {
fn clone_into(&self, dst: &Path, cargo_config: &Config) -> CargoResult<git2::Repository> {
if fs::metadata(&dst).is_ok() {
- fs::remove_dir_all(dst)?;
+ paths::remove_dir_all(dst)?;
}
fs::create_dir_all(dst)?;
let mut repo = git2::Repository::init_bare(dst)?;
impl GitDatabase {
pub fn copy_to(&self, rev: GitRevision, dest: &Path, cargo_config: &Config)
-> CargoResult<GitCheckout> {
- let checkout = match git2::Repository::open(dest) {
- Ok(repo) => {
- let mut checkout = GitCheckout::new(dest, self, rev, repo);
- if !checkout.is_fresh() {
- checkout.fetch(cargo_config)?;
- checkout.reset(cargo_config)?;
- assert!(checkout.is_fresh());
+ let mut checkout = None;
+ if let Ok(repo) = git2::Repository::open(dest) {
+ let mut co = GitCheckout::new(dest, self, rev.clone(), repo);
+ if !co.is_fresh() {
+ // After a successful fetch operation do a sanity check to
+ // ensure we've got the object in our database to reset to. This
+ // can fail sometimes for corrupt repositories where the fetch
+ // operation succeeds but the object isn't actually there.
+ co.fetch(cargo_config)?;
+ if co.has_object() {
+ co.reset(cargo_config)?;
+ assert!(co.is_fresh());
+ checkout = Some(co);
}
- checkout
+ } else {
+ checkout = Some(co);
}
- Err(..) => GitCheckout::clone_into(dest, self, rev, cargo_config)?,
+ };
+ let checkout = match checkout {
+ Some(c) => c,
+ None => GitCheckout::clone_into(dest, self, rev, cargo_config)?,
};
checkout.update_submodules(cargo_config)?;
Ok(checkout)
}
- pub fn rev_for(&self, reference: &GitReference) -> CargoResult<GitRevision> {
- let id = match *reference {
+ pub fn to_short_id(&self, revision: GitRevision) -> CargoResult<GitShortID> {
+ let obj = self.repo.find_object(revision.0, None)?;
+ Ok(GitShortID(obj.short_id()?))
+ }
+
+ pub fn has_ref(&self, reference: &str) -> CargoResult<()> {
+ self.repo.revparse_single(reference)?;
+ Ok(())
+ }
+}
+
+impl GitReference {
+ fn resolve(&self, repo: &git2::Repository) -> CargoResult<GitRevision> {
+ let id = match *self {
GitReference::Tag(ref s) => {
(|| -> CargoResult<git2::Oid> {
let refname = format!("refs/tags/{}", s);
- let id = self.repo.refname_to_id(&refname)?;
- let obj = self.repo.find_object(id, None)?;
+ let id = repo.refname_to_id(&refname)?;
+ let obj = repo.find_object(id, None)?;
let obj = obj.peel(ObjectType::Commit)?;
Ok(obj.id())
})().chain_err(|| {
}
GitReference::Branch(ref s) => {
(|| {
- let b = self.repo.find_branch(s, git2::BranchType::Local)?;
+ let b = repo.find_branch(s, git2::BranchType::Local)?;
b.get().target().ok_or_else(|| {
format_err!("branch `{}` did not have a target", s)
})
})?
}
GitReference::Rev(ref s) => {
- let obj = self.repo.revparse_single(s)?;
+ let obj = repo.revparse_single(s)?;
match obj.as_tag() {
Some(tag) => tag.target_id(),
None => obj.id(),
};
Ok(GitRevision(id))
}
-
- pub fn to_short_id(&self, revision: GitRevision) -> CargoResult<GitShortID> {
- let obj = self.repo.find_object(revision.0, None)?;
- Ok(GitShortID(obj.short_id()?))
- }
-
- pub fn has_ref(&self, reference: &str) -> CargoResult<()> {
- self.repo.revparse_single(reference)?;
- Ok(())
- }
}
impl<'a> GitCheckout<'a> {
format!("Couldn't mkdir {}", dirname.display())
})?;
if into.exists() {
- fs::remove_dir_all(into).chain_err(|| {
- format!("Couldn't rmdir {}", into.display())
- })?;
+ paths::remove_dir_all(into)?;
}
// we're doing a local filesystem-to-filesystem clone so there should
Ok(())
}
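+    // Sanity-check helper used by `copy_to` above: a fetch can report
+    // success against a corrupt repository without the requested object
+    // actually making it into the object database.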
+ fn has_object(&self) -> bool {
+ self.repo.find_object(self.revision.0, None).is_ok()
+ }
+
fn reset(&self, config: &Config) -> CargoResult<()> {
// If we're interrupted while performing this reset (e.g. we die because
// of a signal) Cargo needs to be sure to try to check out this repo
// ready to go. Hence if we start to do a reset, we make sure this file
// *doesn't* exist, and then once we're done we create the file.
let ok_file = self.location.join(".cargo-ok");
- let _ = fs::remove_file(&ok_file);
+ let _ = paths::remove_file(&ok_file);
info!("reset {} to {}", self.repo.path().display(), self.revision);
let object = self.repo.find_object(self.revision.0, None)?;
reset(&self.repo, &object, config)?;
}
Err(..) => {
let path = parent.workdir().unwrap().join(child.path());
- let _ = fs::remove_dir_all(&path);
+ let _ = paths::remove_dir_all(&path);
git2::Repository::init(&path)?
}
};
maybe_gc_repo(repo)?;
debug!("doing a fetch for {}", url);
- with_fetch_options(&repo.config()?, url, config, &mut |mut opts| {
- debug!("initiating fetch of {} from {}", refspec, url);
- let mut remote = repo.remote_anonymous(url.as_str())?;
- remote.fetch(&[refspec], Some(&mut opts), None)?;
+ let git_config = git2::Config::open_default()?;
+ with_fetch_options(&git_config, url, config, &mut |mut opts| {
+ // The `fetch` operation here may fail spuriously due to a corrupt
+ // repository. It could also fail, however, for a whole slew of other
+ // reasons, network errors among them. We want Cargo to automatically
+ // recover from corrupt repositories, but we don't want Cargo to stomp
+ // over other legitimate errors.
+ //
+ // Consequently we save off the error of the `fetch` operation and if it
+ // looks like a "corrupt repo" error then we blow away the repo and try
+ // again. If it looks like any other kind of error, or if we've already
+ // blown away the repository, then we want to return the error as-is.
+ let mut repo_reinitialized = false;
+ loop {
+ debug!("initiating fetch of {} from {}", refspec, url);
+ let res = repo.remote_anonymous(url.as_str())?
+ .fetch(&[refspec], Some(&mut opts), None);
+ let err = match res {
+ Ok(()) => break,
+ Err(e) => e,
+ };
+ debug!("fetch failed: {}", err);
+
+ if !repo_reinitialized && err.class() == git2::ErrorClass::Reference {
+ repo_reinitialized = true;
+ debug!("looks like this is a corrupt repository, reinitializing \
+ and trying again");
+ if reinitialize(repo).is_ok() {
+ continue
+ }
+ }
+
+ return Err(err.into())
+ }
Ok(())
})
}
}
// Alright all else failed, let's start over.
- //
+ reinitialize(repo)
+}
+
+fn reinitialize(repo: &mut git2::Repository) -> CargoResult<()> {
// Here we want to drop the current repository object pointed to by `repo`,
// so we initialize a temporary repository in a sub-folder, blow away the
// existing git folder, and then recreate the git repo. Finally we blow away
// the `tmp` folder we allocated.
let path = repo.path().to_path_buf();
+ debug!("reinitializing git repo at {:?}", path);
let tmp = path.join("tmp");
- mem::replace(repo, git2::Repository::init(&tmp)?);
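+    // `git2::Repository::path` points at the `.git` directory of a normal
+    // repository but at the repository root for a bare one; record which
+    // flavor this is before `*repo` gets replaced below.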
+ let bare = !repo.path().ends_with(".git");
+ *repo = git2::Repository::init(&tmp)?;
for entry in path.read_dir()? {
let entry = entry?;
if entry.file_name().to_str() == Some("tmp") {
continue
}
let path = entry.path();
- drop(fs::remove_file(&path).or_else(|_| fs::remove_dir_all(&path)));
+ drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path)));
}
- if repo.is_bare() {
- mem::replace(repo, git2::Repository::init_bare(path)?);
+ if bare {
+ *repo = git2::Repository::init_bare(path)?;
} else {
- mem::replace(repo, git2::Repository::init(path)?);
+ *repo = git2::Repository::init(path)?;
}
- fs::remove_dir_all(&tmp).chain_err(|| {
- format!("failed to remove {:?}", tmp)
- })?;
+ paths::remove_dir_all(&tmp)?;
Ok(())
}
use libc;
use util::Config;
+use util::paths;
use util::errors::{CargoResult, CargoResultExt, CargoError};
pub struct FileLock {
///
/// This can be useful if a directory is locked with a sentinel file but it
/// needs to be cleared out as it may be corrupt.
- pub fn remove_siblings(&self) -> io::Result<()> {
+ pub fn remove_siblings(&self) -> CargoResult<()> {
let path = self.path();
for entry in path.parent().unwrap().read_dir()? {
let entry = entry?;
}
let kind = entry.file_type()?;
if kind.is_dir() {
- fs::remove_dir_all(entry.path())?;
+ paths::remove_dir_all(entry.path())?;
} else {
- fs::remove_file(entry.path())?;
+ paths::remove_file(entry.path())?;
}
}
Ok(())
use std::env;
use std::ffi::{OsStr, OsString};
-use std::fs::File;
-use std::fs::OpenOptions;
+use std::fs::{self, File, OpenOptions};
+use std::io;
use std::io::prelude::*;
use std::path::{Path, PathBuf, Component};
}
}
}
+
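+/// Equivalent of `fs::remove_dir_all` with better error messages. This does
+/// *not* follow symlinks: a symlink is removed as a plain file rather than
+/// recursing into its target.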
+pub fn remove_dir_all<P: AsRef<Path>>(p: P) -> CargoResult<()> {
+ _remove_dir_all(p.as_ref())
+}
+
+fn _remove_dir_all(p: &Path) -> CargoResult<()> {
+ if p.symlink_metadata()?.file_type().is_symlink() {
+ return remove_file(p)
+ }
+ let entries = p.read_dir().chain_err(|| {
+ format!("failed to read directory `{}`", p.display())
+ })?;
+ for entry in entries {
+ let entry = entry?;
+ let path = entry.path();
+ if entry.file_type()?.is_dir() {
+ remove_dir_all(&path)?;
+ } else {
+ remove_file(&path)?;
+ }
+ }
+ remove_dir(&p)
+}
+
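+/// Equivalent of `fs::remove_dir` with a more descriptive error message.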
+pub fn remove_dir<P: AsRef<Path>>(p: P) -> CargoResult<()> {
+ _remove_dir(p.as_ref())
+}
+
+fn _remove_dir(p: &Path) -> CargoResult<()> {
+ fs::remove_dir(p).chain_err(|| {
+ format!("failed to remove directory `{}`", p.display())
+ })?;
+ Ok(())
+}
+
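+/// Equivalent of `fs::remove_file` with a better error message, retrying
+/// once after clearing the read-only bit: on Windows, deleting a read-only
+/// file (such as git's object files) fails with `PermissionDenied`.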
+pub fn remove_file<P: AsRef<Path>>(p: P) -> CargoResult<()> {
+ _remove_file(p.as_ref())
+}
+
+fn _remove_file(p: &Path) -> CargoResult<()> {
+ let mut err = match fs::remove_file(p) {
+ Ok(()) => return Ok(()),
+ Err(e) => e,
+ };
+
+ if err.kind() == io::ErrorKind::PermissionDenied {
+ if set_not_readonly(p).unwrap_or(false) {
+ match fs::remove_file(p) {
+ Ok(()) => return Ok(()),
+ Err(e) => err = e,
+ }
+ }
+ }
+
+ Err(err).chain_err(|| {
+ format!("failed to remove file `{}`", p.display())
+ })?;
+ Ok(())
+}
+
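+// Clears the read-only bit on `p` if it is set, returning whether anything
+// changed so the caller knows if retrying the removal is worthwhile.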
+fn set_not_readonly(p: &Path) -> io::Result<bool> {
+ let mut perms = p.metadata()?.permissions();
+ if !perms.readonly() {
+ return Ok(false)
+ }
+ perms.set_readonly(false);
+ fs::set_permissions(p, perms)?;
+ Ok(true)
+}
--- /dev/null
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use cargo::util::paths as cargopaths;
+use cargotest::support::paths;
+use cargotest::support::{git, project, execs};
+use hamcrest::assert_that;
+
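+// Delete (and then truncate) each file in the on-disk git database, checking
+// after each corruption that `cargo build` recovers instead of aborting.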
+#[test]
+fn deleting_database_files() {
+ let project = project("foo");
+ let git_project = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+ "#)
+ .file("src/lib.rs", "")
+ }).unwrap();
+
+ let project = project
+ .file("Cargo.toml", &format!(r#"
+ [project]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = {{ git = '{}' }}
+ "#, git_project.url()))
+ .file("src/lib.rs", "")
+ .build();
+
+ assert_that(project.cargo("build"), execs().with_status(0));
+
+ let mut files = Vec::new();
+ find_files(&paths::home().join(".cargo/git/db"), &mut files);
+ assert!(files.len() > 0);
+
+ let log = "cargo::sources::git=trace";
+ for file in files {
+ if !file.exists() {
+ continue
+ }
+ println!("deleting {}", file.display());
+ cargopaths::remove_file(&file).unwrap();
+ assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
+ execs().with_status(0));
+
+ if !file.exists() {
+ continue
+ }
+ println!("truncating {}", file.display());
+ make_writable(&file);
+ fs::OpenOptions::new()
+ .write(true)
+ .open(&file)
+ .unwrap()
+ .set_len(2)
+ .unwrap();
+ assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
+ execs().with_status(0));
+ }
+}
+
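+// Same exercise as above, but corrupting files under the checkout's `.git`
+// directory instead of the database.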
+#[test]
+fn deleting_checkout_files() {
+ let project = project("foo");
+ let git_project = git::new("bar", |project| {
+ project
+ .file("Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.5.0"
+ authors = []
+ "#)
+ .file("src/lib.rs", "")
+ }).unwrap();
+
+ let project = project
+ .file("Cargo.toml", &format!(r#"
+ [project]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ bar = {{ git = '{}' }}
+ "#, git_project.url()))
+ .file("src/lib.rs", "")
+ .build();
+
+ assert_that(project.cargo("build"), execs().with_status(0));
+
+ let dir = paths::home()
+ .join(".cargo/git/checkouts")
+ // get the first entry in the checkouts dir for the package's location
+ .read_dir()
+ .unwrap()
+ .next()
+ .unwrap()
+ .unwrap()
+ .path()
+ // get the first child of that checkout dir for our checkout
+ .read_dir()
+ .unwrap()
+ .next()
+ .unwrap()
+ .unwrap()
+ .path()
+ // and throw on .git to corrupt things
+ .join(".git");
+ let mut files = Vec::new();
+ find_files(&dir, &mut files);
+ assert!(files.len() > 0);
+
+ let log = "cargo::sources::git=trace";
+ for file in files {
+ if !file.exists() {
+ continue
+ }
+ println!("deleting {}", file.display());
+ cargopaths::remove_file(&file).unwrap();
+ assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
+ execs().with_status(0));
+
+ if !file.exists() {
+ continue
+ }
+ println!("truncating {}", file.display());
+ make_writable(&file);
+ fs::OpenOptions::new()
+ .write(true)
+ .open(&file)
+ .unwrap()
+ .set_len(2)
+ .unwrap();
+ assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
+ execs().with_status(0));
+ }
+}
+
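+// Git marks its object files read-only, so the bit needs clearing before the
+// truncation in the tests above can open them for writing.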
+fn make_writable(path: &Path) {
+ let mut p = path.metadata().unwrap().permissions();
+ p.set_readonly(false);
+ fs::set_permissions(path, p).unwrap();
+}
+
+fn find_files(path: &Path, dst: &mut Vec<PathBuf>) {
+ for e in path.read_dir().unwrap() {
+ let e = e.unwrap();
+ let path = e.path();
+ if e.file_type().unwrap().is_dir() {
+ find_files(&path, dst);
+ } else {
+ dst.push(path);
+ }
+ }
+}
[UPDATING] git repository [..]
[ERROR] Unable to update [..]
-To learn more, run the command again with --verbose.
+Caused by:
+ revspec '0.1.2' not found; [..]
"));
// Specifying a precise rev to the old rev shouldn't actually update
mod clean;
mod concurrent;
mod config;
+mod corrupt_git;
mod cross_compile;
mod cross_publish;
mod death;