}
static FEAT: Feature = Feature {
name: stringify!($feature),
- get: get,
+ get,
};
&FEAT
}
trace!("\t-> {}", dep);
}
let sub_map = self.locked.entry(id.source_id().clone())
- .or_insert(HashMap::new());
+ .or_insert_with(HashMap::new);
let sub_vec = sub_map.entry(id.name().to_string())
- .or_insert(Vec::new());
+ .or_insert_with(Vec::new);
sub_vec.push((id, deps));
}
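The `or_insert` form evaluates its argument eagerly, even when the key is already present; `or_insert_with` takes a constructor and only calls it on a vacant entry. A minimal sketch of the difference, using a hypothetical map rather than Cargo's types:

```rust
use std::collections::HashMap;

fn main() {
    let mut map: HashMap<&str, Vec<u32>> = HashMap::new();

    // Eager: Vec::new() is built before `entry` knows whether it is needed.
    map.entry("dep").or_insert(Vec::new()).push(1);

    // Lazy: the constructor runs only when "dep" is vacant (it isn't here).
    map.entry("dep").or_insert_with(Vec::new).push(2);

    assert_eq!(map["dep"], vec![1, 2]);
}
```

For `Vec::new` the savings are negligible, but the same call shape with an allocating or computed default avoids real work on every hit.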
for member in members.iter() {
build_pkg(member, ws.config(), &mut ret, &mut visited);
}
- for (_, deps) in ws.root_patch() {
+ for deps in ws.root_patch().values() {
for dep in deps {
build_dep(dep, ws.config(), &mut ret, &mut visited);
}
impl ConflictReason {
fn is_links(&self) -> bool {
- match self {
- &ConflictReason::Semver => false,
- &ConflictReason::Links(_) => true,
+ match *self {
+ ConflictReason::Semver => false,
+ ConflictReason::Links(_) => true,
}
}
}
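Clippy's `match_ref_pats` lint drives this rewrite: when every arm of a `match` on a reference starts with `&`, dereferencing once in the scrutinee removes the repetition. A sketch with a hypothetical stand-in enum:

```rust
enum Reason {
    Semver,
    Links(String),
}

impl Reason {
    fn is_links(&self) -> bool {
        // `match *self` hoists the dereference; no arm needs an `&` prefix.
        // No arm binds by value, so nothing is moved out of `*self`.
        match *self {
            Reason::Semver => false,
            Reason::Links(_) => true,
        }
    }
}

fn main() {
    assert!(Reason::Links("git2".to_string()).is_links());
    assert!(!Reason::Semver.is_links());
}
```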
registry,
&parent,
&dep,
- conflicting,
+ &conflicting,
&candidates,
config,
)
/// remaining candidates. For each one, also checks if rolling back
/// could change the outcome of the failed resolution that caused backtracking
/// in the first place. Namely, if we've backtracked past the parent of the
-/// failed dep, or any of the packages flagged as giving us trouble in conflicting_activations.
-/// Read https://github.com/rust-lang/cargo/pull/4834
-/// For several more detailed explanations of the logic here.
+/// failed dep, or any of the packages flagged as giving us trouble in `conflicting_activations`.
+/// Read <https://github.com/rust-lang/cargo/pull/4834>
+/// for several more detailed explanations of the logic here.
///
/// If the outcome could differ, resets `cx` and `remaining_deps` to that
registry: &mut Registry,
parent: &Summary,
dep: &Dependency,
- conflicting_activations: HashMap<PackageId, ConflictReason>,
+ conflicting_activations: &HashMap<PackageId, ConflictReason>,
candidates: &[Candidate],
config: Option<&Config>) -> CargoError {
let graph = cx.graph();
let mut conflicting_activations: Vec<_> = conflicting_activations.iter().collect();
let (links_errors, other_errors): (Vec<_>, Vec<_>) = conflicting_activations.drain(..).rev().partition(|&(_, r)| r.is_links());
for &(p, r) in &links_errors {
- match r {
- &ConflictReason::Links(ref link) => {
- msg.push_str("\n\nthe package `");
- msg.push_str(dep.name());
- msg.push_str("` links to the native library `");
- msg.push_str(&link);
- msg.push_str("`, but it conflicts with a previous package which links to `");
- msg.push_str(&link);
- msg.push_str("` as well:\n");
- },
- _ => (),
+ if let ConflictReason::Links(ref link) = *r {
+ msg.push_str("\n\nthe package `");
+ msg.push_str(dep.name());
+ msg.push_str("` links to the native library `");
+ msg.push_str(link);
+ msg.push_str("`, but it conflicts with a previous package which links to `");
+ msg.push_str(link);
+ msg.push_str("` as well:\n");
}
msg.push_str(&describe_path(p));
}
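The same hunk also applies clippy's `single_match`: a `match` with one interesting arm and a `_ => ()` fallback reads better as `if let`. A reduced sketch (a hypothetical enum again, not the real `ConflictReason`):

```rust
enum Reason {
    Semver,
    Links(String),
}

fn describe(r: &Reason) -> String {
    let mut msg = String::new();
    // Was: match r { &Reason::Links(ref link) => { ... }, _ => () }
    if let Reason::Links(ref link) = *r {
        msg.push_str("links to `");
        msg.push_str(link);
        msg.push('`');
    }
    msg
}

fn main() {
    assert_eq!(describe(&Reason::Links("z".to_string())), "links to `z`");
    assert!(describe(&Reason::Semver).is_empty());
}
```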
return Ok(());
}
for f in self.summary.features().get(feat).expect("must be a valid feature") {
- if f == &feat {
+ if f == feat {
bail!("Cyclic feature dependency: feature `{}` depends on itself", feat);
}
self.add_feature(f)?;
}
Method::Required { uses_default_features: false, .. } => {}
}
- return Ok(reqs);
+ Ok(reqs)
}
impl<'a> Context<'a> {
.entry(id.name().to_string())
.or_insert_with(HashMap::new)
.entry(id.source_id().clone())
- .or_insert(Vec::new());
+ .or_insert_with(Vec::new);
if !prev.iter().any(|c| c == summary) {
self.resolve_graph.push(GraphNode::Add(id.clone()));
if let Some(link) = summary.links() {
let mut base = base.1;
base.extend(dep.features().iter().cloned());
for feature in base.iter() {
- if feature.contains("/") {
+ if feature.contains('/') {
bail!("feature names may not contain slashes: `{}`", feature);
}
}
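`str::contains` accepts any pattern type; for a single character, a `char` literal skips the substring-search machinery and states the intent more precisely (clippy's `single_char_pattern`). For example:

```rust
fn main() {
    let feature = "serde/derive";

    // Same result either way; the char pattern is the idiomatic spelling.
    assert!(feature.contains("/"));
    assert!(feature.contains('/'));
}
```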
impl fmt::Debug for Shell {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match &self.err {
- &ShellOut::Write(_) => {
+ match self.err {
+ ShellOut::Write(_) => {
f.debug_struct("Shell")
.field("verbosity", &self.verbosity)
.finish()
}
- &ShellOut::Stream { color_choice, .. } => {
+ ShellOut::Stream { color_choice, .. } => {
f.debug_struct("Shell")
.field("verbosity", &self.verbosity)
.field("color_choice", &color_choice)
}
}
+impl Default for Shell {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
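The new `Default` impl answers clippy's `new_without_default`: a type with a zero-argument `new()` should also implement `Default` so it composes with derives and `unwrap_or_default`. The shape, on a hypothetical struct:

```rust
struct Shell {
    verbose: bool,
}

impl Shell {
    fn new() -> Shell {
        Shell { verbose: false }
    }
}

// Forwarding to `new()` keeps a single canonical constructor.
impl Default for Shell {
    fn default() -> Self {
        Self::new()
    }
}

fn main() {
    assert!(!Shell::default().verbose);
}
```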
impl ShellOut {
/// Print out a message with a status. The status comes first and is bold + the given color.
/// The status can be justified, in which case the max width that will right align is 12 chars.
}
}
-/// A `Display`able view into a SourceId that will write it as a url
+/// A `Display`able view into a `SourceId` that will write it as a URL
pub struct SourceIdToUrl<'a> {
inner: &'a SourceIdInner,
}
match *self.packages.load(&ances_manifest_path)?.workspace_config() {
WorkspaceConfig::Root(ref ances_root_config) => {
debug!("find_root - found a root checking exclusion");
- if !ances_root_config.is_excluded(&manifest_path) {
+ if !ances_root_config.is_excluded(manifest_path) {
debug!("find_root - found!");
return Ok(Some(ances_manifest_path))
}
return Ok(())
}
- match *self.packages.load(root_manifest)?.workspace_config() {
- WorkspaceConfig::Root(ref root_config) => {
- if root_config.is_excluded(&manifest_path) {
- return Ok(())
- }
+ if let WorkspaceConfig::Root(ref root_config) = *self.packages.load(root_manifest)?.workspace_config() {
+ if root_config.is_excluded(&manifest_path) {
+ return Ok(())
}
- _ => {}
}
debug!("find_members - {}", manifest_path.display());
#![cfg_attr(test, deny(warnings))]
#![recursion_limit="128"]
+// Currently, Cargo does not use clippy for its source code.
+// But if someone runs it, they should know that
+// @alexcrichton disagrees with clippy on some style things
+#![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))]
+
#[macro_use] extern crate failure;
#[macro_use] extern crate log;
#[macro_use] extern crate scoped_tls;
pub use util::{CargoError, CargoResult, CliError, CliResult, Config};
pub use util::errors::Internal;
-pub const CARGO_ENV: &'static str = "CARGO";
+pub const CARGO_ENV: &str = "CARGO";
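In `const` and `static` items the elided lifetime is already `'static`, so spelling it out is redundant (clippy's `const_static_lifetime`). The two declarations below are the same type:

```rust
// Identical types: a const item's elided lifetime defaults to 'static.
const EXPLICIT: &'static str = "CARGO";
const ELIDED: &str = "CARGO";

fn main() {
    assert_eq!(EXPLICIT, ELIDED);
}
```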
pub mod core;
pub mod ops;
config,
build_config,
profiles,
- exec)?
+ &exec)?
};
ret.to_doc_test = to_builds.into_iter().cloned().collect();
or marking one of the targets as `doc = false`.",
target.crate_name(), prev, package);
}
- } else {
- if let Some(prev) = bin_names.insert(target.crate_name(), package) {
- bail!("The binary `{}` is specified by packages `{}` and \
- `{}` but can be documented only once. Consider renaming \
- or marking one of the targets as `doc = false`.",
- target.crate_name(), prev, package);
- }
+ } else if let Some(prev) = bin_names.insert(target.crate_name(), package) {
+ bail!("The binary `{}` is specified by packages `{}` and \
+ `{}` but can be documented only once. Consider renaming \
+ or marking one of the targets as `doc = false`.",
+ target.crate_name(), prev, package);
}
}
}
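The `else { if let ... }` to `else if let ...` change is clippy's `collapsible_else_if`: when an `else` block contains nothing but another `if`, the two merge and a nesting level disappears. In miniature:

```rust
fn classify(n: i32) -> &'static str {
    if n < 0 {
        "negative"
    } else if n == 0 {
        // Was: else { if n == 0 { "zero" } else { "positive" } }
        "zero"
    } else {
        "positive"
    }
}

fn main() {
    assert_eq!(classify(-3), "negative");
    assert_eq!(classify(0), "zero");
    assert_eq!(classify(9), "positive");
}
```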
let mut changes = BTreeMap::new();
let empty = (Vec::new(), Vec::new());
for dep in previous_resolve.iter() {
- changes.entry(key(dep)).or_insert(empty.clone()).0.push(dep);
+ changes.entry(key(dep)).or_insert_with(|| empty.clone()).0.push(dep);
}
for dep in resolve.iter() {
- changes.entry(key(dep)).or_insert(empty.clone()).1.push(dep);
+ changes.entry(key(dep)).or_insert_with(|| empty.clone()).1.push(dep);
}
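Note that `or_insert_with(|| empty.clone())` still clones, but only on a vacant entry; the old `or_insert(empty.clone())` cloned the pair on every iteration regardless. A sketch with stand-in types:

```rust
use std::collections::BTreeMap;

fn main() {
    let empty: (Vec<u32>, Vec<u32>) = (Vec::new(), Vec::new());
    let mut changes: BTreeMap<&str, (Vec<u32>, Vec<u32>)> = BTreeMap::new();

    // The clone runs once, when "dep" is first seen; later hits skip it.
    changes.entry("dep").or_insert_with(|| empty.clone()).0.push(1);
    changes.entry("dep").or_insert_with(|| empty.clone()).1.push(2);

    assert_eq!(changes["dep"], (vec![1], vec![2]));
}
```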
for v in changes.values_mut() {
}
}
list.v1.entry(pkg.package_id().clone())
- .or_insert_with(|| BTreeSet::new())
+ .or_insert_with(BTreeSet::new)
.insert(bin.to_string());
}
// If installation was successful record newly installed binaries.
if result.is_ok() {
list.v1.entry(pkg.package_id().clone())
- .or_insert_with(|| BTreeSet::new())
+ .or_insert_with(BTreeSet::new)
.extend(to_install.iter().map(|s| s.to_string()));
}
// version range, otherwise parse it as a specific version
let first = v.chars()
- .nth(0)
- .ok_or(format_err!("no version provided for the `--vers` flag"))?;
+ .next()
+ .ok_or_else(|| format_err!("no version provided for the `--vers` flag"))?;
match first {
'<' | '>' | '=' | '^' | '~' => match v.parse::<VersionReq>() {
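`ok_or` has the same eager-argument pitfall as `or_insert`: the error, message formatting included, is built even when the `Option` is `Some`. `ok_or_else` defers it. A reduced sketch using a plain `String` error in place of `format_err!`:

```rust
fn first_char(v: &str) -> Result<char, String> {
    // The closure (and its format!) runs only on the empty-string path.
    v.chars()
        .next()
        .ok_or_else(|| format!("no version provided for the `{}` flag", "--vers"))
}

fn main() {
    assert_eq!(first_char("^1.0"), Ok('^'));
    assert!(first_char("").is_err());
}
```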
specs: Vec<&str>,
bins: &[String],
config: &Config) -> CargoResult<()> {
- if specs.len() > 1 && bins.len() > 0 {
+ if specs.len() > 1 && !bins.is_empty() {
bail!("A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant.");
}
let root = resolve_root(root, config)?;
let scheduled_error = if specs.len() == 1 {
- uninstall_one(root, specs[0], bins, config)?;
+ uninstall_one(&root, specs[0], bins, config)?;
false
} else {
let mut succeeded = vec![];
let mut failed = vec![];
for spec in specs {
- let root = root.clone();
- match uninstall_one(root, spec, bins, config) {
+ match uninstall_one(&root, spec, bins, config) {
Ok(()) => succeeded.push(spec),
Err(e) => {
::handle_error(e, &mut config.shell());
Ok(())
}
-pub fn uninstall_one(root: Filesystem,
+pub fn uninstall_one(root: &Filesystem,
spec: &str,
bins: &[String],
config: &Config) -> CargoResult<()> {
- let crate_metadata = metadata(config, &root)?;
+ let crate_metadata = metadata(config, root)?;
let mut metadata = read_crate_list(&crate_metadata)?;
let mut to_remove = Vec::new();
{
let kind = match (bin, lib) {
(true, true) => bail!("can't specify both lib and binary outputs"),
- (true, false) => NewProjectKind::Bin,
(false, true) => NewProjectKind::Lib,
// default to bin
- (false, false) => NewProjectKind::Bin,
+ (_, false) => NewProjectKind::Bin,
};
let opts = NewOptions { version_control, kind, path, name };
.map(|bin| bin.name())
.collect();
- if bins.len() == 0 {
+ if bins.is_empty() {
if !options.filter.is_specific() {
bail!("a bin target must be available for `cargo run`")
} else {
/// Return the target triple which this context is targeting.
pub fn target_triple(&self) -> &str {
- self.requested_target().unwrap_or(self.host_triple())
+ self.requested_target().unwrap_or_else(|| self.host_triple())
}
/// Requested (not actual) target for the build
match *crate_type_info {
Some((ref prefix, ref suffix)) => {
let suffixes = add_target_specific_suffixes(
- &self.target_triple(),
- &crate_type,
+ self.target_triple(),
+ crate_type,
unit.target.kind(),
suffix,
file_type,
}
pub fn lib_or_check_profile(&self, unit: &Unit, target: &Target) -> &'a Profile {
- if !target.is_custom_build() && !target.for_host() {
- if unit.profile.check || (unit.profile.doc && !unit.profile.test) {
+ if !target.is_custom_build() && !target.for_host()
+ && (unit.profile.check || (unit.profile.doc && !unit.profile.test)) {
return &self.profiles.check
- }
}
self.lib_profile()
}
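Merging the two nested `if`s into one `&&` condition is clippy's `collapsible_if`; the early return is untouched, only the nesting goes away. The shape, with hypothetical flags:

```rust
fn profile(check: bool, doc: bool, test: bool, for_host: bool) -> &'static str {
    // Was: if !for_host { if check || (doc && !test) { return "check" } }
    if !for_host && (check || (doc && !test)) {
        return "check";
    }
    "lib"
}

fn main() {
    assert_eq!(profile(true, false, false, false), "check");
    assert_eq!(profile(false, false, false, true), "lib");
}
```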
match *cfg {
Cfg::Name(ref n) => { cfg_map.insert(n.clone(), None); }
Cfg::KeyPair(ref k, ref v) => {
- match *cfg_map.entry(k.clone()).or_insert(Some(Vec::new())) {
- Some(ref mut values) => values.push(v.clone()),
- None => { /* ... */ }
+ if let Some(ref mut values) = *cfg_map.entry(k.clone()).or_insert_with(|| Some(Vec::new())) {
+ values.push(v.clone())
}
}
}
// track of whether it has told us about some explicit dependencies
let prev_root_output = paths::read_bytes(&root_output_file)
.and_then(|bytes| util::bytes2path(&bytes))
- .unwrap_or(cmd.get_cwd().unwrap().to_path_buf());
+ .unwrap_or_else(|_| cmd.get_cwd().unwrap().to_path_buf());
let prev_output = BuildOutput::parse_file(
&output_file,
&pkg_name,
// well.
paths::write(&output_file, &output.stdout)?;
paths::write(&err_file, &output.stderr)?;
- paths::write(&root_output_file, &util::path2bytes(&root_output)?)?;
+ paths::write(&root_output_file, util::path2bytes(&root_output)?)?;
let parsed_output = BuildOutput::parse(
&output.stdout,
&pkg_name,
}
}
- let prev = out.entry(*unit).or_insert(BuildScripts::default());
+ let prev = out.entry(*unit).or_insert_with(BuildScripts::default);
for (pkg, kind) in ret.to_link {
add_to_link(prev, &pkg, kind);
}
profile,
ref deps,
ref local,
- memoized_hash: _,
epoch,
ref rustflags,
+ ..
} = *self;
(rustc, features, target, path, profile, local, epoch, rustflags).hash(h);
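Writing ignored fields as `memoized_hash: _` duplicates the struct layout; a trailing `..` in the pattern skips every unnamed field and keeps compiling if fields are added later (clippy's `unneeded_field_pattern`). Sketch on a hypothetical struct:

```rust
struct Fingerprint {
    profile: u64,
    local: bool,
    memoized_hash: Option<u64>,
}

fn key(f: &Fingerprint) -> (u64, bool) {
    // `..` covers memoized_hash and any future fields without naming them.
    let Fingerprint { profile, local, .. } = *f;
    (profile, local)
}

fn main() {
    let f = Fingerprint { profile: 7, local: true, memoized_hash: None };
    assert_eq!(key(&f), (7, true));
}
```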
.filter(|x| !x.is_empty())
.map(|p| util::bytes2path(p).map(|p| pkg.root().join(p)))
.collect::<Result<Vec<_>, _>>()?;
- if paths.len() == 0 {
+ if paths.is_empty() {
Ok(None)
} else {
Ok(Some(paths))
use util::CargoResult;
use super::Unit;
+#[derive(Default)]
pub struct Links<'a> {
validated: HashSet<&'a PackageId>,
links: HashMap<String, &'a PackageId>,
config: &'cfg Config,
build_config: BuildConfig,
profiles: &'a Profiles,
- exec: Arc<Executor>)
+ exec: &Arc<Executor>)
-> CargoResult<Compilation<'cfg>> {
let units = pkg_targets.iter().flat_map(|&(pkg, ref targets)| {
let default_kind = if build_config.requested_target.is_some() {
// part of this, that's all done next as part of the `execute`
// function which will run everything in order with proper
// parallelism.
- compile(&mut cx, &mut queue, unit, Arc::clone(&exec))?;
+ compile(&mut cx, &mut queue, unit, exec)?;
}
// Now that we've figured out everything that we're going to do, do it!
cx.compilation.binaries.push(bindst.clone());
} else if unit.target.is_lib() {
let pkgid = unit.pkg.package_id().clone();
- cx.compilation.libraries.entry(pkgid).or_insert(HashSet::new())
+ cx.compilation.libraries.entry(pkgid).or_insert_with(HashSet::new)
.insert((unit.target.clone(), dst.clone()));
}
}
if dep.profile.run_custom_build {
let out_dir = cx.build_script_out_dir(dep).display().to_string();
cx.compilation.extra_env.entry(dep.pkg.package_id().clone())
- .or_insert(Vec::new())
+ .or_insert_with(Vec::new)
.push(("OUT_DIR".to_string(), out_dir));
}
let v = cx.target_filenames(dep)?;
cx.compilation.libraries
.entry(unit.pkg.package_id().clone())
- .or_insert(HashSet::new())
+ .or_insert_with(HashSet::new)
.extend(v.iter().map(|&(ref f, _, _)| {
(dep.target.clone(), f.clone())
}));
feats.iter().map(|feat| format!("feature=\"{}\"", feat)).collect()
});
}
- let rustdocflags = cx.rustdocflags_args(&unit)?;
+ let rustdocflags = cx.rustdocflags_args(unit)?;
if !rustdocflags.is_empty() {
cx.compilation.rustdocflags.entry(unit.pkg.package_id().clone())
.or_insert(rustdocflags);
fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>,
jobs: &mut JobQueue<'a>,
unit: &Unit<'a>,
- exec: Arc<Executor>) -> CargoResult<()> {
+ exec: &Arc<Executor>) -> CargoResult<()> {
if !cx.compiled.insert(*unit) {
return Ok(())
}
let work = if unit.profile.doc {
rustdoc(cx, unit)?
} else {
- rustc(cx, unit, Arc::clone(&exec))?
+ rustc(cx, unit, exec)?
};
// Need to link targets on both the dirty and fresh
let dirty = work.then(link_targets(cx, unit, false)?).then(dirty);
// Be sure to compile all dependencies of this target as well.
for unit in cx.dep_targets(unit)?.iter() {
- compile(cx, jobs, unit, exec.clone())?;
+ compile(cx, jobs, unit, exec)?;
}
Ok(())
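Threading `&Arc<Executor>` through the recursion means only the borrow travels; nothing bumps the reference count per call the way `Arc::clone(&exec)` did. A minimal sketch with a hypothetical executor trait:

```rust
use std::sync::Arc;

trait Executor {
    fn exec(&self, unit: &str);
}

struct Printer;

impl Executor for Printer {
    fn exec(&self, unit: &str) {
        println!("compiling {}", unit);
    }
}

// Borrowing the Arc: recursive calls reuse one handle, no refcount traffic.
fn compile(units: &[&str], exec: &Arc<dyn Executor>) {
    if let Some((first, rest)) = units.split_first() {
        exec.exec(first);
        compile(rest, exec);
    }
}

fn main() {
    let exec: Arc<dyn Executor> = Arc::new(Printer);
    compile(&["a", "b", "c"], &exec);
    assert_eq!(Arc::strong_count(&exec), 1);
}
```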
fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
- exec: Arc<Executor>) -> CargoResult<Work> {
+ exec: &Arc<Executor>) -> CargoResult<Work> {
let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?;
let name = unit.pkg.name().to_string();
let root_output = cx.target_root().to_path_buf();
let pkg_root = unit.pkg.root().to_path_buf();
- let cwd = rustc.get_cwd().unwrap_or(cx.config.cwd()).to_path_buf();
+ let cwd = rustc.get_cwd().unwrap_or_else(|| cx.config.cwd()).to_path_buf();
return Ok(Work::new(move |state| {
// Only at runtime have we discovered what the extra -L and -l
let Profile {
ref opt_level, ref lto, codegen_units, ref rustc_args, debuginfo,
debug_assertions, overflow_checks, rpath, test, doc: _doc,
- run_custom_build, ref panic, rustdoc_args: _, check, incremental: _,
+ run_custom_build, ref panic, check, ..
} = *unit.profile;
assert!(!run_custom_build);
// error in the future, see PR #4797
if !dep_targets.iter().any(|u| !u.profile.doc && u.target.linkable()) {
if let Some(u) = dep_targets.iter()
- .filter(|u| !u.profile.doc && u.target.is_lib())
- .next() {
+ .find(|u| !u.profile.doc && u.target.is_lib()) {
cx.config.shell().warn(format!("The package `{}` \
provides no linkable target. The compiler might raise an error while compiling \
`{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
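`filter(p).next()` already stops at the first hit, but `find(p)` fuses the two adapters and names the intent; that's clippy's `filter_next`. For instance:

```rust
fn main() {
    let xs = [1, 3, 4, 7];

    // Equivalent results; `find` is the single-adapter spelling.
    let via_filter = xs.iter().filter(|&&x| x % 2 == 0).next();
    let via_find = xs.iter().find(|&&x| x % 2 == 0);

    assert_eq!(via_filter, via_find);
    assert_eq!(via_find, Some(&4));
}
```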
cmd.env("OUT_DIR", &cx.build_script_out_dir(&dep));
}
if dep.target.linkable() && !dep.profile.doc {
- link_to(cmd, cx, &unit, &dep)?;
+ link_to(cmd, cx, unit, &dep)?;
}
}
bail!("cannot publish crates which activate nightly-only cargo features to crates.io")
}
- if let &Some(ref allowed_registries) = pkg.publish() {
+ if let Some(ref allowed_registries) = *pkg.publish() {
if !match opts.registry {
Some(ref registry) => allowed_registries.contains(registry),
None => false,
// Skip dotfile directories
if name.map(|s| s.starts_with('.')) == Some(true) {
continue
- } else if is_root {
+ }
+ if is_root {
// Skip cargo artifacts
match name {
Some("target") | Some("Cargo.lock") => continue,
use util::hex;
use util::to_url::ToUrl;
-const INDEX_LOCK: &'static str = ".cargo-index-lock";
-pub const CRATES_IO: &'static str = "https://github.com/rust-lang/crates.io-index";
-const CRATE_TEMPLATE: &'static str = "{crate}";
-const VERSION_TEMPLATE: &'static str = "{version}";
+const INDEX_LOCK: &str = ".cargo-index-lock";
+pub const CRATES_IO: &str = "https://github.com/rust-lang/crates.io-index";
+const CRATE_TEMPLATE: &str = "{crate}";
+const VERSION_TEMPLATE: &str = "{version}";
pub struct RegistrySource<'cfg> {
source_id: SourceId,
killed = true;
}
- return killed
+ killed
}
}
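Dropping the trailing `return` leans on Rust being expression-oriented: the last expression of a block is its value, so `return killed` at the end of a function is just `killed` (clippy's `needless_return`). Reduced:

```rust
fn any_killed(statuses: &[bool]) -> bool {
    let mut killed = false;
    for &s in statuses {
        if s {
            killed = true;
        }
    }
    // Final expression is the return value; no `return` keyword needed.
    killed
}

fn main() {
    assert!(any_killed(&[false, true]));
    assert!(!any_killed(&[false, false]));
}
```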
let more_explain = format!("failed to join search paths together\n\
Does ${} have an unterminated quote character?",
env);
- return Err(err.context(more_explain).into())
+ Err(err.context(more_explain).into())
}
pub fn dylib_path_envvar() -> &'static str {
}
}
-/// A helper function to create a ProcessBuilder.
+/// A helper function to create a `ProcessBuilder`.
pub fn process<T: AsRef<OsStr>>(cmd: T) -> ProcessBuilder {
ProcessBuilder {
program: cmd.as_ref().to_os_string(),