From 676edacfbaba8c9c55acee61c921900d188bc7e0 Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Mon, 26 Feb 2018 21:02:39 -0500 Subject: [PATCH] some simple clippy things V2 --- src/cargo/core/features.rs | 2 +- src/cargo/core/registry.rs | 4 +-- src/cargo/core/resolver/encode.rs | 2 +- src/cargo/core/resolver/mod.rs | 41 +++++++++++------------ src/cargo/core/shell.rs | 12 +++++-- src/cargo/core/source/source_id.rs | 2 +- src/cargo/core/workspace.rs | 11 +++--- src/cargo/lib.rs | 7 +++- src/cargo/ops/cargo_compile.rs | 2 +- src/cargo/ops/cargo_doc.rs | 12 +++---- src/cargo/ops/cargo_generate_lockfile.rs | 4 +-- src/cargo/ops/cargo_install.rs | 16 ++++----- src/cargo/ops/cargo_new.rs | 3 +- src/cargo/ops/cargo_run.rs | 2 +- src/cargo/ops/cargo_rustc/context.rs | 11 +++--- src/cargo/ops/cargo_rustc/custom_build.rs | 11 +++--- src/cargo/ops/cargo_rustc/fingerprint.rs | 4 +-- src/cargo/ops/cargo_rustc/links.rs | 1 + src/cargo/ops/cargo_rustc/mod.rs | 29 ++++++++-------- src/cargo/ops/registry.rs | 2 +- src/cargo/sources/path.rs | 3 +- src/cargo/sources/registry/mod.rs | 8 ++--- src/cargo/util/job.rs | 2 +- src/cargo/util/paths.rs | 2 +- src/cargo/util/process_builder.rs | 2 +- 25 files changed, 98 insertions(+), 97 deletions(-) diff --git a/src/cargo/core/features.rs b/src/cargo/core/features.rs index 74437f7c9..20d60d17d 100644 --- a/src/cargo/core/features.rs +++ b/src/cargo/core/features.rs @@ -99,7 +99,7 @@ macro_rules! features { } static FEAT: Feature = Feature { name: stringify!($feature), - get: get, + get, }; &FEAT } diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index 31758f841..051adb84e 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -187,9 +187,9 @@ impl<'cfg> PackageRegistry<'cfg> { trace!("\t-> {}", dep); } let sub_map = self.locked.entry(id.source_id().clone()) - .or_insert(HashMap::new()); + .or_insert_with(HashMap::new); let sub_vec = sub_map.entry(id.name().to_string()) - .or_insert(Vec::new()); + .or_insert_with(Vec::new); sub_vec.push((id, deps)); } diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index bf985e4c8..b0f726493 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -206,7 +206,7 @@ fn build_path_deps(ws: &Workspace) -> HashMap { for member in members.iter() { build_pkg(member, ws.config(), &mut ret, &mut visited); } - for (_, deps) in ws.root_patch() { + for deps in ws.root_patch().values() { for dep in deps { build_dep(dep, ws.config(), &mut ret, &mut visited); } diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index e4913268f..bb0e60404 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -540,9 +540,9 @@ enum ConflictReason { impl ConflictReason { fn is_links(&self) -> bool { - match self { - &ConflictReason::Semver => false, - &ConflictReason::Links(_) => true, + match *self { + ConflictReason::Semver => false, + ConflictReason::Links(_) => true, } } } @@ -739,7 +739,7 @@ fn activate_deps_loop<'a>( registry, &parent, &dep, - conflicting, + &conflicting, &candidates, config, ) @@ -780,8 +780,8 @@ fn activate_deps_loop<'a>( /// remaining candidates. For each one, also checks if rolling back /// could change the outcome of the failed resolution that caused backtracking /// in the first place. Namely, if we've backtracked past the parent of the -/// failed dep, or any of the packages flagged as giving us trouble in conflicting_activations. 
-/// Read https://github.com/rust-lang/cargo/pull/4834 +/// failed dep, or any of the packages flagged as giving us trouble in `conflicting_activations`. +/// Read /// For several more detailed explanations of the logic here. /// /// If the outcome could differ, resets `cx` and `remaining_deps` to that @@ -826,7 +826,7 @@ fn activation_error(cx: &Context, registry: &mut Registry, parent: &Summary, dep: &Dependency, - conflicting_activations: HashMap, + conflicting_activations: &HashMap, candidates: &[Candidate], config: Option<&Config>) -> CargoError { let graph = cx.graph(); @@ -860,17 +860,14 @@ fn activation_error(cx: &Context, let (links_errors, other_errors): (Vec<_>, Vec<_>) = conflicting_activations.drain(..).rev().partition(|&(_, r)| r.is_links()); for &(p, r) in &links_errors { - match r { - &ConflictReason::Links(ref link) => { - msg.push_str("\n\nthe package `"); - msg.push_str(dep.name()); - msg.push_str("` links to the native library `"); - msg.push_str(&link); - msg.push_str("`, but it conflicts with a previous package which links to `"); - msg.push_str(&link); - msg.push_str("` as well:\n"); - }, - _ => (), + if let ConflictReason::Links(ref link) = *r { + msg.push_str("\n\nthe package `"); + msg.push_str(dep.name()); + msg.push_str("` links to the native library `"); + msg.push_str(link); + msg.push_str("`, but it conflicts with a previous package which links to `"); + msg.push_str(link); + msg.push_str("` as well:\n"); } msg.push_str(&describe_path(p)); } @@ -1031,7 +1028,7 @@ impl<'r> Requirements<'r> { return Ok(()); } for f in self.summary.features().get(feat).expect("must be a valid feature") { - if f == &feat { + if f == feat { bail!("Cyclic feature dependency: feature `{}` depends on itself", feat); } self.add_feature(f)?; @@ -1095,7 +1092,7 @@ fn build_requirements<'a, 'b: 'a>(s: &'a Summary, method: &'b Method) } Method::Required { uses_default_features: false, .. } => {} } - return Ok(reqs); + Ok(reqs) } impl<'a> Context<'a> { @@ -1110,7 +1107,7 @@ impl<'a> Context<'a> { .entry(id.name().to_string()) .or_insert_with(HashMap::new) .entry(id.source_id().clone()) - .or_insert(Vec::new()); + .or_insert_with(Vec::new); if !prev.iter().any(|c| c == summary) { self.resolve_graph.push(GraphNode::Add(id.clone())); if let Some(link) = summary.links() { @@ -1294,7 +1291,7 @@ impl<'a> Context<'a> { let mut base = base.1; base.extend(dep.features().iter().cloned()); for feature in base.iter() { - if feature.contains("/") { + if feature.contains('/') { bail!("feature names may not contain slashes: `{}`", feature); } } diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs index a372110cd..8ff156112 100644 --- a/src/cargo/core/shell.rs +++ b/src/cargo/core/shell.rs @@ -27,13 +27,13 @@ pub struct Shell { impl fmt::Debug for Shell { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match &self.err { - &ShellOut::Write(_) => { + match self.err { + ShellOut::Write(_) => { f.debug_struct("Shell") .field("verbosity", &self.verbosity) .finish() } - &ShellOut::Stream { color_choice, .. } => { + ShellOut::Stream { color_choice, .. } => { f.debug_struct("Shell") .field("verbosity", &self.verbosity) .field("color_choice", &color_choice) @@ -221,6 +221,12 @@ impl Shell { } } +impl Default for Shell { + fn default() -> Self { + Self::new() + } +} + impl ShellOut { /// Print out a message with a status. The status comes first and is bold + the given color. /// The status can be justified, in which case the max width that will right align is 12 chars. 
diff --git a/src/cargo/core/source/source_id.rs b/src/cargo/core/source/source_id.rs index 9135af41f..2bac7cd01 100644 --- a/src/cargo/core/source/source_id.rs +++ b/src/cargo/core/source/source_id.rs @@ -444,7 +444,7 @@ impl Hash for SourceId { } } -/// A `Display`able view into a SourceId that will write it as a url +/// A `Display`able view into a `SourceId` that will write it as a url pub struct SourceIdToUrl<'a> { inner: &'a SourceIdInner, } diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs index 63ecc7b3f..7a5ef6f3e 100644 --- a/src/cargo/core/workspace.rs +++ b/src/cargo/core/workspace.rs @@ -340,7 +340,7 @@ impl<'cfg> Workspace<'cfg> { match *self.packages.load(&ances_manifest_path)?.workspace_config() { WorkspaceConfig::Root(ref ances_root_config) => { debug!("find_root - found a root checking exclusion"); - if !ances_root_config.is_excluded(&manifest_path) { + if !ances_root_config.is_excluded(manifest_path) { debug!("find_root - found!"); return Ok(Some(ances_manifest_path)) } @@ -443,13 +443,10 @@ impl<'cfg> Workspace<'cfg> { return Ok(()) } - match *self.packages.load(root_manifest)?.workspace_config() { - WorkspaceConfig::Root(ref root_config) => { - if root_config.is_excluded(&manifest_path) { - return Ok(()) - } + if let WorkspaceConfig::Root(ref root_config) = *self.packages.load(root_manifest)?.workspace_config() { + if root_config.is_excluded(&manifest_path) { + return Ok(()) } - _ => {} } debug!("find_members - {}", manifest_path.display()); diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs index dc1dcd6a5..faf300b22 100644 --- a/src/cargo/lib.rs +++ b/src/cargo/lib.rs @@ -2,6 +2,11 @@ #![cfg_attr(test, deny(warnings))] #![recursion_limit="128"] +// Currently, Cargo does not use clippy for its source code. +// But if someone runs it they should know that +// @alexcrichton disagree with clippy on some style things +#![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))] + #[macro_use] extern crate failure; #[macro_use] extern crate log; #[macro_use] extern crate scoped_tls; @@ -51,7 +56,7 @@ use core::shell::Verbosity::Verbose; pub use util::{CargoError, CargoResult, CliError, CliResult, Config}; pub use util::errors::Internal; -pub const CARGO_ENV: &'static str = "CARGO"; +pub const CARGO_ENV: &str = "CARGO"; pub mod core; pub mod ops; diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 026d4fdc6..7891ad304 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -321,7 +321,7 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>, config, build_config, profiles, - exec)? + &exec)? }; ret.to_doc_test = to_builds.into_iter().cloned().collect(); diff --git a/src/cargo/ops/cargo_doc.rs b/src/cargo/ops/cargo_doc.rs index 3a722de2f..9f21d1bd6 100644 --- a/src/cargo/ops/cargo_doc.rs +++ b/src/cargo/ops/cargo_doc.rs @@ -44,13 +44,11 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> { or marking one of the targets as `doc = false`.", target.crate_name(), prev, package); } - } else { - if let Some(prev) = bin_names.insert(target.crate_name(), package) { - bail!("The binary `{}` is specified by packages `{}` and \ - `{}` but can be documented only once. Consider renaming \ - or marking one of the targets as `doc = false`.", - target.crate_name(), prev, package); - } + } else if let Some(prev) = bin_names.insert(target.crate_name(), package) { + bail!("The binary `{}` is specified by packages `{}` and \ + `{}` but can be documented only once. 
Consider renaming \ + or marking one of the targets as `doc = false`.", + target.crate_name(), prev, package); } } } diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index 999a758fb..d08c11b16 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -174,10 +174,10 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) let mut changes = BTreeMap::new(); let empty = (Vec::new(), Vec::new()); for dep in previous_resolve.iter() { - changes.entry(key(dep)).or_insert(empty.clone()).0.push(dep); + changes.entry(key(dep)).or_insert_with(||empty.clone()).0.push(dep); } for dep in resolve.iter() { - changes.entry(key(dep)).or_insert(empty.clone()).1.push(dep); + changes.entry(key(dep)).or_insert_with(||empty.clone()).1.push(dep); } for v in changes.values_mut() { diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index c85923e45..f3cbdac45 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -286,7 +286,7 @@ fn install_one(root: &Filesystem, } } list.v1.entry(pkg.package_id().clone()) - .or_insert_with(|| BTreeSet::new()) + .or_insert_with(BTreeSet::new) .insert(bin.to_string()); } @@ -301,7 +301,7 @@ fn install_one(root: &Filesystem, // If installation was successful record newly installed binaries. if result.is_ok() { list.v1.entry(pkg.package_id().clone()) - .or_insert_with(|| BTreeSet::new()) + .or_insert_with(BTreeSet::new) .extend(to_install.iter().map(|s| s.to_string())); } @@ -347,7 +347,7 @@ fn select_pkg<'a, T>(mut source: T, // version range, otherwise parse it as a specific version let first = v.chars() .nth(0) - .ok_or(format_err!("no version provided for the `--vers` flag"))?; + .ok_or_else(||format_err!("no version provided for the `--vers` flag"))?; match first { '<' | '>' | '=' | '^' | '~' => match v.parse::() { @@ -570,20 +570,20 @@ pub fn uninstall(root: Option<&str>, specs: Vec<&str>, bins: &[String], config: &Config) -> CargoResult<()> { - if specs.len() > 1 && bins.len() > 0 { + if specs.len() > 1 && !bins.is_empty() { bail!("A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant."); } let root = resolve_root(root, config)?; let scheduled_error = if specs.len() == 1 { - uninstall_one(root, specs[0], bins, config)?; + uninstall_one(&root, specs[0], bins, config)?; false } else { let mut succeeded = vec![]; let mut failed = vec![]; for spec in specs { let root = root.clone(); - match uninstall_one(root, spec, bins, config) { + match uninstall_one(&root, spec, bins, config) { Ok(()) => succeeded.push(spec), Err(e) => { ::handle_error(e, &mut config.shell()); @@ -614,11 +614,11 @@ pub fn uninstall(root: Option<&str>, Ok(()) } -pub fn uninstall_one(root: Filesystem, +pub fn uninstall_one(root: &Filesystem, spec: &str, bins: &[String], config: &Config) -> CargoResult<()> { - let crate_metadata = metadata(config, &root)?; + let crate_metadata = metadata(config, root)?; let mut metadata = read_crate_list(&crate_metadata)?; let mut to_remove = Vec::new(); { diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs index a1291e75b..fa370b47c 100644 --- a/src/cargo/ops/cargo_new.rs +++ b/src/cargo/ops/cargo_new.rs @@ -90,10 +90,9 @@ impl<'a> NewOptions<'a> { let kind = match (bin, lib) { (true, true) => bail!("can't specify both lib and binary outputs"), - (true, false) => NewProjectKind::Bin, (false, true) => NewProjectKind::Lib, // default to bin - (false, 
false) => NewProjectKind::Bin, + (_, false) => NewProjectKind::Bin, }; let opts = NewOptions { version_control, kind, path, name }; diff --git a/src/cargo/ops/cargo_run.rs b/src/cargo/ops/cargo_run.rs index ff3c46b56..81ac77908 100644 --- a/src/cargo/ops/cargo_run.rs +++ b/src/cargo/ops/cargo_run.rs @@ -34,7 +34,7 @@ pub fn run(ws: &Workspace, .map(|bin| bin.name()) .collect(); - if bins.len() == 0 { + if bins.is_empty() { if !options.filter.is_specific() { bail!("a bin target must be available for `cargo run`") } else { diff --git a/src/cargo/ops/cargo_rustc/context.rs b/src/cargo/ops/cargo_rustc/context.rs index 24985a190..a23e9fe03 100644 --- a/src/cargo/ops/cargo_rustc/context.rs +++ b/src/cargo/ops/cargo_rustc/context.rs @@ -471,7 +471,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// Return the target triple which this context is targeting. pub fn target_triple(&self) -> &str { - self.requested_target().unwrap_or(self.host_triple()) + self.requested_target().unwrap_or_else(|| self.host_triple()) } /// Requested (not actual) target for the build @@ -694,8 +694,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> { match *crate_type_info { Some((ref prefix, ref suffix)) => { let suffixes = add_target_specific_suffixes( - &self.target_triple(), - &crate_type, + self.target_triple(), + crate_type, unit.target.kind(), suffix, file_type, @@ -1055,10 +1055,9 @@ impl<'a, 'cfg> Context<'a, 'cfg> { } pub fn lib_or_check_profile(&self, unit: &Unit, target: &Target) -> &'a Profile { - if !target.is_custom_build() && !target.for_host() { - if unit.profile.check || (unit.profile.doc && !unit.profile.test) { + if !target.is_custom_build() && !target.for_host() + && (unit.profile.check || (unit.profile.doc && !unit.profile.test)) { return &self.profiles.check - } } self.lib_profile() } diff --git a/src/cargo/ops/cargo_rustc/custom_build.rs b/src/cargo/ops/cargo_rustc/custom_build.rs index 6e7ff4b86..a7f3160ae 100644 --- a/src/cargo/ops/cargo_rustc/custom_build.rs +++ b/src/cargo/ops/cargo_rustc/custom_build.rs @@ -145,9 +145,8 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) match *cfg { Cfg::Name(ref n) => { cfg_map.insert(n.clone(), None); } Cfg::KeyPair(ref k, ref v) => { - match *cfg_map.entry(k.clone()).or_insert(Some(Vec::new())) { - Some(ref mut values) => values.push(v.clone()), - None => { /* ... */ } + if let Some(ref mut values) = *cfg_map.entry(k.clone()).or_insert_with(||Some(Vec::new())) { + values.push(v.clone()) } } } @@ -196,7 +195,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) // track of whether it has told us about some explicit dependencies let prev_root_output = paths::read_bytes(&root_output_file) .and_then(|bytes| util::bytes2path(&bytes)) - .unwrap_or(cmd.get_cwd().unwrap().to_path_buf()); + .unwrap_or_else(|_| cmd.get_cwd().unwrap().to_path_buf()); let prev_output = BuildOutput::parse_file( &output_file, &pkg_name, @@ -273,7 +272,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) // well. 
paths::write(&output_file, &output.stdout)?; paths::write(&err_file, &output.stderr)?; - paths::write(&root_output_file, &util::path2bytes(&root_output)?)?; + paths::write(&root_output_file, util::path2bytes(&root_output)?)?; let parsed_output = BuildOutput::parse( &output.stdout, &pkg_name, @@ -553,7 +552,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, } } - let prev = out.entry(*unit).or_insert(BuildScripts::default()); + let prev = out.entry(*unit).or_insert_with(BuildScripts::default); for (pkg, kind) in ret.to_link { add_to_link(prev, &pkg, kind); } diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs index fa4005486..c9ef8f245 100644 --- a/src/cargo/ops/cargo_rustc/fingerprint.rs +++ b/src/cargo/ops/cargo_rustc/fingerprint.rs @@ -319,9 +319,9 @@ impl hash::Hash for Fingerprint { profile, ref deps, ref local, - memoized_hash: _, epoch, ref rustflags, + .. } = *self; (rustc, features, target, path, profile, local, epoch, rustflags).hash(h); @@ -632,7 +632,7 @@ pub fn parse_dep_info(pkg: &Package, dep_info: &Path) .filter(|x| !x.is_empty()) .map(|p| util::bytes2path(p).map(|p| pkg.root().join(p))) .collect::, _>>()?; - if paths.len() == 0 { + if paths.is_empty() { Ok(None) } else { Ok(Some(paths)) diff --git a/src/cargo/ops/cargo_rustc/links.rs b/src/cargo/ops/cargo_rustc/links.rs index 5484a68b9..06461a46b 100644 --- a/src/cargo/ops/cargo_rustc/links.rs +++ b/src/cargo/ops/cargo_rustc/links.rs @@ -5,6 +5,7 @@ use core::{Resolve, PackageId}; use util::CargoResult; use super::Unit; +#[derive(Default)] pub struct Links<'a> { validated: HashSet<&'a PackageId>, links: HashMap, diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs index 6cfb6e1e4..5a05d45a6 100644 --- a/src/cargo/ops/cargo_rustc/mod.rs +++ b/src/cargo/ops/cargo_rustc/mod.rs @@ -139,7 +139,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, config: &'cfg Config, build_config: BuildConfig, profiles: &'a Profiles, - exec: Arc) + exec: &Arc) -> CargoResult> { let units = pkg_targets.iter().flat_map(|&(pkg, ref targets)| { let default_kind = if build_config.requested_target.is_some() { @@ -173,7 +173,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, // part of this, that's all done next as part of the `execute` // function which will run everything in order with proper // parallelism. - compile(&mut cx, &mut queue, unit, Arc::clone(&exec))?; + compile(&mut cx, &mut queue, unit, exec)?; } // Now that we've figured out everything that we're going to do, do it! 
@@ -199,7 +199,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, cx.compilation.binaries.push(bindst.clone()); } else if unit.target.is_lib() { let pkgid = unit.pkg.package_id().clone(); - cx.compilation.libraries.entry(pkgid).or_insert(HashSet::new()) + cx.compilation.libraries.entry(pkgid).or_insert_with(HashSet::new) .insert((unit.target.clone(), dst.clone())); } } @@ -210,7 +210,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, if dep.profile.run_custom_build { let out_dir = cx.build_script_out_dir(dep).display().to_string(); cx.compilation.extra_env.entry(dep.pkg.package_id().clone()) - .or_insert(Vec::new()) + .or_insert_with(Vec::new) .push(("OUT_DIR".to_string(), out_dir)); } @@ -220,7 +220,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, let v = cx.target_filenames(dep)?; cx.compilation.libraries .entry(unit.pkg.package_id().clone()) - .or_insert(HashSet::new()) + .or_insert_with(HashSet::new) .extend(v.iter().map(|&(ref f, _, _)| { (dep.target.clone(), f.clone()) })); @@ -232,7 +232,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, feats.iter().map(|feat| format!("feature=\"{}\"", feat)).collect() }); } - let rustdocflags = cx.rustdocflags_args(&unit)?; + let rustdocflags = cx.rustdocflags_args(unit)?; if !rustdocflags.is_empty() { cx.compilation.rustdocflags.entry(unit.pkg.package_id().clone()) .or_insert(rustdocflags); @@ -261,7 +261,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>, jobs: &mut JobQueue<'a>, unit: &Unit<'a>, - exec: Arc) -> CargoResult<()> { + exec: &Arc) -> CargoResult<()> { if !cx.compiled.insert(*unit) { return Ok(()) } @@ -283,7 +283,7 @@ fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>, let work = if unit.profile.doc { rustdoc(cx, unit)? } else { - rustc(cx, unit, Arc::clone(&exec))? + rustc(cx, unit, exec)? }; // Need to link targets on both the dirty and fresh let dirty = work.then(link_targets(cx, unit, false)?).then(dirty); @@ -300,7 +300,7 @@ fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>, // Be sure to compile all dependencies of this target as well. for unit in cx.dep_targets(unit)?.iter() { - compile(cx, jobs, unit, exec.clone())?; + compile(cx, jobs, unit, exec)?; } Ok(()) @@ -308,7 +308,7 @@ fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>, fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>, - exec: Arc) -> CargoResult { + exec: &Arc) -> CargoResult { let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?; let name = unit.pkg.name().to_string(); @@ -359,7 +359,7 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, let root_output = cx.target_root().to_path_buf(); let pkg_root = unit.pkg.root().to_path_buf(); - let cwd = rustc.get_cwd().unwrap_or(cx.config.cwd()).to_path_buf(); + let cwd = rustc.get_cwd().unwrap_or_else(|| cx.config.cwd()).to_path_buf(); return Ok(Work::new(move |state| { // Only at runtime have we discovered what the extra -L and -l @@ -747,7 +747,7 @@ fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, let Profile { ref opt_level, ref lto, codegen_units, ref rustc_args, debuginfo, debug_assertions, overflow_checks, rpath, test, doc: _doc, - run_custom_build, ref panic, rustdoc_args: _, check, incremental: _, + run_custom_build, ref panic, check, .. 
} = *unit.profile; assert!(!run_custom_build); @@ -935,8 +935,7 @@ fn build_deps_args<'a, 'cfg>(cmd: &mut ProcessBuilder, // error in the future, see PR #4797 if !dep_targets.iter().any(|u| !u.profile.doc && u.target.linkable()) { if let Some(u) = dep_targets.iter() - .filter(|u| !u.profile.doc && u.target.is_lib()) - .next() { + .find(|u| !u.profile.doc && u.target.is_lib()) { cx.config.shell().warn(format!("The package `{}` \ provides no linkable target. The compiler might raise an error while compiling \ `{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \ @@ -952,7 +951,7 @@ Cargo.toml. This warning might turn into a hard error in the future.", cmd.env("OUT_DIR", &cx.build_script_out_dir(&dep)); } if dep.target.linkable() && !dep.profile.doc { - link_to(cmd, cx, &unit, &dep)?; + link_to(cmd, cx, unit, &dep)?; } } diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index 5f29d3e5e..fb776d61b 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -48,7 +48,7 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> { bail!("cannot publish crates which activate nightly-only cargo features to crates.io") } - if let &Some(ref allowed_registries) = pkg.publish() { + if let Some(ref allowed_registries) = *pkg.publish() { if !match opts.registry { Some(ref registry) => allowed_registries.contains(registry), None => false, diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index 87a156c99..f24909ef9 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -452,7 +452,8 @@ impl<'cfg> PathSource<'cfg> { // Skip dotfile directories if name.map(|s| s.starts_with('.')) == Some(true) { continue - } else if is_root { + } + if is_root { // Skip cargo artifacts match name { Some("target") | Some("Cargo.lock") => continue, diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs index 93ff9a866..319f2a9dd 100644 --- a/src/cargo/sources/registry/mod.rs +++ b/src/cargo/sources/registry/mod.rs @@ -177,10 +177,10 @@ use util::errors::CargoResultExt; use util::hex; use util::to_url::ToUrl; -const INDEX_LOCK: &'static str = ".cargo-index-lock"; -pub const CRATES_IO: &'static str = "https://github.com/rust-lang/crates.io-index"; -const CRATE_TEMPLATE: &'static str = "{crate}"; -const VERSION_TEMPLATE: &'static str = "{version}"; +const INDEX_LOCK: &str = ".cargo-index-lock"; +pub const CRATES_IO: &str = "https://github.com/rust-lang/crates.io-index"; +const CRATE_TEMPLATE: &str = "{crate}"; +const VERSION_TEMPLATE: &str = "{version}"; pub struct RegistrySource<'cfg> { source_id: SourceId, diff --git a/src/cargo/util/job.rs b/src/cargo/util/job.rs index be072a3eb..c9f1bef63 100644 --- a/src/cargo/util/job.rs +++ b/src/cargo/util/job.rs @@ -257,7 +257,7 @@ mod imp { killed = true; } - return killed + killed } } diff --git a/src/cargo/util/paths.rs b/src/cargo/util/paths.rs index 90fdb978a..c2d290182 100644 --- a/src/cargo/util/paths.rs +++ b/src/cargo/util/paths.rs @@ -20,7 +20,7 @@ pub fn join_paths>(paths: &[T], env: &str) -> CargoResult &'static str { diff --git a/src/cargo/util/process_builder.rs b/src/cargo/util/process_builder.rs index 6a8ceff9c..6fe6d7b9a 100644 --- a/src/cargo/util/process_builder.rs +++ b/src/cargo/util/process_builder.rs @@ -305,7 +305,7 @@ impl ProcessBuilder { } } -/// A helper function to create a ProcessBuilder. +/// A helper function to create a `ProcessBuilder`. 
pub fn process<T: AsRef<OsStr>>(cmd: T) -> ProcessBuilder {
    ProcessBuilder {
        program: cmd.as_ref().to_os_string(),
-- 
2.30.2
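
Editor's note, appended after the patch and not part of the commit itself: most hunks above apply a few recurring clippy suggestions — `or_insert_with` instead of `or_insert` with an eagerly constructed default, `is_empty()` instead of comparing `len()` to zero, and `&str` constants without the redundant `'static` lifetime. The toy program below is a minimal, self-contained sketch of those patterns; the map contents and names are invented for illustration and are not Cargo APIs.

use std::collections::HashMap;

fn main() {
    let mut deps: HashMap<String, Vec<String>> = HashMap::new();

    // `or_insert(Vec::new())` builds the empty Vec even when the key is
    // already occupied; `or_insert_with(Vec::new)` only runs the
    // constructor for a vacant entry (clippy: `or_fun_call`).
    deps.entry("cargo".to_string())
        .or_insert_with(Vec::new)
        .push("semver".to_string());

    // `is_empty()` rather than `len() == 0` / `len() > 0` (clippy: `len_zero`).
    let bins: Vec<&str> = Vec::new();
    if bins.is_empty() {
        println!("no binaries to run");
    }

    // A `&str` constant needs no explicit `'static` lifetime
    // (clippy: `const_static_lifetime`).
    const CARGO_ENV: &str = "CARGO";
    println!("{} => {:?}", CARGO_ENV, deps);
}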