another round of clippy lint fixes #9051

Merged · 3 commits · Jan 6, 2021
6 changes: 2 additions & 4 deletions crates/cargo-test-support/src/paths.rs
@@ -127,10 +127,8 @@ impl CargoPathExt for Path {
             if let Err(e) = remove_dir_all::remove_dir_all(self) {
                 panic!("failed to remove {:?}: {:?}", self, e)
             }
-        } else {
-            if let Err(e) = fs::remove_file(self) {
-                panic!("failed to remove {:?}: {:?}", self, e)
-            }
+        } else if let Err(e) = fs::remove_file(self) {
+            panic!("failed to remove {:?}: {:?}", self, e)
         }
     }

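
Aside (not part of the diff): the change above collapses an `if` nested inside an `else` into `else if`, which is what Clippy's `collapsible_else_if` lint suggests. A minimal standalone sketch of the same shape, using an invented helper:

    use std::fs;
    use std::path::Path;

    // Invented helper for illustration only: remove a file or directory,
    // panicking on failure.
    fn remove_any(path: &Path) {
        if path.is_dir() {
            if let Err(e) = fs::remove_dir_all(path) {
                panic!("failed to remove {:?}: {:?}", path, e)
            }
        } else if let Err(e) = fs::remove_file(path) {
            // Collapsed from `else { if let Err(e) = ... { ... } }`.
            panic!("failed to remove {:?}: {:?}", path, e)
        }
    }

    fn main() {
        let p = Path::new("clippy-example.tmp");
        fs::write(p, b"x").unwrap();
        remove_any(p);
    }
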
8 changes: 1 addition & 7 deletions src/cargo/core/compiler/build_context/target_info.rs
@@ -408,13 +408,7 @@ impl TargetInfo {
 
         let error = str::from_utf8(&output.stderr).unwrap();
         let output = str::from_utf8(&output.stdout).unwrap();
-        Ok(parse_crate_type(
-            crate_type,
-            &process,
-            output,
-            error,
-            &mut output.lines(),
-        )?)
+        parse_crate_type(crate_type, &process, output, error, &mut output.lines())
     }
 
     /// Returns all the file types generated by rustc for the given mode/target_kind.
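
Aside: the removed `Ok(...?)` wrapper is the pattern Clippy's `needless_question_mark` lint flags: unwrapping a `Result` with `?` only to rewrap it in `Ok` adds nothing. A small self-contained sketch with invented names:

    use std::num::ParseIntError;

    fn parse(s: &str) -> Result<u32, ParseIntError> {
        s.trim().parse()
    }

    // `?` still earns its keep when the value is transformed before rewrapping...
    fn parse_doubled(s: &str) -> Result<u32, ParseIntError> {
        Ok(parse(s)? * 2)
    }

    // ...but `Ok(parse(s)?)` on its own is just `parse(s)`.
    fn parse_plain(s: &str) -> Result<u32, ParseIntError> {
        parse(s)
    }

    fn main() {
        assert_eq!(parse_doubled(" 21 "), Ok(42));
        assert_eq!(parse_plain("7"), Ok(7));
    }
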
8 changes: 4 additions & 4 deletions src/cargo/core/compiler/build_plan.rs
@@ -7,7 +7,7 @@
 //! dependencies on other Invocations.
 
 use std::collections::BTreeMap;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 
 use serde::Serialize;
 
@@ -63,10 +63,10 @@ impl Invocation {
         }
     }
 
-    pub fn add_output(&mut self, path: &PathBuf, link: &Option<PathBuf>) {
-        self.outputs.push(path.clone());
+    pub fn add_output(&mut self, path: &Path, link: &Option<PathBuf>) {
+        self.outputs.push(path.to_path_buf());
         if let Some(ref link) = *link {
-            self.links.insert(link.clone(), path.clone());
+            self.links.insert(link.clone(), path.to_path_buf());
         }
     }
 
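
Aside: taking `&Path` instead of `&PathBuf` (and calling `to_path_buf()` only where an owned value is actually stored) is what Clippy's `ptr_arg` lint recommends, since `&Path` accepts more caller types. A standalone sketch with invented names:

    use std::path::{Path, PathBuf};

    // Before: `fn add_output(outputs: &mut Vec<PathBuf>, path: &PathBuf)`.
    // Accepting &Path is strictly more general.
    fn add_output(outputs: &mut Vec<PathBuf>, path: &Path) {
        outputs.push(path.to_path_buf());
    }

    fn main() {
        let mut outputs = Vec::new();
        // &Path works for both a borrowed Path and a &PathBuf (via deref coercion).
        add_output(&mut outputs, Path::new("target/debug/foo"));
        let owned = PathBuf::from("target/debug/bar");
        add_output(&mut outputs, &owned);
        println!("{:?}", outputs);
    }
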
5 changes: 2 additions & 3 deletions src/cargo/core/compiler/custom_build.rs
@@ -750,7 +750,7 @@ pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> {
 
         // Load any dependency declarations from a previous run.
         if unit.mode.is_run_custom_build() {
-            parse_previous_explicit_deps(cx, unit)?;
+            parse_previous_explicit_deps(cx, unit);
         }
 
         // We want to invoke the compiler deterministically to be cache-friendly
@@ -787,13 +787,12 @@ pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> {
         }
     }
 
-    fn parse_previous_explicit_deps(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> {
+    fn parse_previous_explicit_deps(cx: &mut Context<'_, '_>, unit: &Unit) {
         let script_run_dir = cx.files().build_script_run_dir(unit);
         let output_file = script_run_dir.join("output");
         let (prev_output, _) = prev_build_output(cx, unit);
        let deps = BuildDeps::new(&output_file, prev_output.as_ref());
         cx.build_explicit_deps.insert(unit.clone(), deps);
-        Ok(())
     }
 }
 
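
Aside: removing a `CargoResult<()>` return from a function that can never fail looks like Clippy's `unnecessary_wraps` lint; callers then drop the `?`, as in the hunk above. A minimal sketch with made-up names:

    use std::collections::HashMap;

    // Before:
    //     fn record(map: &mut HashMap<String, u32>, key: &str) -> Result<(), String> {
    //         map.insert(key.to_string(), 1);
    //         Ok(())
    //     }
    // The body has no failure path, so the Result wrapper only forces callers to handle one.
    fn record(map: &mut HashMap<String, u32>, key: &str) {
        map.insert(key.to_string(), 1);
    }

    fn main() {
        let mut map = HashMap::new();
        record(&mut map, "build-script-output"); // no `?` or `.unwrap()` at the call site
        println!("{:?}", map);
    }
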
14 changes: 6 additions & 8 deletions src/cargo/core/compiler/job_queue.rs
@@ -780,14 +780,12 @@ impl<'cfg> DrainState<'cfg> {
         if err_state.is_some() {
             // Already encountered one error.
             log::warn!("{:?}", new_err);
+        } else if !self.active.is_empty() {
+            crate::display_error(&new_err, shell);
+            drop(shell.warn("build failed, waiting for other jobs to finish..."));
+            *err_state = Some(anyhow::format_err!("build failed"));
         } else {
-            if !self.active.is_empty() {
-                crate::display_error(&new_err, shell);
-                drop(shell.warn("build failed, waiting for other jobs to finish..."));
-                *err_state = Some(anyhow::format_err!("build failed"));
-            } else {
-                *err_state = Some(new_err);
-            }
+            *err_state = Some(new_err);
         }
     }
 
@@ -917,7 +915,7 @@ impl<'cfg> DrainState<'cfg> {
             // thread to run the job.
            doit(JobState {
                 id,
-                messages: messages.clone(),
+                messages,
                 output: Some(cx.bcx.config),
                 rmeta_required: Cell::new(rmeta_required),
                 _marker: marker::PhantomData,
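
Aside: replacing `messages: messages.clone()` with `messages` matches Clippy's `redundant_clone` lint: when the original value is never used again, the clone only copies data that is immediately dropped. A sketch with invented types:

    #[derive(Debug)]
    struct JobState {
        messages: Vec<String>,
    }

    fn spawn_job(messages: Vec<String>) -> JobState {
        // Before: `JobState { messages: messages.clone() }`. Since `messages`
        // is not touched again after this point, moving it is enough.
        JobState { messages }
    }

    fn main() {
        let messages = vec!["compiling".to_string(), "finished".to_string()];
        let state = spawn_job(messages);
        println!("{:?}", state);
    }
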
35 changes: 13 additions & 22 deletions src/cargo/core/compiler/mod.rs
@@ -24,7 +24,7 @@ use std::env;
 use std::ffi::{OsStr, OsString};
 use std::fs::{self, File};
 use std::io::{BufRead, Write};
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use std::sync::Arc;
 
 use anyhow::Error;
@@ -276,7 +276,7 @@ fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc<dyn Executor>) -> Car
                 )?;
                 add_plugin_deps(&mut rustc, &script_outputs, &build_scripts, &root_output)?;
             }
-            add_custom_env(&mut rustc, &script_outputs, current_id, script_metadata)?;
+            add_custom_env(&mut rustc, &script_outputs, current_id, script_metadata);
         }
 
         for output in outputs.iter() {
@@ -396,17 +396,14 @@ fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc<dyn Executor>) -> Car
         build_script_outputs: &BuildScriptOutputs,
         current_id: PackageId,
         metadata: Option<Metadata>,
-    ) -> CargoResult<()> {
-        let metadata = match metadata {
-            Some(metadata) => metadata,
-            None => return Ok(()),
-        };
-        if let Some(output) = build_script_outputs.get(current_id, metadata) {
-            for &(ref name, ref value) in output.env.iter() {
-                rustc.env(name, value);
+    ) {
+        if let Some(metadata) = metadata {
+            if let Some(output) = build_script_outputs.get(current_id, metadata) {
+                for &(ref name, ref value) in output.env.iter() {
+                    rustc.env(name, value);
+                }
             }
         }
-        Ok(())
     }
 }
 
@@ -491,7 +488,7 @@ fn add_plugin_deps(
     rustc: &mut ProcessBuilder,
     build_script_outputs: &BuildScriptOutputs,
     build_scripts: &BuildScripts,
-    root_output: &PathBuf,
+    root_output: &Path,
 ) -> CargoResult<()> {
     let var = util::dylib_path_envvar();
     let search_path = rustc.get_env(var).unwrap_or_default();
@@ -515,7 +512,7 @@
 // Strip off prefixes like "native=" or "framework=" and filter out directories
 // **not** inside our output directory since they are likely spurious and can cause
 // clashes with system shared libraries (issue #3366).
-fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &PathBuf) -> Vec<PathBuf>
+fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &Path) -> Vec<PathBuf>
 where
     I: Iterator<Item = &'a PathBuf>,
 {
@@ -603,7 +600,7 @@ fn rustdoc(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Work> {
         rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
     }
 
-    add_error_format_and_color(cx, &mut rustdoc, false)?;
+    add_error_format_and_color(cx, &mut rustdoc, false);
 
     if let Some(args) = cx.bcx.extra_args_for(unit) {
         rustdoc.args(args);
@@ -725,11 +722,7 @@ fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuild
 /// intercepting messages like rmeta artifacts, etc. rustc includes a
 /// "rendered" field in the JSON message with the message properly formatted,
 /// which Cargo will extract and display to the user.
-fn add_error_format_and_color(
-    cx: &Context<'_, '_>,
-    cmd: &mut ProcessBuilder,
-    pipelined: bool,
-) -> CargoResult<()> {
+fn add_error_format_and_color(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, pipelined: bool) {
     cmd.arg("--error-format=json");
     let mut json = String::from("--json=diagnostic-rendered-ansi");
     if pipelined {
@@ -764,8 +757,6 @@ fn add_error_format_and_color(
             _ => (),
         }
     }
-
-    Ok(())
 }
 
 fn build_base_args(
@@ -799,7 +790,7 @@ fn build_base_args(
     }
 
     add_path_args(bcx, unit, cmd);
-    add_error_format_and_color(cx, cmd, cx.rmeta_required(unit))?;
+    add_error_format_and_color(cx, cmd, cx.rmeta_required(unit));
 
     if !test {
         for crate_type in crate_types.iter() {
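
Aside: once `add_custom_env` no longer returns a `Result`, its `match metadata { ... None => return Ok(()) }` early return has to become a plain `if let Some(...)` block, as the hunk above shows. A standalone sketch of that reshaping, with an invented function and key name:

    use std::collections::HashMap;

    // Before (when the function returned a Result):
    //     let metadata = match metadata {
    //         Some(metadata) => metadata,
    //         None => return Ok(()),
    //     };
    //     ...
    //     Ok(())
    fn apply_env(env: &mut HashMap<String, String>, metadata: Option<&str>) {
        // After: no Result, so the useful work simply nests under `if let`.
        if let Some(metadata) = metadata {
            env.insert("SCRIPT_METADATA".to_string(), metadata.to_string());
        }
    }

    fn main() {
        let mut env = HashMap::new();
        apply_env(&mut env, None); // no-op
        apply_env(&mut env, Some("abc123"));
        println!("{:?}", env);
    }
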
5 changes: 2 additions & 3 deletions src/cargo/core/package.rs
@@ -607,9 +607,8 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
     /// eventually be returned from `wait_for_download`. Returns `Some(pkg)` if
     /// the package is ready and doesn't need to be downloaded.
     pub fn start(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
-        Ok(self
-            .start_inner(id)
-            .chain_err(|| format!("failed to download `{}`", id))?)
+        self.start_inner(id)
+            .chain_err(|| format!("failed to download `{}`", id))
     }
 
     fn start_inner(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
5 changes: 2 additions & 3 deletions src/cargo/core/profiles.rs
@@ -100,7 +100,7 @@ impl Profiles {
             requested_profile,
         };
 
-        Self::add_root_profiles(&mut profile_makers, &profiles)?;
+        Self::add_root_profiles(&mut profile_makers, &profiles);
 
         // Merge with predefined profiles.
         use std::collections::btree_map::Entry;
@@ -143,7 +143,7 @@ impl Profiles {
     fn add_root_profiles(
         profile_makers: &mut Profiles,
         profiles: &BTreeMap<InternedString, TomlProfile>,
-    ) -> CargoResult<()> {
+    ) {
         profile_makers.by_name.insert(
             InternedString::new("dev"),
             ProfileMaker::new(Profile::default_dev(), profiles.get("dev").cloned()),
@@ -153,7 +153,6 @@ impl Profiles {
             InternedString::new("release"),
             ProfileMaker::new(Profile::default_release(), profiles.get("release").cloned()),
         );
-        Ok(())
     }
 
     /// Returns the built-in profiles (not including dev/release, which are
11 changes: 9 additions & 2 deletions src/cargo/core/summary.rs
@@ -108,7 +108,7 @@ impl Summary {
         if !weak_dep_features {
             for (feat_name, features) in self.features() {
                 for fv in features {
-                    if matches!(fv, FeatureValue::DepFeature{weak: true, ..}) {
+                    if matches!(fv, FeatureValue::DepFeature { weak: true, .. }) {
                         bail!(
                             "optional dependency features with `?` syntax are only \
                              allowed on the nightly channel and requires the \
@@ -416,7 +416,14 @@ impl FeatureValue {
 
     /// Returns `true` if this feature explicitly used `dep:` syntax.
     pub fn has_dep_prefix(&self) -> bool {
-        matches!(self, FeatureValue::Dep{..} | FeatureValue::DepFeature{dep_prefix:true, ..})
+        matches!(
+            self,
+            FeatureValue::Dep { .. }
+                | FeatureValue::DepFeature {
+                    dep_prefix: true,
+                    ..
+                }
+        )
     }
 }
 
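
Aside: the `has_dep_prefix` change appears to be just rustfmt's multi-line layout for a `matches!` call once the pattern grows; `matches!` expands to a two-arm `match` that returns `bool`. A tiny sketch with a stripped-down, invented version of the enum:

    #[allow(dead_code)]
    enum FeatureValue {
        Feature(String),
        Dep { dep_name: String },
        DepFeature { dep_name: String, dep_prefix: bool },
    }

    fn has_dep_prefix(fv: &FeatureValue) -> bool {
        // Equivalent to a two-arm `match` that returns true for these patterns and false otherwise.
        matches!(
            fv,
            FeatureValue::Dep { .. } | FeatureValue::DepFeature { dep_prefix: true, .. }
        )
    }

    fn main() {
        let fv = FeatureValue::DepFeature {
            dep_name: "serde".to_string(),
            dep_prefix: true,
        };
        assert!(has_dep_prefix(&fv));
        assert!(!has_dep_prefix(&FeatureValue::Feature("std".to_string())));
    }
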
14 changes: 7 additions & 7 deletions src/cargo/core/workspace.rs
@@ -437,14 +437,14 @@ impl<'cfg> Workspace<'cfg> {
     /// Returns an error if `manifest_path` isn't actually a valid manifest or
     /// if some other transient error happens.
     fn find_root(&mut self, manifest_path: &Path) -> CargoResult<Option<PathBuf>> {
-        fn read_root_pointer(member_manifest: &Path, root_link: &str) -> CargoResult<PathBuf> {
+        fn read_root_pointer(member_manifest: &Path, root_link: &str) -> PathBuf {
            let path = member_manifest
                .parent()
                .unwrap()
                .join(root_link)
                .join("Cargo.toml");
            debug!("find_root - pointer {}", path.display());
-            Ok(paths::normalize_path(&path))
+            paths::normalize_path(&path)
         }
 
         {
@@ -456,7 +456,7 @@ impl<'cfg> Workspace<'cfg> {
                 }
                 WorkspaceConfig::Member {
                     root: Some(ref path_to_root),
-                } => return Ok(Some(read_root_pointer(manifest_path, path_to_root)?)),
+                } => return Ok(Some(read_root_pointer(manifest_path, path_to_root))),
                 WorkspaceConfig::Member { root: None } => {}
             }
         }
@@ -481,7 +481,7 @@ impl<'cfg> Workspace<'cfg> {
                     root: Some(ref path_to_root),
                 } => {
                     debug!("find_root - found pointer");
-                    return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)?));
+                    return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)));
                 }
                 WorkspaceConfig::Member { .. } => {}
             }
@@ -957,7 +957,7 @@ impl<'cfg> Workspace<'cfg> {
         if self.allows_new_cli_feature_behavior() {
             self.members_with_features_new(specs, requested_features)
         } else {
-            self.members_with_features_old(specs, requested_features)
+            Ok(self.members_with_features_old(specs, requested_features))
         }
     }
 
@@ -1067,7 +1067,7 @@ impl<'cfg> Workspace<'cfg> {
         &self,
         specs: &[PackageIdSpec],
         requested_features: &RequestedFeatures,
-    ) -> CargoResult<Vec<(&Package, RequestedFeatures)>> {
+    ) -> Vec<(&Package, RequestedFeatures)> {
         // Split off any features with the syntax `member-name/feature-name` into a map
         // so that those features can be applied directly to those workspace-members.
         let mut member_specific_features: HashMap<&str, BTreeSet<InternedString>> = HashMap::new();
@@ -1131,7 +1131,7 @@ impl<'cfg> Workspace<'cfg> {
                 }
             }
         });
-        Ok(ms.collect())
+        ms.collect()
     }
 }
 
2 changes: 1 addition & 1 deletion src/cargo/ops/cargo_compile.rs
@@ -282,7 +282,7 @@ pub fn compile_ws<'a>(
     let bcx = create_bcx(ws, options, &interner)?;
     if options.build_config.unit_graph {
         unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph)?;
-        return Ok(Compilation::new(&bcx)?);
+        return Compilation::new(&bcx);
     }
 
     let _p = profile::start("compiling");