rust: Fix clippy warnings

Mostly straightforward stuff. It taught me about the `matches!` macro,
which looks really useful.

Wanted to turn this on in CI, but there's still a bunch of clippy
warnings coming from the `cxx.rs` stuff and some of our unsafe blocks.
For example, it wants the `files` arg in `initramfs_overlay_generate` to
be `&[String]` instead of `&Vec<String>` but that would break cxx.rs (it
looks like cxx.rs does support slices, but it would require creating one
from the vector we have to create anyway).
This commit is contained in:
Jonathan Lebon 2021-01-22 14:19:23 -05:00 committed by OpenShift Merge Robot
parent 15a32c12d6
commit 49318cd950
13 changed files with 34 additions and 49 deletions

View File

@@ -64,7 +64,7 @@ pub(crate) fn cliwrap_entrypoint(args: Vec<String>) -> CxxResult<()> {
"rpm" => Ok(self::rpm::main(&args)?),
"dracut" => Ok(self::dracut::main(&args)?),
"grubby" => Ok(self::grubby::main(&args)?),
_ => return Err(anyhow!("Unknown wrapped binary: {}", name).into()),
_ => Err(anyhow!("Unknown wrapped binary: {}", name).into()),
}
}
}

View File

@@ -6,7 +6,6 @@
use crate::cxxrsutil::CxxResult;
use anyhow::Result;
use openat;
use openat_ext::OpenatDirExt;
use rayon::prelude::*;
use std::io;

View File

@@ -11,7 +11,7 @@ const BODHI_UPDATE_PREFIX: &str = "FEDORA-";
lazy_static::lazy_static! {
/// See https://github.com/cgwalters/koji-sane-json-api
static ref KOJI_JSON_API_HOST: String = {
std::env::var("RPMOSTREE_KOJI_JSON_API_HOST").ok().unwrap_or("kojiproxy-coreos.svc.ci.openshift.org".to_string())
std::env::var("RPMOSTREE_KOJI_JSON_API_HOST").ok().unwrap_or_else(|| "kojiproxy-coreos.svc.ci.openshift.org".to_string())
};
}

View File

@@ -5,10 +5,6 @@
*/
use c_utf8::CUtf8;
use gio_sys;
use glib_sys;
use libc;
use openat;
use std::ffi::CString;
use std::ffi::{CStr, OsStr};
use std::fmt::Display;

View File

@@ -119,7 +119,7 @@ impl HistoryEntry {
first_boot_timestamp: boot.timestamp,
last_boot_timestamp: boot.timestamp,
deploy_timestamp: deploy.timestamp,
deploy_cmdline: deploy.cmdline.map(|s| s.clone()).unwrap_or_default(),
deploy_cmdline: deploy.cmdline.unwrap_or_default(),
boot_count: 1,
eof: false,
}

View File

@@ -44,7 +44,7 @@ fn impl_journal_print_staging_failure() -> Result<()> {
// Now, go backwards until we hit the first entry from the previous boot. In theory that should
// just be a single `sd_journal_previous()` call, but we need a loop here, see:
// https://github.com/systemd/systemd/commit/dc00966228ff90c554fd034e588ea55eb605ec52
let mut previous_boot_id: Id128 = boot_id.clone();
let mut previous_boot_id: Id128 = boot_id;
while previous_boot_id == boot_id {
match j.previous_entry()? {
Some(_) => previous_boot_id = j.monotonic_timestamp()?.1,

View File

@@ -346,7 +346,11 @@ pub(crate) fn transaction_apply_live(
target: &str,
) -> CxxResult<()> {
let sysroot = &sysroot.gobj_wrap();
let target = if target.len() > 0 { Some(target) } else { None };
let target = if !target.is_empty() {
Some(target)
} else {
None
};
let repo = &sysroot.repo().expect("repo");
let booted = if let Some(b) = sysroot.get_booted_deployment() {
@@ -411,14 +415,13 @@ pub(crate) fn transaction_apply_live(
}
if let Some(ref state) = state {
if !state.inprogress.is_empty() {
if state.inprogress.as_str() != target_commit {
Err(anyhow::anyhow!(
"Previously interrupted while targeting commit {}, cannot change target to {}",
state.inprogress,
target_commit
))?;
}
if !state.inprogress.is_empty() && state.inprogress.as_str() != target_commit {
return Err(anyhow::anyhow!(
"Previously interrupted while targeting commit {}, cannot change target to {}",
state.inprogress,
target_commit
)
.into());
}
}

View File

@@ -13,7 +13,6 @@ use anyhow::Result;
use chrono::prelude::*;
use openat_ext::OpenatDirExt;
use serde_derive::{Deserialize, Serialize};
use serde_json;
use std::collections::{BTreeMap, HashMap};
use std::convert::TryInto;
use std::io;
@@ -195,8 +194,6 @@ mod ffi {
use crate::includes::*;
use glib::translate::*;
use glib::GString;
use glib_sys;
use libc;
use libdnf_sys::*;
use std::ptr;
@@ -216,7 +213,10 @@ mod ffi {
let map = lockfile.packages.into_iter().fold(
HashMap::<String, String>::new(),
|mut acc, (k, v)| {
acc.insert(format!("{}-{}", k, v.evra), v.digest.unwrap_or("".into()));
acc.insert(
format!("{}-{}", k, v.evra),
v.digest.unwrap_or_else(|| "".into()),
);
acc
},
);
@@ -293,7 +293,8 @@ mod ffi {
int_glib_error(
|| -> Result<()> {
let lockfile_dir = openat::Dir::open(filename.parent().unwrap_or(Path::new("/")))?;
let lockfile_dir =
openat::Dir::open(filename.parent().unwrap_or_else(|| Path::new("/")))?;
let basename = filename.file_name().expect("filename");
lockfile_dir.write_file_with(basename, 0o644, |w| -> Result<()> {
Ok(serde_json::to_writer_pretty(w, &lockfile)?)

View File

@@ -90,10 +90,7 @@ fn diff_recurse(
let to_child = to.get_child(&name).expect("child");
let to_info = query_info_optional(&to_child, queryattrs, queryflags)
.context("querying optional to")?;
let is_dir = match from_info.get_file_type() {
gio::FileType::Directory => true,
_ => false,
};
let is_dir = matches!(from_info.get_file_type(), gio::FileType::Directory);
if to_info.is_some() {
let to_child = to_child.downcast::<ostree::RepoFile>().expect("downcast");
to_child.ensure_resolved()?;
@@ -120,12 +117,10 @@ fn diff_recurse(
diff.changed_files.insert(path);
}
}
} else if is_dir {
diff.removed_dirs.insert(path);
} else {
if is_dir {
diff.removed_dirs.insert(path);
} else {
diff.removed_files.insert(path);
}
diff.removed_files.insert(path);
}
}
// Iterate over the target (to) directory, and find any
@@ -141,10 +136,7 @@ fn diff_recurse(
if from_info.is_some() {
continue;
}
let is_dir = match to_info.get_file_type() {
gio::FileType::Directory => true,
_ => false,
};
let is_dir = matches!(to_info.get_file_type(), gio::FileType::Directory);
if is_dir {
diff.added_dirs.insert(path);
} else {

View File

@@ -26,7 +26,7 @@ static PWGRP_LOCK_AND_BACKUP_FILES: &[&str] = &[
/// Populate a new DB with content from `passwd` and `group` files.
pub fn passwddb_open(rootfs: i32) -> Result<Box<PasswdDB>> {
let fd = ffiutil::ffi_view_openat_dir(rootfs);
PasswdDB::populate_new(&fd).map(|db| Box::new(db))
PasswdDB::populate_new(&fd).map(Box::new)
}
/// Prepare passwd content before layering RPMs.

View File

@@ -11,7 +11,6 @@
use crate::cxxrsutil::*;
use anyhow::{Context, Result};
use openat;
use openat_ext::{FileExt, OpenatDirExt};
use rand::Rng;
use std::fs;
@@ -182,7 +181,10 @@ fn update_os_tree(opts: &SyntheticUpgradeOpts) -> Result<()> {
}
assert!(mutated > 0);
println!("Mutated ELF files: {}", mutated);
let src_ref = opts.src_ref.as_deref().unwrap_or(opts.ostref.as_str());
let src_ref = opts
.src_ref
.as_deref()
.unwrap_or_else(|| opts.ostref.as_str());
let mut cmd = Command::new("ostree");
cmd.arg(format!("--repo={}", repo_path.to_str().unwrap()))
.args(&["commit", "--consume", "-b"])

View File

@@ -10,9 +10,7 @@
use anyhow::{anyhow, bail, Result};
use c_utf8::CUtf8Buf;
use openat;
use serde_derive::{Deserialize, Serialize};
use serde_json;
use std::collections::btree_map::Entry;
use std::collections::{BTreeMap, HashMap};
use std::io::prelude::*;
@@ -536,7 +534,7 @@ impl Treefile {
}
/// Generate a rojig spec file.
fn write_rojig_spec<'a, 'b>(workdir: &'a openat::Dir, r: &'b Rojig) -> Result<CUtf8Buf> {
fn write_rojig_spec(workdir: &openat::Dir, r: &Rojig) -> Result<CUtf8Buf> {
let description = r
.description
.as_ref()
@@ -547,7 +545,7 @@ impl Treefile {
None
}
})
.unwrap_or(r.summary.as_str());
.unwrap_or_else(|| r.summary.as_str());
let name: String = format!("{}.spec", r.name);
{
let mut f = workdir.write_file(name.as_str(), 0o644)?;
@@ -1393,8 +1391,6 @@
mod ffi {
use super::*;
use glib::translate::*;
use glib_sys;
use libc;
use std::io::Seek;
use std::os::unix::io::{AsRawFd, RawFd};
use std::{fs, io, ptr};

View File

@@ -11,13 +11,9 @@ use std::io::prelude::*:
use std::os::unix::io::IntoRawFd;
use std::path::Path;
use std::{fs, io};
use tempfile;
use curl::easy::Easy;
use serde_json;
use serde_yaml;
#[derive(PartialEq)]
/// Supported config serialization used by treefile and lockfile
pub enum InputFormat {