use or_default instead of or_insert_with(Default::default)
We need to annotate some cases to allow the compiler to infer the types. Fixes the clippy lint: ``` warning: use of `or_insert_with` to construct default value --> src/api2/tape/restore.rs:750:18 | 750 | .or_insert_with(Vec::new); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()` | = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#unwrap_or_default = note: `#[warn(clippy::unwrap_or_default)]` on by default ``` Signed-off-by: Maximiliano Sandoval <m.sandoval@proxmox.com>
This commit is contained in:
parent
7b61e06860
commit
b0a8752edd
@ -447,8 +447,8 @@ impl AclTree {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn write_node_config(node: &AclTreeNode, path: &str, w: &mut dyn Write) -> Result<(), Error> {
|
fn write_node_config(node: &AclTreeNode, path: &str, w: &mut dyn Write) -> Result<(), Error> {
|
||||||
let mut role_ug_map0 = HashMap::new();
|
let mut role_ug_map0: HashMap<_, BTreeSet<_>> = HashMap::new();
|
||||||
let mut role_ug_map1 = HashMap::new();
|
let mut role_ug_map1: HashMap<_, BTreeSet<_>> = HashMap::new();
|
||||||
|
|
||||||
for (auth_id, roles) in &node.users {
|
for (auth_id, roles) in &node.users {
|
||||||
// no need to save, because root is always 'Administrator'
|
// no need to save, because root is always 'Administrator'
|
||||||
@ -459,15 +459,9 @@ impl AclTree {
|
|||||||
let role = role.as_str();
|
let role = role.as_str();
|
||||||
let auth_id = auth_id.to_string();
|
let auth_id = auth_id.to_string();
|
||||||
if *propagate {
|
if *propagate {
|
||||||
role_ug_map1
|
role_ug_map1.entry(role).or_default().insert(auth_id);
|
||||||
.entry(role)
|
|
||||||
.or_insert_with(BTreeSet::new)
|
|
||||||
.insert(auth_id);
|
|
||||||
} else {
|
} else {
|
||||||
role_ug_map0
|
role_ug_map0.entry(role).or_default().insert(auth_id);
|
||||||
.entry(role)
|
|
||||||
.or_insert_with(BTreeSet::new)
|
|
||||||
.insert(auth_id);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -476,15 +470,9 @@ impl AclTree {
|
|||||||
for (role, propagate) in roles {
|
for (role, propagate) in roles {
|
||||||
let group = format!("@{}", group);
|
let group = format!("@{}", group);
|
||||||
if *propagate {
|
if *propagate {
|
||||||
role_ug_map1
|
role_ug_map1.entry(role).or_default().insert(group);
|
||||||
.entry(role)
|
|
||||||
.or_insert_with(BTreeSet::new)
|
|
||||||
.insert(group);
|
|
||||||
} else {
|
} else {
|
||||||
role_ug_map0
|
role_ug_map0.entry(role).or_default().insert(group);
|
||||||
.entry(role)
|
|
||||||
.or_insert_with(BTreeSet::new)
|
|
||||||
.insert(group);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -492,7 +480,7 @@ impl AclTree {
|
|||||||
fn group_by_property_list(
|
fn group_by_property_list(
|
||||||
item_property_map: &HashMap<&str, BTreeSet<String>>,
|
item_property_map: &HashMap<&str, BTreeSet<String>>,
|
||||||
) -> BTreeMap<String, BTreeSet<String>> {
|
) -> BTreeMap<String, BTreeSet<String>> {
|
||||||
let mut result_map = BTreeMap::new();
|
let mut result_map: BTreeMap<_, BTreeSet<_>> = BTreeMap::new();
|
||||||
for (item, property_map) in item_property_map {
|
for (item, property_map) in item_property_map {
|
||||||
let item_list = property_map.iter().fold(String::new(), |mut acc, v| {
|
let item_list = property_map.iter().fold(String::new(), |mut acc, v| {
|
||||||
if !acc.is_empty() {
|
if !acc.is_empty() {
|
||||||
@ -503,7 +491,7 @@ impl AclTree {
|
|||||||
});
|
});
|
||||||
result_map
|
result_map
|
||||||
.entry(item_list)
|
.entry(item_list)
|
||||||
.or_insert_with(BTreeSet::new)
|
.or_default()
|
||||||
.insert(item.to_string());
|
.insert(item.to_string());
|
||||||
}
|
}
|
||||||
result_map
|
result_map
|
||||||
|
@ -75,7 +75,7 @@ impl TryFrom<Vec<String>> for NamespaceMap {
|
|||||||
let max_depth = mapping.max_depth.unwrap_or(MAX_NAMESPACE_DEPTH);
|
let max_depth = mapping.max_depth.unwrap_or(MAX_NAMESPACE_DEPTH);
|
||||||
|
|
||||||
let ns_map: &mut HashMap<BackupNamespace, (BackupNamespace, usize)> =
|
let ns_map: &mut HashMap<BackupNamespace, (BackupNamespace, usize)> =
|
||||||
map.entry(mapping.store).or_insert_with(HashMap::new);
|
map.entry(mapping.store).or_default();
|
||||||
|
|
||||||
if ns_map.insert(source, (target, max_depth)).is_some() {
|
if ns_map.insert(source, (target, max_depth)).is_some() {
|
||||||
bail!("duplicate mapping found");
|
bail!("duplicate mapping found");
|
||||||
@ -747,7 +747,7 @@ fn restore_list_worker(
|
|||||||
|
|
||||||
let file_list = snapshot_file_hash
|
let file_list = snapshot_file_hash
|
||||||
.entry(media_id.label.uuid.clone())
|
.entry(media_id.label.uuid.clone())
|
||||||
.or_insert_with(Vec::new);
|
.or_default();
|
||||||
file_list.push(file_num);
|
file_list.push(file_num);
|
||||||
|
|
||||||
task_log!(
|
task_log!(
|
||||||
@ -808,10 +808,8 @@ fn restore_list_worker(
|
|||||||
// we only want to restore chunks that we do not have yet
|
// we only want to restore chunks that we do not have yet
|
||||||
if !datastore.cond_touch_chunk(&digest, false)? {
|
if !datastore.cond_touch_chunk(&digest, false)? {
|
||||||
if let Some((uuid, nr)) = catalog.lookup_chunk(&source_datastore, &digest) {
|
if let Some((uuid, nr)) = catalog.lookup_chunk(&source_datastore, &digest) {
|
||||||
let file = media_file_chunk_map
|
let file = media_file_chunk_map.entry(uuid.clone()).or_default();
|
||||||
.entry(uuid.clone())
|
let chunks = file.entry(nr).or_default();
|
||||||
.or_insert_with(BTreeMap::new);
|
|
||||||
let chunks = file.entry(nr).or_insert_with(HashSet::new);
|
|
||||||
chunks.insert(digest);
|
chunks.insert(digest);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1089,9 +1087,7 @@ fn restore_snapshots_to_tmpdir(
|
|||||||
);
|
);
|
||||||
std::fs::create_dir_all(&tmp_path)?;
|
std::fs::create_dir_all(&tmp_path)?;
|
||||||
|
|
||||||
let chunks = chunks_list
|
let chunks = chunks_list.entry(source_datastore).or_default();
|
||||||
.entry(source_datastore)
|
|
||||||
.or_insert_with(HashSet::new);
|
|
||||||
let manifest =
|
let manifest =
|
||||||
try_restore_snapshot_archive(worker.clone(), &mut decoder, &tmp_path)?;
|
try_restore_snapshot_archive(worker.clone(), &mut decoder, &tmp_path)?;
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user