cargo fmt

bigger changes are only in the new crates
rest are minor ones

Signed-off-by: Wolfgang Bumiller <w.bumiller@proxmox.com>
Wolfgang Bumiller 2023-06-23 11:50:38 +02:00
parent b687edc1a0
commit d5b9f166a2
7 changed files with 122 additions and 86 deletions

@@ -245,7 +245,10 @@ impl FileReferenceType {
}
pub fn is_package_index(&self) -> bool {
matches!(self, FileReferenceType::Packages(_, _) | FileReferenceType::Sources(_))
matches!(
self,
FileReferenceType::Packages(_, _) | FileReferenceType::Sources(_)
)
}
}

@@ -1,6 +1,6 @@
use std::collections::HashMap;
use anyhow::{bail, Error, format_err};
use anyhow::{bail, format_err, Error};
use rfc822_like::de::Deserializer;
use serde::Deserialize;
use serde_json::Value;
@@ -26,8 +26,6 @@ use super::CheckSums;
//
//Files (mandatory)
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct SourcesFileRaw {
@@ -104,8 +102,17 @@ impl TryFrom<SourcesFileRaw> for SourcePackageEntry {
for file_reference in value.files.lines() {
let (file_name, size, md5) = parse_file_reference(file_reference, 16)?;
let entry = parsed.files.entry(file_name.clone()).or_insert_with(|| SourcePackageFileReference { file: file_name, size, checksums: CheckSums::default()});
entry.checksums.md5 = Some(md5.try_into().map_err(|_|format_err!("unexpected checksum length"))?);
let entry = parsed.files.entry(file_name.clone()).or_insert_with(|| {
SourcePackageFileReference {
file: file_name,
size,
checksums: CheckSums::default(),
}
});
entry.checksums.md5 = Some(
md5.try_into()
.map_err(|_| format_err!("unexpected checksum length"))?,
);
if entry.size != size {
bail!("Size mismatch: {} != {}", entry.size, size);
}
@@ -114,8 +121,18 @@ impl TryFrom<SourcesFileRaw> for SourcePackageEntry {
if let Some(sha256) = value.sha256 {
for line in sha256.lines() {
let (file_name, size, sha256) = parse_file_reference(line, 32)?;
let entry = parsed.files.entry(file_name.clone()).or_insert_with(|| SourcePackageFileReference { file: file_name, size, checksums: CheckSums::default()});
entry.checksums.sha256 = Some(sha256.try_into().map_err(|_|format_err!("unexpected checksum length"))?);
let entry = parsed.files.entry(file_name.clone()).or_insert_with(|| {
SourcePackageFileReference {
file: file_name,
size,
checksums: CheckSums::default(),
}
});
entry.checksums.sha256 = Some(
sha256
.try_into()
.map_err(|_| format_err!("unexpected checksum length"))?,
);
if entry.size != size {
bail!("Size mismatch: {} != {}", entry.size, size);
}
@@ -125,8 +142,18 @@ impl TryFrom<SourcesFileRaw> for SourcePackageEntry {
if let Some(sha512) = value.sha512 {
for line in sha512.lines() {
let (file_name, size, sha512) = parse_file_reference(line, 64)?;
let entry = parsed.files.entry(file_name.clone()).or_insert_with(|| SourcePackageFileReference { file: file_name, size, checksums: CheckSums::default()});
entry.checksums.sha512 = Some(sha512.try_into().map_err(|_|format_err!("unexpected checksum length"))?);
let entry = parsed.files.entry(file_name.clone()).or_insert_with(|| {
SourcePackageFileReference {
file: file_name,
size,
checksums: CheckSums::default(),
}
});
entry.checksums.sha512 = Some(
sha512
.try_into()
.map_err(|_| format_err!("unexpected checksum length"))?,
);
if entry.size != size {
bail!("Size mismatch: {} != {}", entry.size, size);
}
@@ -135,10 +162,7 @@ impl TryFrom<SourcesFileRaw> for SourcePackageEntry {
for (file_name, reference) in &parsed.files {
if !reference.checksums.is_secure() {
bail!(
"no strong checksum found for source entry '{}'",
file_name
);
bail!("no strong checksum found for source entry '{}'", file_name);
}
}
@@ -177,10 +201,7 @@ impl TryFrom<Vec<SourcesFileRaw>> for SourcesFile {
}
}
fn parse_file_reference(
line: &str,
csum_len: usize,
) -> Result<(String, usize, Vec<u8>), Error> {
fn parse_file_reference(line: &str, csum_len: usize) -> Result<(String, usize, Vec<u8>), Error> {
let mut split = line.split_ascii_whitespace();
let checksum = split
@@ -226,7 +247,11 @@ pub fn test_deb_packages_file() {
assert_eq!(parsed.source_packages.len(), 1558);
let found = parsed.source_packages.iter().find(|source| source.package == "base-files").expect("test file contains 'base-files' entry");
let found = parsed
.source_packages
.iter()
.find(|source| source.package == "base-files")
.expect("test file contains 'base-files' entry");
assert_eq!(found.package, "base-files");
assert_eq!(found.format, "3.0 (native)");
assert_eq!(found.architecture.as_deref(), Some("any"));
@@ -234,7 +259,10 @@ pub fn test_deb_packages_file() {
assert_eq!(found.section.as_deref(), Some("admin"));
assert_eq!(found.version, "11.1+deb11u5");
let binary_packages = found.binary.as_ref().expect("base-files source package builds base-files binary package");
let binary_packages = found
.binary
.as_ref()
.expect("base-files source package builds base-files binary package");
assert_eq!(binary_packages.len(), 1);
assert_eq!(binary_packages[0], "base-files");
@@ -242,16 +270,32 @@ pub fn test_deb_packages_file() {
assert_eq!(references.len(), 2);
let dsc_file = "base-files_11.1+deb11u5.dsc";
let dsc = references.get(dsc_file).expect("base-files source package contains 'dsc' reference");
let dsc = references
.get(dsc_file)
.expect("base-files source package contains 'dsc' reference");
assert_eq!(dsc.file, dsc_file);
assert_eq!(dsc.size, 1110);
assert_eq!(dsc.checksums.md5.expect("dsc has md5 checksum"), hex::decode("741c34ac0151262a03de8d5a07bc4271").unwrap()[..]);
assert_eq!(dsc.checksums.sha256.expect("dsc has sha256 checksum"), hex::decode("c41a7f00d57759f27e6068240d1ea7ad80a9a752e4fb43850f7e86e967422bd3").unwrap()[..]);
assert_eq!(
dsc.checksums.md5.expect("dsc has md5 checksum"),
hex::decode("741c34ac0151262a03de8d5a07bc4271").unwrap()[..]
);
assert_eq!(
dsc.checksums.sha256.expect("dsc has sha256 checksum"),
hex::decode("c41a7f00d57759f27e6068240d1ea7ad80a9a752e4fb43850f7e86e967422bd3").unwrap()[..]
);
let tar_file = "base-files_11.1+deb11u5.tar.xz";
let tar = references.get(tar_file).expect("base-files source package contains 'tar' reference");
let tar = references
.get(tar_file)
.expect("base-files source package contains 'tar' reference");
assert_eq!(tar.file, tar_file);
assert_eq!(tar.size, 65612);
assert_eq!(tar.checksums.md5.expect("tar has md5 checksum"), hex::decode("995df33642118b566a4026410e1c6aac").unwrap()[..]);
assert_eq!(tar.checksums.sha256.expect("tar has sha256 checksum"), hex::decode("31c9e5745845a73f3d5c8a7868c379d77aaca42b81194679d7ab40cc28e3a0e9").unwrap()[..]);
assert_eq!(
tar.checksums.md5.expect("tar has md5 checksum"),
hex::decode("995df33642118b566a4026410e1c6aac").unwrap()[..]
);
assert_eq!(
tar.checksums.sha256.expect("tar has sha256 checksum"),
hex::decode("31c9e5745845a73f3d5c8a7868c379d77aaca42b81194679d7ab40cc28e3a0e9").unwrap()[..]
);
}

@@ -88,7 +88,9 @@ impl Display for APTRepositoryHandle {
APTRepositoryHandle::NoSubscription => write!(f, "no-subscription"),
APTRepositoryHandle::Test => write!(f, "test"),
APTRepositoryHandle::CephQuincyEnterprise => write!(f, "ceph-quincy-enterprise"),
APTRepositoryHandle::CephQuincyNoSubscription => write!(f, "ceph-quincy-no-subscription"),
APTRepositoryHandle::CephQuincyNoSubscription => {
write!(f, "ceph-quincy-no-subscription")
}
APTRepositoryHandle::CephQuincyTest => write!(f, "ceph-quincy-test"),
}
}
@@ -148,8 +150,12 @@ impl APTRepositoryHandle {
}
APTRepositoryHandle::NoSubscription => "/etc/apt/sources.list".to_string(),
APTRepositoryHandle::Test => "/etc/apt/sources.list".to_string(),
APTRepositoryHandle::CephQuincyEnterprise => "/etc/apt/sources.list.d/ceph.list".to_string(),
APTRepositoryHandle::CephQuincyNoSubscription => "/etc/apt/sources.list.d/ceph.list".to_string(),
APTRepositoryHandle::CephQuincyEnterprise => {
"/etc/apt/sources.list.d/ceph.list".to_string()
}
APTRepositoryHandle::CephQuincyNoSubscription => {
"/etc/apt/sources.list.d/ceph.list".to_string()
}
APTRepositoryHandle::CephQuincyTest => "/etc/apt/sources.list.d/ceph.list".to_string(),
}
}

@@ -59,7 +59,10 @@ fn normalize_url(mut api_url: String) -> String {
impl Login {
/// Prepare a request given an existing ticket string.
pub fn renew(api_url: impl Into<String>, ticket: impl Into<String>) -> Result<Self, TicketError> {
pub fn renew(
api_url: impl Into<String>,
ticket: impl Into<String>,
) -> Result<Self, TicketError> {
Ok(Self::renew_ticket(api_url, ticket.into().parse()?))
}
@@ -79,7 +82,11 @@ impl Login {
}
/// Prepare a request given a userid and password.
pub fn new(api_url: impl Into<String>, userid: impl Into<String>, password: impl Into<String>) -> Self {
pub fn new(
api_url: impl Into<String>,
userid: impl Into<String>,
password: impl Into<String>,
) -> Self {
Self {
api_url: normalize_url(api_url.into()),
userid: userid.into(),
@@ -107,8 +114,7 @@ impl Login {
..Default::default()
};
let body = serde_json::to_string(&request)
.unwrap(); // this can never fail
let body = serde_json::to_string(&request).unwrap(); // this can never fail
Request {
url: format!("{}/api2/json/access/ticket", self.api_url),

@@ -3,22 +3,16 @@ use std::path::{Path, PathBuf};
use anyhow::{bail, Error};
use serde_json::{json, Value};
use proxmox_sys::fs::{
replace_file,
open_file_locked,
file_get_json,
CreateOptions,
};
use proxmox_sys::fs::{file_get_json, open_file_locked, replace_file, CreateOptions};
use proxmox_time::epoch_i64;
use super::{PublicAuthState, PrivateAuthState};
use super::{PrivateAuthState, PublicAuthState};
fn load_auth_state_locked(
state_dir: &Path,
realm: &str,
default: Option<Value>,
) -> Result<(PathBuf, std::fs::File, Vec<Value>), Error> {
let mut lock_path = state_dir.to_owned();
lock_path.push(format!("proxmox-openid-auth-state-{}.lck", realm));
@@ -26,7 +20,7 @@ fn load_auth_state_locked(
lock_path,
std::time::Duration::new(10, 0),
true,
CreateOptions::new()
CreateOptions::new(),
)?;
let mut path = state_dir.to_owned();
@@ -38,7 +32,7 @@ fn load_auth_state_locked(
let mut data: Vec<Value> = Vec::new();
let timeout = 10*60; // 10 minutes
let timeout = 10 * 60; // 10 minutes
for v in old_data.as_array().unwrap() {
let ctime = v["ctime"].as_i64().unwrap_or(0);
@@ -51,11 +45,7 @@ fn load_auth_state_locked(
Ok((path, lock, data))
}
fn replace_auth_state(
path: &Path,
data: &Vec<Value>,
) -> Result<(), Error> {
fn replace_auth_state(path: &Path, data: &Vec<Value>) -> Result<(), Error> {
let mode = nix::sys::stat::Mode::from_bits_truncate(0o0600);
let options = CreateOptions::new().perm(mode);
let raw = serde_json::to_string_pretty(data)?;
@@ -69,10 +59,10 @@ pub fn verify_public_auth_state(
state_dir: &Path,
state: &str,
) -> Result<(String, PrivateAuthState), Error> {
let public_auth_state: PublicAuthState = serde_json::from_str(state)?;
let (path, _lock, old_data) = load_auth_state_locked(state_dir, &public_auth_state.realm, None)?;
let (path, _lock, old_data) =
load_auth_state_locked(state_dir, &public_auth_state.realm, None)?;
let mut data: Vec<Value> = Vec::new();
@@ -101,7 +91,6 @@ pub fn store_auth_state(
realm: &str,
auth_state: &PrivateAuthState,
) -> Result<(), Error> {
let (path, _lock, mut data) = load_auth_state_locked(state_dir, realm, Some(json!([])))?;
if data.len() > 100 {

@@ -10,21 +10,14 @@ pub use http_client::http_client;
mod auth_state;
pub use auth_state::*;
use openidconnect::{
//curl::http_client,
core::{
CoreProviderMetadata,
CoreClient,
CoreIdTokenClaims,
CoreIdTokenVerifier,
CoreAuthenticationFlow,
CoreAuthDisplay,
CoreAuthPrompt,
CoreGenderClaim,
CoreAuthDisplay, CoreAuthPrompt, CoreAuthenticationFlow, CoreClient, CoreGenderClaim,
CoreIdTokenClaims, CoreIdTokenVerifier, CoreProviderMetadata,
},
PkceCodeChallenge,
PkceCodeVerifier,
AdditionalClaims,
AuthenticationContextClass,
AuthorizationCode,
ClientId,
ClientSecret,
@@ -32,11 +25,11 @@ use openidconnect::{
IssuerUrl,
Nonce,
OAuth2TokenResponse,
PkceCodeChallenge,
PkceCodeVerifier,
RedirectUrl,
Scope,
UserInfoClaims,
AdditionalClaims,
AuthenticationContextClass,
};
/// Stores Additional Claims into a serde_json::Value;
@@ -50,13 +43,13 @@ pub type GenericUserInfoClaims = UserInfoClaims<GenericClaims, CoreGenderClaim>;
pub struct OpenIdConfig {
pub issuer_url: String,
pub client_id: String,
#[serde(skip_serializing_if="Option::is_none")]
#[serde(skip_serializing_if = "Option::is_none")]
pub client_key: Option<String>,
#[serde(skip_serializing_if="Option::is_none")]
#[serde(skip_serializing_if = "Option::is_none")]
pub scopes: Option<Vec<String>>,
#[serde(skip_serializing_if="Option::is_none")]
#[serde(skip_serializing_if = "Option::is_none")]
pub prompt: Option<String>,
#[serde(skip_serializing_if="Option::is_none")]
#[serde(skip_serializing_if = "Option::is_none")]
pub acr_values: Option<Vec<String>>,
}
@@ -80,7 +73,6 @@ pub struct PrivateAuthState {
}
impl PrivateAuthState {
pub fn new() -> Self {
let nonce = Nonce::new_random();
let csrf_token = CsrfToken::new_random();
@@ -113,20 +105,15 @@
}
impl OpenIdAuthenticator {
pub fn discover(config: &OpenIdConfig, redirect_url: &str) -> Result<Self, Error> {
let client_id = ClientId::new(config.client_id.clone());
let client_key = config.client_key.clone().map(|key| ClientSecret::new(key));
let issuer_url = IssuerUrl::new(config.issuer_url.clone())?;
let provider_metadata = CoreProviderMetadata::discover(&issuer_url, http_client)?;
let client = CoreClient::from_provider_metadata(
provider_metadata,
client_id,
client_key,
).set_redirect_uri(RedirectUrl::new(String::from(redirect_url))?);
let client = CoreClient::from_provider_metadata(provider_metadata, client_id, client_key)
.set_redirect_uri(RedirectUrl::new(String::from(redirect_url))?);
Ok(Self {
client,
@@ -135,15 +122,15 @@ impl OpenIdAuthenticator {
}
pub fn authorize_url(&self, state_dir: &str, realm: &str) -> Result<String, Error> {
let private_auth_state = PrivateAuthState::new();
let public_auth_state = private_auth_state.public_state_string(realm.to_string())?;
let nonce = private_auth_state.nonce.clone();
store_auth_state(Path::new(state_dir), realm, &private_auth_state)?;
// Generate the authorization URL to which we'll redirect the user.
let mut request = self.client
// Generate the authorization URL to which we'll redirect the user.
let mut request = self
.client
.authorize_url(
CoreAuthenticationFlow::AuthorizationCode,
|| CsrfToken::new(public_auth_state),
@@ -154,7 +141,7 @@ impl OpenIdAuthenticator {
request = request.set_display(CoreAuthDisplay::Page);
match self.config.prompt.as_deref() {
None => { /* nothing */ },
None => { /* nothing */ }
Some("none") => {
request = request.add_prompt(CoreAuthPrompt::None);
}
@@ -201,10 +188,10 @@ impl OpenIdAuthenticator {
code: &str,
private_auth_state: &PrivateAuthState,
) -> Result<(CoreIdTokenClaims, GenericUserInfoClaims), Error> {
let code = AuthorizationCode::new(code.to_string());
// Exchange the code with a token.
let token_response = self.client
let token_response = self
.client
.exchange_code(code)
.set_pkce_verifier(private_auth_state.pkce_verifier())
.request(http_client)
@@ -218,7 +205,8 @@ impl OpenIdAuthenticator {
.claims(&id_token_verifier, &private_auth_state.nonce)
.map_err(|err| format_err!("Failed to verify ID token: {}", err))?;
let userinfo_claims: GenericUserInfoClaims = self.client
let userinfo_claims: GenericUserInfoClaims = self
.client
.user_info(token_response.access_token().to_owned(), None)?
.request(http_client)
.map_err(|err| format_err!("Failed to contact userinfo endpoint: {}", err))?;
@@ -232,8 +220,8 @@ impl OpenIdAuthenticator {
code: &str,
private_auth_state: &PrivateAuthState,
) -> Result<Value, Error> {
let (id_token_claims, userinfo_claims) = self.verify_authorization_code(&code, &private_auth_state)?;
let (id_token_claims, userinfo_claims) =
self.verify_authorization_code(&code, &private_auth_state)?;
let mut data = serde_json::to_value(id_token_claims)?;

@@ -13,7 +13,7 @@ use lazy_static::lazy_static;
use nix::unistd::Pid;
use serde::Serialize;
use crate::fs::{read_firstline, file_read_firstline};
use crate::fs::{file_read_firstline, read_firstline};
pub mod mountinfo;
#[doc(inline)]