rrd: spell out hard-to-understand abbreviations in public types

Signed-off-by: Lukas Wagner <l.wagner@proxmox.com>
Lukas Wagner 2024-01-31 16:26:01 +01:00
parent f9e8ebfdc8
commit 2f94283367
7 changed files with 113 additions and 110 deletions
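
In short, the renamed public types are: CF -> AggregationFn, DST -> DataSourceType, RRA -> Archive, RRD -> Database, and RRDCache -> Cache. As a minimal sketch (assuming a consumer that only uses the proxmox_rrd::rrd module, as in the diffs below), downstream code mainly adjusts its imports and constructor calls:

// before: use proxmox_rrd::rrd::{CF, DST, RRA, RRD};
// after:
use proxmox_rrd::rrd::{AggregationFn, Archive, DataSourceType, Database};

fn one_day_gauge() -> Database {
    // one averaged archive: 60 s resolution, 1440 slots (~1 day)
    let rra = Archive::new(AggregationFn::Average, 60, 1440);
    Database::new(DataSourceType::Gauge, vec![rra])
}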


@@ -14,7 +14,7 @@ use proxmox_schema::{api, ApiStringFormat, ApiType, IntegerSchema, Schema, Strin
 use proxmox_sys::fs::CreateOptions;
-use proxmox_rrd::rrd::{CF, DST, RRA, RRD};
+use proxmox_rrd::rrd::{AggregationFn, Archive, DataSourceType, Database};
 pub const RRA_INDEX_SCHEMA: Schema = IntegerSchema::new("Index of the RRA.").minimum(0).schema();
@@ -31,7 +31,7 @@ pub const RRA_CONFIG_STRING_SCHEMA: Schema = StringSchema::new("RRA configuratio
 pub struct RRAConfig {
 /// Time resolution
 pub r: u64,
-pub cf: CF,
+pub cf: AggregationFn,
 /// Number of data points
 pub n: u64,
 }
@@ -47,7 +47,7 @@ pub struct RRAConfig {
 )]
 /// Dump the RRD file in JSON format
 pub fn dump_rrd(path: String) -> Result<(), Error> {
-let rrd = RRD::load(&PathBuf::from(path), false)?;
+let rrd = Database::load(&PathBuf::from(path), false)?;
 serde_json::to_writer_pretty(std::io::stdout(), &rrd)?;
 println!();
 Ok(())
@@ -64,7 +64,7 @@ pub fn dump_rrd(path: String) -> Result<(), Error> {
 )]
 /// RRD file information
 pub fn rrd_info(path: String) -> Result<(), Error> {
-let rrd = RRD::load(&PathBuf::from(path), false)?;
+let rrd = Database::load(&PathBuf::from(path), false)?;
 println!("DST: {:?}", rrd.source.dst);
@@ -106,7 +106,7 @@ pub fn update_rrd(path: String, time: Option<u64>, value: f64) -> Result<(), Err
 .map(|v| v as f64)
 .unwrap_or_else(proxmox_time::epoch_f64);
-let mut rrd = RRD::load(&path, false)?;
+let mut rrd = Database::load(&path, false)?;
 rrd.update(time, value);
 rrd.save(&path, CreateOptions::new(), false)?;
@@ -140,12 +140,12 @@ pub fn update_rrd(path: String, time: Option<u64>, value: f64) -> Result<(), Err
 /// Fetch data from the RRD file
 pub fn fetch_rrd(
 path: String,
-cf: CF,
+cf: AggregationFn,
 resolution: u64,
 start: Option<u64>,
 end: Option<u64>,
 ) -> Result<(), Error> {
-let rrd = RRD::load(&PathBuf::from(path), false)?;
+let rrd = Database::load(&PathBuf::from(path), false)?;
 let data = rrd.extract_data(cf, resolution, start, end)?;
@@ -169,7 +169,7 @@ pub fn fetch_rrd(
 /// Return the Unix timestamp of the first time slot inside the
 /// specified RRA (slot start time)
 pub fn first_update_time(path: String, rra_index: usize) -> Result<(), Error> {
-let rrd = RRD::load(&PathBuf::from(path), false)?;
+let rrd = Database::load(&PathBuf::from(path), false)?;
 if rra_index >= rrd.rra_list.len() {
 bail!("rra-index is out of range");
@@ -193,7 +193,7 @@ pub fn first_update_time(path: String, rra_index: usize) -> Result<(), Error> {
 )]
 /// Return the Unix timestamp of the last update
 pub fn last_update_time(path: String) -> Result<(), Error> {
-let rrd = RRD::load(&PathBuf::from(path), false)?;
+let rrd = Database::load(&PathBuf::from(path), false)?;
 println!("{}", rrd.source.last_update);
 Ok(())
@@ -210,7 +210,7 @@ pub fn last_update_time(path: String) -> Result<(), Error> {
 )]
 /// Return the time and value from the last update
 pub fn last_update(path: String) -> Result<(), Error> {
-let rrd = RRD::load(&PathBuf::from(path), false)?;
+let rrd = Database::load(&PathBuf::from(path), false)?;
 let result = json!({
 "time": rrd.source.last_update,
@@ -242,19 +242,19 @@ pub fn last_update(path: String) -> Result<(), Error> {
 },
 )]
 /// Create a new RRD file
-pub fn create_rrd(dst: DST, path: String, rra: Vec<String>) -> Result<(), Error> {
+pub fn create_rrd(dst: DataSourceType, path: String, rra: Vec<String>) -> Result<(), Error> {
 let mut rra_list = Vec::new();
 for item in rra.iter() {
 let rra: RRAConfig =
 serde_json::from_value(RRAConfig::API_SCHEMA.parse_property_string(item)?)?;
 println!("GOT {:?}", rra);
-rra_list.push(RRA::new(rra.cf, rra.r, rra.n as usize));
+rra_list.push(Archive::new(rra.cf, rra.r, rra.n as usize));
 }
 let path = PathBuf::from(path);
-let rrd = RRD::new(dst, rra_list);
+let rrd = Database::new(dst, rra_list);
 rrd.save(&path, CreateOptions::new(), false)?;
@@ -281,7 +281,7 @@ pub fn create_rrd(dst: DST, path: String, rra: Vec<String>) -> Result<(), Error>
 pub fn resize_rrd(path: String, rra_index: usize, slots: i64) -> Result<(), Error> {
 let path = PathBuf::from(&path);
-let mut rrd = RRD::load(&path, false)?;
+let mut rrd = Database::load(&path, false)?;
 if rra_index >= rrd.rra_list.len() {
 bail!("rra-index is out of range");
@@ -305,7 +305,7 @@ pub fn resize_rrd(path: String, rra_index: usize, slots: i64) -> Result<(), Erro
 .extract_data(rra_start, rra_end, rrd.source.last_update)
 .into();
-let mut new_rra = RRA::new(rra.cf, rra.resolution, new_slots as usize);
+let mut new_rra = Archive::new(rra.cf, rra.resolution, new_slots as usize);
 new_rra.last_count = rra.last_count;
 new_rra.insert_data(start, reso, data)?;
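
For reference, each entry in `rra` above is an RRA configuration property string such as cf=average,r=60,n=1440 (aggregation function, resolution in seconds, number of data points). A minimal sketch of the per-entry conversion done by create_rrd, assuming the RRAConfig fields shown above:

// mirrors the loop body in create_rrd: r = resolution in seconds, n = number of data points
fn archives_from_configs(configs: &[RRAConfig]) -> Vec<Archive> {
    configs
        .iter()
        .map(|cfg| Archive::new(cfg.cf, cfg.r, cfg.n as usize))
        .collect()
}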


@@ -12,7 +12,7 @@ use crossbeam_channel::{bounded, TryRecvError};
 use proxmox_sys::fs::{create_path, CreateOptions};
-use crate::rrd::{CF, DST, RRA, RRD};
+use crate::rrd::{AggregationFn, Archive, DataSourceType, Database};
 use crate::Entry;
 mod journal;
@@ -25,7 +25,7 @@ use rrd_map::*;
 ///
 /// This cache is designed to run as single instance (no concurrent
 /// access from other processes).
-pub struct RRDCache {
+pub struct Cache {
 config: Arc<CacheConfig>,
 state: Arc<RwLock<JournalState>>,
 rrd_map: Arc<RwLock<RRDMap>>,
@@ -38,7 +38,7 @@ pub(crate) struct CacheConfig {
 dir_options: CreateOptions,
 }
-impl RRDCache {
+impl Cache {
 /// Creates a new instance
 ///
 /// `basedir`: All files are stored relative to this path.
@@ -58,7 +58,7 @@ impl RRDCache {
 file_options: Option<CreateOptions>,
 dir_options: Option<CreateOptions>,
 apply_interval: f64,
-load_rrd_cb: fn(path: &Path, rel_path: &str, dst: DST) -> RRD,
+load_rrd_cb: fn(path: &Path, rel_path: &str, dst: DataSourceType) -> Database,
 ) -> Result<Self, Error> {
 let basedir = basedir.as_ref().to_owned();
@@ -103,23 +103,23 @@ impl RRDCache {
 /// * cf=maximum,r=7*86400,n=570 => 10year
 ///
 /// The resulting data file size is about 80KB.
-pub fn create_proxmox_backup_default_rrd(dst: DST) -> RRD {
+pub fn create_proxmox_backup_default_rrd(dst: DataSourceType) -> Database {
 let rra_list = vec![
 // 1 min * 1440 => 1 day
-RRA::new(CF::Average, 60, 1440),
-RRA::new(CF::Maximum, 60, 1440),
+Archive::new(AggregationFn::Average, 60, 1440),
+Archive::new(AggregationFn::Maximum, 60, 1440),
 // 30 min * 1440 => 30 days ~ 1 month
-RRA::new(CF::Average, 30 * 60, 1440),
-RRA::new(CF::Maximum, 30 * 60, 1440),
+Archive::new(AggregationFn::Average, 30 * 60, 1440),
+Archive::new(AggregationFn::Maximum, 30 * 60, 1440),
 // 6 h * 1440 => 360 days ~ 1 year
-RRA::new(CF::Average, 6 * 3600, 1440),
-RRA::new(CF::Maximum, 6 * 3600, 1440),
+Archive::new(AggregationFn::Average, 6 * 3600, 1440),
+Archive::new(AggregationFn::Maximum, 6 * 3600, 1440),
 // 1 week * 570 => 10 years
-RRA::new(CF::Average, 7 * 86400, 570),
-RRA::new(CF::Maximum, 7 * 86400, 570),
+Archive::new(AggregationFn::Average, 7 * 86400, 570),
+Archive::new(AggregationFn::Maximum, 7 * 86400, 570),
 ];
-RRD::new(dst, rra_list)
+Database::new(dst, rra_list)
 }
 /// Sync the journal data to disk (using `fdatasync` syscall)
@@ -186,7 +186,7 @@ impl RRDCache {
 rel_path: &str,
 time: f64,
 value: f64,
-dst: DST,
+dst: DataSourceType,
 ) -> Result<(), Error> {
 let journal_applied = self.apply_journal()?;
@@ -214,7 +214,7 @@
 &self,
 base: &str,
 name: &str,
-cf: CF,
+cf: AggregationFn,
 resolution: u64,
 start: Option<u64>,
 end: Option<u64>,


@@ -15,7 +15,7 @@ use proxmox_sys::fs::atomic_open_or_create_file;
 const RRD_JOURNAL_NAME: &str = "rrd.journal";
 use crate::cache::CacheConfig;
-use crate::rrd::DST;
+use crate::rrd::DataSourceType;
 // shared state behind RwLock
 pub struct JournalState {
@@ -29,7 +29,7 @@ pub struct JournalState {
 pub struct JournalEntry {
 pub time: f64,
 pub value: f64,
-pub dst: DST,
+pub dst: DataSourceType,
 pub rel_path: String,
 }
@@ -55,8 +55,8 @@ impl FromStr for JournalEntry {
 .map_err(|_| format_err!("unable to parse data source type"))?;
 let dst = match dst {
-0 => DST::Gauge,
-1 => DST::Derive,
+0 => DataSourceType::Gauge,
+1 => DataSourceType::Derive,
 _ => bail!("got strange value for data source type '{}'", dst),
 };
@@ -98,7 +98,7 @@ impl JournalState {
 &mut self,
 time: f64,
 value: f64,
-dst: DST,
+dst: DataSourceType,
 rel_path: &str,
 ) -> Result<(), Error> {
 let journal_entry = format!("{}:{}:{}:{}\n", time, value, dst as u8, rel_path);
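
For context, a journal line therefore has the shape time:value:dst:rel_path, with the data source type stored as its integer discriminant (0 = gauge, 1 = derive), matching the FromStr parsing above. A rough round-trip sketch (the helper names are illustrative, not part of the crate's API):

// illustrative only; mirrors the format! call and the FromStr match above
fn encode_journal_line(time: f64, value: f64, dst: DataSourceType, rel_path: &str) -> String {
    format!("{}:{}:{}:{}\n", time, value, dst as u8, rel_path)
}

fn decode_dst(raw: u8) -> Option<DataSourceType> {
    match raw {
        0 => Some(DataSourceType::Gauge),
        1 => Some(DataSourceType::Derive),
        _ => None, // the real parser bails with an error here
    }
}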


@@ -6,21 +6,21 @@ use anyhow::{bail, Error};
 use proxmox_sys::fs::create_path;
-use crate::rrd::{CF, DST, RRD};
+use crate::rrd::{AggregationFn, DataSourceType, Database};
 use super::CacheConfig;
 use crate::Entry;
 pub struct RRDMap {
 config: Arc<CacheConfig>,
-map: HashMap<String, RRD>,
-load_rrd_cb: fn(path: &Path, rel_path: &str, dst: DST) -> RRD,
+map: HashMap<String, Database>,
+load_rrd_cb: fn(path: &Path, rel_path: &str, dst: DataSourceType) -> Database,
 }
 impl RRDMap {
 pub(crate) fn new(
 config: Arc<CacheConfig>,
-load_rrd_cb: fn(path: &Path, rel_path: &str, dst: DST) -> RRD,
+load_rrd_cb: fn(path: &Path, rel_path: &str, dst: DataSourceType) -> Database,
 ) -> Self {
 Self {
 config,
@@ -34,7 +34,7 @@ impl RRDMap {
 rel_path: &str,
 time: f64,
 value: f64,
-dst: DST,
+dst: DataSourceType,
 new_only: bool,
 ) -> Result<(), Error> {
 if let Some(rrd) = self.map.get_mut(rel_path) {
@@ -84,7 +84,7 @@ impl RRDMap {
 &self,
 base: &str,
 name: &str,
-cf: CF,
+cf: AggregationFn,
 resolution: u64,
 start: Option<u64>,
 end: Option<u64>,


@@ -29,7 +29,7 @@ pub const PROXMOX_RRD_MAGIC_2_0: [u8; 8] = [224, 200, 228, 27, 239, 112, 122, 15
 #[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq)]
 #[serde(rename_all = "kebab-case")]
 /// RRD data source type
-pub enum DST {
+pub enum DataSourceType {
 /// Gauge values are stored unmodified.
 Gauge,
 /// Stores the difference to the previous value.
@@ -42,8 +42,8 @@ pub enum DST {
 #[api()]
 #[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq)]
 #[serde(rename_all = "kebab-case")]
-/// Consolidation function
-pub enum CF {
+/// Aggregation function
+pub enum AggregationFn {
 /// Average
 Average,
 /// Maximum
@@ -58,7 +58,7 @@ pub enum CF {
 /// Data source specification
 pub struct DataSource {
 /// Data source type
-pub dst: DST,
+pub dst: DataSourceType,
 /// Last update time (epoch)
 pub last_update: f64,
 /// Stores the last value, used to compute differential value for
@@ -110,7 +110,7 @@ impl From<(u64, u64, Vec<Option<f64>>)> for Entry {
 impl DataSource {
 /// Create a new Instance
-pub fn new(dst: DST) -> Self {
+pub fn new(dst: DataSourceType) -> Self {
 Self {
 dst,
 last_update: 0.0,
@@ -131,9 +131,9 @@ impl DataSource {
 }
 // derive counter value
-let is_counter = self.dst == DST::Counter;
-if is_counter || self.dst == DST::Derive {
+let is_counter = self.dst == DataSourceType::Counter;
+if is_counter || self.dst == DataSourceType::Derive {
 let time_diff = time - self.last_update;
 let diff = if self.last_value.is_nan() {
@@ -161,20 +161,20 @@ impl DataSource {
 #[derive(Serialize, Deserialize)]
 /// Round Robin Archive
-pub struct RRA {
+pub struct Archive {
 /// Number of seconds spanned by a single data entry.
 pub resolution: u64,
 /// Consolidation function.
-pub cf: CF,
+pub cf: AggregationFn,
 /// Count values computed inside this update interval.
 pub last_count: u64,
 /// The actual data entries.
 pub data: Vec<f64>,
 }
-impl RRA {
+impl Archive {
 /// Creates a new instance
-pub fn new(cf: CF, resolution: u64, points: usize) -> Self {
+pub fn new(cf: AggregationFn, resolution: u64, points: usize) -> Self {
 Self {
 cf,
 resolution,
@@ -275,22 +275,22 @@ impl RRA {
 self.last_count = 1;
 } else {
 let new_value = match self.cf {
-CF::Maximum => {
+AggregationFn::Maximum => {
 if last_value > value {
 last_value
 } else {
 value
 }
 }
-CF::Minimum => {
+AggregationFn::Minimum => {
 if last_value < value {
 last_value
 } else {
 value
 }
 }
-CF::Last => value,
-CF::Average => {
+AggregationFn::Last => value,
+AggregationFn::Average => {
 (last_value * (self.last_count as f64)) / (new_count as f64)
 + value / (new_count as f64)
 }
@@ -344,19 +344,19 @@ impl RRA {
 #[derive(Serialize, Deserialize)]
 /// Round Robin Database
-pub struct RRD {
+pub struct Database {
 /// The data source definition
 pub source: DataSource,
 /// List of round robin archives
-pub rra_list: Vec<RRA>,
+pub rra_list: Vec<Archive>,
 }
-impl RRD {
+impl Database {
 /// Creates a new Instance
-pub fn new(dst: DST, rra_list: Vec<RRA>) -> RRD {
+pub fn new(dst: DataSourceType, rra_list: Vec<Archive>) -> Database {
 let source = DataSource::new(dst);
-RRD { source, rra_list }
+Database { source, rra_list }
 }
 fn from_raw(raw: &[u8]) -> Result<Self, Error> {
@@ -364,18 +364,16 @@ impl RRD {
 bail!("not an rrd file - file is too small ({})", raw.len());
 }
-let rrd: RRD = match &raw[0..8] {
+let rrd: Database = match &raw[0..8] {
 #[cfg(feature = "rrd_v1")]
 magic if magic == crate::rrd_v1::PROXMOX_RRD_MAGIC_1_0 => {
 let v1 = crate::rrd_v1::RRDv1::from_raw(raw)?;
 v1.to_rrd_v2()
 .map_err(|err| format_err!("unable to convert from old V1 format - {err}"))?
 }
-magic if magic == PROXMOX_RRD_MAGIC_2_0 => {
-serde_cbor::from_slice(&raw[8..])
-.map_err(|err| format_err!("unable to decode RRD file - {err}"))?
-}
-_ => bail!("not an rrd file - unknown magic number")
+magic if magic == PROXMOX_RRD_MAGIC_2_0 => serde_cbor::from_slice(&raw[8..])
+.map_err(|err| format_err!("unable to decode RRD file - {err}"))?,
+_ => bail!("not an rrd file - unknown magic number"),
 };
 if rrd.source.last_update < 0.0 {
@@ -491,19 +489,19 @@
 /// Extract data from the archive
 ///
-/// This selects the RRA with specified [CF] and (minimum)
+/// This selects the RRA with specified [AggregationFn] and (minimum)
 /// resolution, and extract data from `start` to `end`.
 ///
 /// `start`: Start time. If not specified, we simply extract 10 data points.
 /// `end`: End time. Default is to use the current time.
 pub fn extract_data(
 &self,
-cf: CF,
+cf: AggregationFn,
 resolution: u64,
 start: Option<u64>,
 end: Option<u64>,
 ) -> Result<Entry, Error> {
-let mut rra: Option<&RRA> = None;
+let mut rra: Option<&Archive> = None;
 for item in self.rra_list.iter() {
 if item.cf != cf {
 continue;
@@ -538,8 +536,8 @@ mod tests {
 #[test]
 fn basic_rra_maximum_gauge_test() -> Result<(), Error> {
-let rra = RRA::new(CF::Maximum, 60, 5);
-let mut rrd = RRD::new(DST::Gauge, vec![rra]);
+let rra = Archive::new(AggregationFn::Maximum, 60, 5);
+let mut rrd = Database::new(DataSourceType::Gauge, vec![rra]);
 for i in 2..10 {
 rrd.update((i as f64) * 30.0, i as f64);
@@ -549,7 +547,7 @@ mod tests {
 start,
 resolution,
 data,
-} = rrd.extract_data(CF::Maximum, 60, Some(0), Some(5 * 60))?;
+} = rrd.extract_data(AggregationFn::Maximum, 60, Some(0), Some(5 * 60))?;
 assert_eq!(start, 0);
 assert_eq!(resolution, 60);
 assert_eq!(data, [None, Some(3.0), Some(5.0), Some(7.0), Some(9.0)]);
@@ -559,8 +557,8 @@ mod tests {
 #[test]
 fn basic_rra_minimum_gauge_test() -> Result<(), Error> {
-let rra = RRA::new(CF::Minimum, 60, 5);
-let mut rrd = RRD::new(DST::Gauge, vec![rra]);
+let rra = Archive::new(AggregationFn::Minimum, 60, 5);
+let mut rrd = Database::new(DataSourceType::Gauge, vec![rra]);
 for i in 2..10 {
 rrd.update((i as f64) * 30.0, i as f64);
@@ -570,7 +568,7 @@ mod tests {
 start,
 resolution,
 data,
-} = rrd.extract_data(CF::Minimum, 60, Some(0), Some(5 * 60))?;
+} = rrd.extract_data(AggregationFn::Minimum, 60, Some(0), Some(5 * 60))?;
 assert_eq!(start, 0);
 assert_eq!(resolution, 60);
 assert_eq!(data, [None, Some(2.0), Some(4.0), Some(6.0), Some(8.0)]);
@@ -580,15 +578,15 @@ mod tests {
 #[test]
 fn basic_rra_last_gauge_test() -> Result<(), Error> {
-let rra = RRA::new(CF::Last, 60, 5);
-let mut rrd = RRD::new(DST::Gauge, vec![rra]);
+let rra = Archive::new(AggregationFn::Last, 60, 5);
+let mut rrd = Database::new(DataSourceType::Gauge, vec![rra]);
 for i in 2..10 {
 rrd.update((i as f64) * 30.0, i as f64);
 }
 assert!(
-rrd.extract_data(CF::Average, 60, Some(0), Some(5 * 60))
+rrd.extract_data(AggregationFn::Average, 60, Some(0), Some(5 * 60))
 .is_err(),
 "CF::Average should not exist"
 );
@@ -597,7 +595,7 @@ mod tests {
 start,
 resolution,
 data,
-} = rrd.extract_data(CF::Last, 60, Some(0), Some(20 * 60))?;
+} = rrd.extract_data(AggregationFn::Last, 60, Some(0), Some(20 * 60))?;
 assert_eq!(start, 0);
 assert_eq!(resolution, 60);
 assert_eq!(data, [None, Some(3.0), Some(5.0), Some(7.0), Some(9.0)]);
@@ -607,8 +605,8 @@ mod tests {
 #[test]
 fn basic_rra_average_derive_test() -> Result<(), Error> {
-let rra = RRA::new(CF::Average, 60, 5);
-let mut rrd = RRD::new(DST::Derive, vec![rra]);
+let rra = Archive::new(AggregationFn::Average, 60, 5);
+let mut rrd = Database::new(DataSourceType::Derive, vec![rra]);
 for i in 2..10 {
 rrd.update((i as f64) * 30.0, (i * 60) as f64);
@@ -618,7 +616,7 @@ mod tests {
 start,
 resolution,
 data,
-} = rrd.extract_data(CF::Average, 60, Some(60), Some(5 * 60))?;
+} = rrd.extract_data(AggregationFn::Average, 60, Some(60), Some(5 * 60))?;
 assert_eq!(start, 60);
 assert_eq!(resolution, 60);
 assert_eq!(data, [Some(1.0), Some(2.0), Some(2.0), Some(2.0), None]);
@@ -628,8 +626,8 @@ mod tests {
 #[test]
 fn basic_rra_average_gauge_test() -> Result<(), Error> {
-let rra = RRA::new(CF::Average, 60, 5);
-let mut rrd = RRD::new(DST::Gauge, vec![rra]);
+let rra = Archive::new(AggregationFn::Average, 60, 5);
+let mut rrd = Database::new(DataSourceType::Gauge, vec![rra]);
 for i in 2..10 {
 rrd.update((i as f64) * 30.0, i as f64);
@@ -639,7 +637,7 @@ mod tests {
 start,
 resolution,
 data,
-} = rrd.extract_data(CF::Average, 60, Some(60), Some(5 * 60))?;
+} = rrd.extract_data(AggregationFn::Average, 60, Some(60), Some(5 * 60))?;
 assert_eq!(start, 60);
 assert_eq!(resolution, 60);
 assert_eq!(data, [Some(2.5), Some(4.5), Some(6.5), Some(8.5), None]);
@@ -652,7 +650,7 @@ mod tests {
 start,
 resolution,
 data,
-} = rrd.extract_data(CF::Average, 60, Some(60), Some(5 * 60))?;
+} = rrd.extract_data(AggregationFn::Average, 60, Some(60), Some(5 * 60))?;
 assert_eq!(start, 60);
 assert_eq!(resolution, 60);
 assert_eq!(data, [None, Some(4.5), Some(6.5), Some(8.5), Some(10.5)]);
@@ -661,7 +659,7 @@ mod tests {
 start,
 resolution,
 data,
-} = rrd.extract_data(CF::Average, 60, Some(3 * 60), Some(8 * 60))?;
+} = rrd.extract_data(AggregationFn::Average, 60, Some(3 * 60), Some(8 * 60))?;
 assert_eq!(start, 3 * 60);
 assert_eq!(resolution, 60);
 assert_eq!(data, [Some(6.5), Some(8.5), Some(10.5), Some(12.5), None]);
@@ -675,7 +673,12 @@ mod tests {
 start,
 resolution,
 data,
-} = rrd.extract_data(CF::Average, 60, Some(100 * 30), Some(100 * 30 + 5 * 60))?;
+} = rrd.extract_data(
+AggregationFn::Average,
+60,
+Some(100 * 30),
+Some(100 * 30 + 5 * 60),
+)?;
 assert_eq!(start, 100 * 30);
 assert_eq!(resolution, 60);
 assert_eq!(data, [Some(100.0), None, None, None, None]);
@@ -685,7 +688,7 @@ mod tests {
 start,
 resolution,
 data,
-} = rrd.extract_data(CF::Average, 60, Some(100 * 30), Some(60))?;
+} = rrd.extract_data(AggregationFn::Average, 60, Some(100 * 30), Some(60))?;
 assert_eq!(start, 100 * 30);
 assert_eq!(resolution, 60);
 assert_eq!(data, []);
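
Taken together, a minimal sketch of the renamed public API, closely mirroring the maximum-gauge unit test above (the function name is illustrative):

use anyhow::Error;
use proxmox_rrd::rrd::{AggregationFn, Archive, DataSourceType, Database};

fn sample_roundtrip() -> Result<(), Error> {
    // one archive: five 60 s slots keeping the per-slot maximum
    let rra = Archive::new(AggregationFn::Maximum, 60, 5);
    let mut rrd = Database::new(DataSourceType::Gauge, vec![rra]);

    // feed a gauge sample every 30 seconds
    for i in 2..10 {
        rrd.update((i as f64) * 30.0, i as f64);
    }

    // pick the Maximum archive with 60 s resolution and extract a 5-minute window;
    // the returned Entry carries the window start, the resolution and the data points
    let _entry = rrd.extract_data(AggregationFn::Maximum, 60, Some(0), Some(5 * 60))?;
    Ok(())
}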


@@ -10,7 +10,7 @@ pub const RRD_DATA_ENTRIES: usize = 70;
 // openssl::sha::sha256(b"Proxmox Round Robin Database file v1.0")[0..8];
 pub const PROXMOX_RRD_MAGIC_1_0: [u8; 8] = [206, 46, 26, 212, 172, 158, 5, 186];
-use crate::rrd::{DataSource, CF, DST, RRA, RRD};
+use crate::rrd::{AggregationFn, Archive, DataSource, DataSourceType, Database};
 bitflags! {
 /// Flags to specify the data source type and consolidation function
@@ -30,7 +30,7 @@ bitflags! {
 /// Round Robin Archive with [RRD_DATA_ENTRIES] data slots.
 ///
-/// This data structure is used inside [RRD] and directly written to the
+/// This data structure is used inside [Database] and directly written to the
 /// RRD files.
 #[repr(C)]
 pub struct RRAv1 {
@@ -75,7 +75,7 @@ impl RRAv1 {
 }
 }
-/// Round Robin Database file format with fixed number of [RRA]s
+/// Round Robin Database file format with fixed number of [Archive]s
 #[repr(C)]
 // Note: Avoid alignment problems by using 8byte types only
 pub struct RRDv1 {
@@ -127,7 +127,7 @@ impl RRDv1 {
 Ok(rrd)
 }
-pub fn to_rrd_v2(&self) -> Result<RRD, Error> {
+pub fn to_rrd_v2(&self) -> Result<Database, Error> {
 let mut rra_list = Vec::new();
 // old format v1:
@@ -194,7 +194,7 @@ impl RRDv1 {
 // Try to convert to new, higher capacity format
 // compute daily average (merge old self.day_avg and self.hour_avg
-let mut day_avg = RRA::new(CF::Average, 60, 1440);
+let mut day_avg = Archive::new(AggregationFn::Average, 60, 1440);
 let (start, reso, data) = self.day_avg.extract_data();
 let (start, reso, data) = extrapolate_data(start, reso, 30, data);
@@ -204,7 +204,7 @@ impl RRDv1 {
 day_avg.insert_data(start, reso, data)?;
 // compute daily maximum (merge old self.day_max and self.hour_max
-let mut day_max = RRA::new(CF::Maximum, 60, 1440);
+let mut day_max = Archive::new(AggregationFn::Maximum, 60, 1440);
 let (start, reso, data) = self.day_max.extract_data();
 let (start, reso, data) = extrapolate_data(start, reso, 30, data);
@@ -215,7 +215,7 @@ impl RRDv1 {
 // compute monthly average (merge old self.month_avg,
 // self.week_avg and self.day_avg)
-let mut month_avg = RRA::new(CF::Average, 30 * 60, 1440);
+let mut month_avg = Archive::new(AggregationFn::Average, 30 * 60, 1440);
 let (start, reso, data) = self.month_avg.extract_data();
 let (start, reso, data) = extrapolate_data(start, reso, 24, data);
@@ -230,7 +230,7 @@ impl RRDv1 {
 // compute monthly maximum (merge old self.month_max,
 // self.week_max and self.day_max)
-let mut month_max = RRA::new(CF::Maximum, 30 * 60, 1440);
+let mut month_max = Archive::new(AggregationFn::Maximum, 30 * 60, 1440);
 let (start, reso, data) = self.month_max.extract_data();
 let (start, reso, data) = extrapolate_data(start, reso, 24, data);
@@ -244,26 +244,26 @@ impl RRDv1 {
 month_max.insert_data(start, reso, data)?;
 // compute yearly average (merge old self.year_avg)
-let mut year_avg = RRA::new(CF::Average, 6 * 3600, 1440);
+let mut year_avg = Archive::new(AggregationFn::Average, 6 * 3600, 1440);
 let (start, reso, data) = self.year_avg.extract_data();
 let (start, reso, data) = extrapolate_data(start, reso, 28, data);
 year_avg.insert_data(start, reso, data)?;
 // compute yearly maximum (merge old self.year_avg)
-let mut year_max = RRA::new(CF::Maximum, 6 * 3600, 1440);
+let mut year_max = Archive::new(AggregationFn::Maximum, 6 * 3600, 1440);
 let (start, reso, data) = self.year_max.extract_data();
 let (start, reso, data) = extrapolate_data(start, reso, 28, data);
 year_max.insert_data(start, reso, data)?;
 // compute decade average (merge old self.year_avg)
-let mut decade_avg = RRA::new(CF::Average, 7 * 86400, 570);
+let mut decade_avg = Archive::new(AggregationFn::Average, 7 * 86400, 570);
 let (start, reso, data) = self.year_avg.extract_data();
 decade_avg.insert_data(start, reso, data)?;
 // compute decade maximum (merge old self.year_max)
-let mut decade_max = RRA::new(CF::Maximum, 7 * 86400, 570);
+let mut decade_max = Archive::new(AggregationFn::Maximum, 7 * 86400, 570);
 let (start, reso, data) = self.year_max.extract_data();
 decade_max.insert_data(start, reso, data)?;
@@ -278,11 +278,11 @@ impl RRDv1 {
 // use values from hour_avg for source (all RRAv1 must have the same config)
 let dst = if self.hour_avg.flags.contains(RRAFlags::DST_COUNTER) {
-DST::Counter
+DataSourceType::Counter
 } else if self.hour_avg.flags.contains(RRAFlags::DST_DERIVE) {
-DST::Derive
+DataSourceType::Derive
 } else {
-DST::Gauge
+DataSourceType::Gauge
 };
 let source = DataSource {
@@ -290,6 +290,6 @@ impl RRDv1 {
 last_value: f64::NAN,
 last_update: self.hour_avg.last_update, // IMPORTANT!
 };
-Ok(RRD { source, rra_list })
+Ok(Database { source, rra_list })
 }
 }


@@ -3,7 +3,7 @@ use std::process::Command;
 use anyhow::{bail, Error};
-use proxmox_rrd::rrd::RRD;
+use proxmox_rrd::rrd::Database;
 use proxmox_sys::fs::CreateOptions;
 fn compare_file(fn1: &str, fn2: &str) -> Result<(), Error> {
@@ -27,7 +27,7 @@ const RRD_V2_FN: &str = "./tests/testdata/cpu.rrd_v2";
 #[cfg(feature = "rrd_v1")]
 fn upgrade_from_rrd_v1() -> Result<(), Error> {
 const RRD_V1_FN: &str = "./tests/testdata/cpu.rrd_v1";
-let rrd = RRD::load(Path::new(RRD_V1_FN), true)?;
+let rrd = Database::load(Path::new(RRD_V1_FN), true)?;
 const RRD_V2_NEW_FN: &str = "./tests/testdata/cpu.rrd_v2.upgraded";
 let new_path = Path::new(RRD_V2_NEW_FN);
@@ -43,7 +43,7 @@ fn upgrade_from_rrd_v1() -> Result<(), Error> {
 // make sure we can load and save RRD v2
 #[test]
 fn load_and_save_rrd_v2() -> Result<(), Error> {
-let rrd = RRD::load(Path::new(RRD_V2_FN), true)?;
+let rrd = Database::load(Path::new(RRD_V2_FN), true)?;
 const RRD_V2_NEW_FN: &str = "./tests/testdata/cpu.rrd_v2.saved";
 let new_path = Path::new(RRD_V2_NEW_FN);