
Fixing up stats. Removed "zeros" from DB

Commit 4bf76143f2, authored by Adolfo Gómez García on 2022-11-09 23:01:17 +01:00
6 changed files with 38 additions and 30 deletions

View File

@@ -41,6 +41,7 @@ from ...fixtures import stats_counters as fixtures_stats_counters
 from uds.core.workers import stats_collector
 from uds.core.environment import Environment
+from uds.core.util import config

 START_DATE = datetime.datetime(2009, 12, 4, 0, 0, 0)
@@ -79,7 +80,7 @@ class StatsAcummulatorTest(UDSTransactionTestCase):
         )
         # Setup worker
-        stats_collector.STATS_ACCUM_MAX_CHUNK_TIME.set(DAYS // 2 + 1)
+        config.GlobalConfig.STATS_ACCUM_MAX_CHUNK_TIME.set(DAYS // 2 + 1)
         stats_collector.StatsAccumulator.setup()

     def test_stats_accumulator(self):

View File

@@ -73,7 +73,7 @@ class StatsManager(metaclass=singleton.Singleton):
     def manager() -> 'StatsManager':
         return StatsManager()  # Singleton pattern will return always the same instance

-    def __doCleanup(self, model: typing.Type[typing.Union['StatsCounters', 'StatsEvents']]) -> None:
+    def __doCleanup(self, model: typing.Type[typing.Union['StatsCounters', 'StatsEvents', 'StatsCountersAccum']]) -> None:
         minTime = time.mktime(
             (
                 getSqlDatetime()
@@ -178,6 +178,7 @@ class StatsManager(metaclass=singleton.Singleton):
         Removes all counters previous to configured max keep time for stat information from database.
         """
         self.__doCleanup(StatsCounters)
+        self.__doCleanup(StatsCountersAccum)

     def getEventFldFor(self, fld: str) -> str:
         '''
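Note: with 'StatsCountersAccum' added to the typing union, the same cleanup path now prunes the accumulated table as well. Below is a minimal, hypothetical sketch of the pruning pattern __doCleanup implements (the function name and keep-time parameter are illustrative; rows carry a Unix timestamp in 'stamp', as the migration further down defines):

import time
import typing

from django.db import models


def prune_older_than(model: typing.Type[models.Model], keep_seconds: int) -> None:
    # Delete every row whose 'stamp' (a Unix timestamp) predates the cutoff
    cutoff = int(time.time()) - keep_seconds
    model.objects.filter(stamp__lt=cutoff).delete()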

View File

@@ -559,6 +559,21 @@ class GlobalConfig:
         type=Config.NUMERIC_FIELD,
         help=_('Statistics duration, in days'),
     )
+    # Statistics accumulation frequency, in seconds
+    STATS_ACCUM_FREQUENCY: Config.Value = Config.section(GLOBAL_SECTION).value(
+        'statsAccumFrequency',
+        '14400',
+        type=Config.NUMERIC_FIELD,
+        help=_('Frequency of stats collection in seconds. Default is 4 hours (14400 seconds)'),
+    )
+    # Statistics accumulation chunk size, in days
+    STATS_ACCUM_MAX_CHUNK_TIME = Config.section(GLOBAL_SECTION).value(
+        'statsAccumMaxChunkTime',
+        '7',
+        type=Config.NUMERIC_FIELD,
+        help=_('Maximum number of days to accumulate in one run. Default is 7 (1 week)'),
+    )
     # If disallow login showing authenticators
     DISALLOW_GLOBAL_LOGIN: Config.Value = Config.section(GLOBAL_SECTION).value(
         'disallowGlobalLogin',
@@ -765,6 +780,7 @@ class GlobalConfig:
         for v in GlobalConfig.__dict__.values():
             if isinstance(v, Config.Value):
                 v.get()
+                logger.debug('Initialized global config value %s=%s', v.key(), v.get())

         for c in _getLater:
             logger.debug('Get later: %s', c)
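Note: values declared on GlobalConfig are ordinary Config.Value objects, so the two new settings are read and written like any other. A short usage sketch based only on the accessors visible elsewhere in this commit (set() in the test above, getInt() in the worker below):

from uds.core.util import config

# Defaults are stored as strings ('14400', '7'); getInt() parses them
freq = config.GlobalConfig.STATS_ACCUM_FREQUENCY.getInt()             # 14400 by default
chunk_days = config.GlobalConfig.STATS_ACCUM_MAX_CHUNK_TIME.getInt()  # 7 by default

# Tests override a value in place, as the first file above does:
config.GlobalConfig.STATS_ACCUM_MAX_CHUNK_TIME.set(4)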

View File

@@ -44,19 +44,6 @@ from uds.core.util import config

 logger = logging.getLogger(__name__)

-# Early declaration of config variable
-STATS_ACCUM_FREQUENCY = config.Config.section(config.ADMIN_SECTION).value(
-    'Stats Accumulation Frequency',
-    '14400',
-    type=config.Config.NUMERIC_FIELD,
-    help=_('Frequency of stats collection in seconds. Default is 4 hours (14400 seconds)'),
-)
-STATS_ACCUM_MAX_CHUNK_TIME = config.Config.section(config.ADMIN_SECTION).value(
-    'Stats Accumulation Chunk',
-    '7',
-    type=config.Config.NUMERIC_FIELD,
-    help=_('Maximum number of time to accumulate on one run. Default is 7 (1 week)'),
-)

 class DeployedServiceStatsCollector(Job):
     """
@@ -179,13 +166,13 @@
     """
     frecuency = 3600  # Fallback; the effective period comes from frecuency_cfg (default 14400 s, i.e. 4 hours)
     frecuency_cfg = (
-        STATS_ACCUM_FREQUENCY
+        config.GlobalConfig.STATS_ACCUM_FREQUENCY
     )
     friendly_name = 'Statistics acummulator'

     def run(self):
         try:
-            StatsManager.manager().acummulate(STATS_ACCUM_MAX_CHUNK_TIME.getInt())
+            StatsManager.manager().acummulate(config.GlobalConfig.STATS_ACCUM_MAX_CHUNK_TIME.getInt())
         except Exception as e:
             logger.exception('Compressing counters')

View File

@@ -1,6 +1,8 @@
 # Generated by Django 3.2.10 on 2022-07-04 21:20

 from django.db import migrations, models
+from django.db import connection
 import django.db.models.deletion
 import uds.models.stats_counters_accum
@@ -8,21 +10,20 @@ import uds.models.stats_counters_accum

 # InnoDB is tremendously slow when using this table
 def forwards(apps, schema_editor):
     try:
-        from django.db import connection
-
         # If we are not using MySQL, do nothing
         if connection.vendor != 'mysql':
             return

         cursor = connection.cursor()
-        # Check current table type, if it is not InnoDB, do nothing
-        cursor.execute(
-            'SELECT ENGINE FROM information_schema.TABLES WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = "uds_stats_c"'
-        )
-        if cursor.fetchone()[0] == 'InnoDB':  # type: ignore
+        tables_to_change = ['uds_stats_c', 'uds_stats_c_accum']
+        for table in tables_to_change:
+            # Check current table type, if it is not InnoDB, do nothing
             cursor.execute(
-                'ALTER TABLE uds_stats_c ENGINE=MyISAM'
+                'SELECT ENGINE FROM information_schema.TABLES WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = %s',
+                [table],
             )
+            if cursor.fetchone()[0] == 'InnoDB':  # type: ignore
+                cursor.execute(f'ALTER TABLE {table} ENGINE=MyISAM')
     except Exception:  # nosec: fine
         pass
@@ -97,7 +98,7 @@ class Migration(migrations.Migration):
                 ('owner_id', models.IntegerField(default=0)),
                 ('owner_type', models.SmallIntegerField(default=0)),
                 ('counter_type', models.SmallIntegerField(default=0)),
-                ('interval_type', models.SmallIntegerField(choices=[(2, 'HOUR'), (3, 'DAY')], default=uds.models.stats_counters_accum.StatsCountersAccum.IntervalType['HOUR'])),
+                ('interval_type', models.SmallIntegerField(choices=[(1, 'HOUR'), (2, 'DAY')], default=uds.models.stats_counters_accum.StatsCountersAccum.IntervalType['HOUR'])),
                 ('stamp', models.IntegerField(default=0)),
                 ('v_count', models.IntegerField(default=0)),
                 ('v_sum', models.IntegerField(default=0)),
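Note: the rewritten forwards() parameterizes the information_schema lookup and loops over both stats tables instead of hard-coding 'uds_stats_c'. A standalone sketch of the same pattern, with an extra guard for fetchone() returning None (the original relies on the surrounding try/except for that case):

from django.db import connection


def convert_to_myisam(tables):
    # The engine swap only makes sense on MySQL/MariaDB; skip other backends
    if connection.vendor != 'mysql':
        return
    with connection.cursor() as cursor:
        for table in tables:
            cursor.execute(
                'SELECT ENGINE FROM information_schema.TABLES '
                'WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = %s',
                [table],
            )
            row = cursor.fetchone()
            if row and row[0] == 'InnoDB':
                # Identifiers cannot be bound as query parameters; the
                # f-string is safe because names come from a fixed list
                cursor.execute(f'ALTER TABLE {table} ENGINE=MyISAM')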

View File

@@ -216,7 +216,7 @@ class StatsCountersAccum(models.Model):
         )

         """Stores accumulated data in StatsCountersAccum"""
-        # Acummulate data
+        # Acummulate data, only register if there is data
         accumulated: typing.List[StatsCountersAccum] = [
             StatsCountersAccum(
                 owner_type=rec['owner_type'],
@@ -230,12 +230,14 @@
                 v_min=rec['min'],
                 v_max=rec['max'],
             )
-            for rec in query
+            for rec in query if rec['sum'] and rec['min'] and rec['max']
         ]

         logger.debug('Inserting %s records', len(accumulated))
-        # If we have more than 20 inserts, do it in a single query
-        StatsCountersAccum.objects.bulk_create(accumulated)
+        # Insert in chunks of 2500 records
+        while accumulated:
+            StatsCountersAccum.objects.bulk_create(accumulated[:2500])
+            accumulated = accumulated[2500:]

     def __str__(self) -> str:
         return f'{datetime.datetime.fromtimestamp(self.stamp)} - {self.owner_type}:{self.owner_id}:{self.counter_type} {StatsCountersAccum.IntervalType(self.interval_type)} {self.v_count},{self.v_sum},{self.v_min},{self.v_max}'
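Note: the chunked loop bounds the size of each INSERT statement when many accumulated rows are pending. A minimal sketch of the pattern; Django's built-in batch_size argument to bulk_create achieves the same effect in a single call:

# Loop form used in the commit: consumes the list as it inserts
CHUNK = 2500
while accumulated:
    StatsCountersAccum.objects.bulk_create(accumulated[:CHUNK])
    accumulated = accumulated[CHUNK:]

# Equivalent one-liner using Django's own batching:
# StatsCountersAccum.objects.bulk_create(accumulated, batch_size=2500)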