1
0
mirror of https://github.com/dkmstr/openuds.git synced 2024-12-23 17:34:17 +03:00

Fixing up stats counters accumulators

This commit is contained in:
Adolfo Gómez García 2022-11-09 23:02:41 +01:00
parent 1cf2e2cd76
commit 721d32c972
No known key found for this signature in database
GPG Key ID: DD1ABF20724CDA23
3 changed files with 30 additions and 13 deletions

View File

@ -463,7 +463,20 @@ class GlobalConfig:
# How long (in days) statistics are kept before being purged
STATS_DURATION: Config.Value = Config.section(GLOBAL_SECTION).value(
'statsDuration', '365', type=Config.NUMERIC_FIELD
)
# Statistics accumulation frequency, in seconds (default: every 4 hours)
STATS_ACCUM_FREQUENCY: Config.Value = Config.section(GLOBAL_SECTION).value(
'statsAccumFrequency',
'14400',
type=Config.NUMERIC_FIELD,
)
# Statistics accumulation chunk size, in days (max span processed per accumulation run)
STATS_ACCUM_MAX_CHUNK_TIME = Config.section(GLOBAL_SECTION).value(
'statsAccumMaxChunkTime',
'7',
type=Config.NUMERIC_FIELD,
)
# If set, disallow direct login via the /login url showing all authenticators;
# users must go through a specific authenticator instead
DISALLOW_GLOBAL_LOGIN: Config.Value = Config.section(GLOBAL_SECTION).value(
'disallowGlobalLogin', '0', type=Config.BOOLEAN_FIELD
)
@ -591,6 +604,7 @@ class GlobalConfig:
for v in GlobalConfig.__dict__.values():
if isinstance(v, Config.Value):
v.get()
logger.debug('Initialized global config value %s=%s', v.key(), v.get())
for c in _getLater:
logger.debug('Get later: %s', c)

View File

@ -1,6 +1,8 @@
# Generated by Django 3.2.10 on 2022-07-04 21:20
from django.db import migrations, models
from django.db import connection
import django.db.models.deletion
import uds.models.stats_counters_accum
@ -8,21 +10,20 @@ import uds.models.stats_counters_accum
# InnoDB is tremendously slow when using these tables
def forwards(apps, schema_editor):
    """Switch the UDS stats tables to the MyISAM engine on MySQL.

    InnoDB is very slow for these append-heavy statistics tables, so when
    the database backend is MySQL and a table is still using InnoDB, it is
    converted to MyISAM. On any other database vendor this is a no-op.

    Best-effort: any failure is swallowed so the migration never blocks.
    """
    try:
        # Only MySQL needs (and supports) this engine change
        if connection.vendor != 'mysql':
            return
        cursor = connection.cursor()
        tables_to_change = ['uds_stats_c', 'uds_stats_c_accum']
        for table in tables_to_change:
            # Look up the current engine; only convert tables still on InnoDB
            cursor.execute(
                'SELECT ENGINE FROM information_schema.TABLES WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = %s',
                [table],
            )
            row = cursor.fetchone()
            # Guard against a missing table: fetchone() returns None then,
            # and the original `fetchone()[0]` would raise TypeError
            if row is not None and row[0] == 'InnoDB':
                # Table name comes from the fixed list above, never from user input
                cursor.execute(f'ALTER TABLE {table} ENGINE=MyISAM')
    except Exception:  # nosec: fine — best-effort optimization, never block the migration
        pass

View File

@ -216,7 +216,7 @@ class StatsCountersAccum(models.Model):
)
"""Stores accumulated data in StatsCountersAccum"""
# Accumulate data
# Accumulate data, only register rows that actually contain data
accumulated: typing.List[StatsCountersAccum] = [
StatsCountersAccum(
owner_type=rec['owner_type'],
@ -230,12 +230,14 @@ class StatsCountersAccum(models.Model):
v_min=rec['min'],
v_max=rec['max'],
)
for rec in query
for rec in query if rec['sum'] and rec['min'] and rec['max']
]
logger.debug('Inserting %s records', len(accumulated))
# If we have more than 20 inserts, do it in a single query
StatsCountersAccum.objects.bulk_create(accumulated)
# Insert in chunks of 2500 records
while accumulated:
StatsCountersAccum.objects.bulk_create(accumulated[:2500])
accumulated = accumulated[2500:]
def __str__(self) -> str:
    """Human-readable form: timestamp, owner triple, interval type and the accumulated values."""
    when = datetime.datetime.fromtimestamp(self.stamp)
    interval = StatsCountersAccum.IntervalType(self.interval_type)
    values = f'{self.v_count},{self.v_sum},{self.v_min},{self.v_max}'
    return f'{when} - {self.owner_type}:{self.owner_id}:{self.counter_type} {interval} {values}'