mirror of https://github.com/dkmstr/openuds.git synced 2025-01-03 01:17:56 +03:00

Refactor StatsManager and StatsCountersAccum for improved data handling and logging

Author: Adolfo Gómez García
Date:   2024-11-11 18:12:49 +01:00
Parent: 7311044165
Commit: 93db2388bb
GPG signature: No known key found for this signature in database (GPG Key ID: DD1ABF20724CDA23)

3 changed files with 31 additions and 35 deletions

File 1 of 3:

@@ -214,13 +214,17 @@ class StatsManager(metaclass=singleton.Singleton):
         # Yields all data, stamp, n, sum, max, min (stamp, v_count,v_sum,v_max,v_min)
         # Now, get exactly the points we need
+        # Note that empty values were not saved, so we can find "holes" in the data
+        # that will be filled with empty values
         stamp = since
         last = types.stats.AccumStat(stamp, 0, 0, 0, 0)
         for rec in query:
             # While query stamp is greater than stamp, repeat last AccumStat
             while rec.stamp > stamp:
-                # Yield last value until we reach the record
-                yield last
+                # No values, return empty
+                yield types.stats.AccumStat(stamp, 0, 0, 0, 0)
                 stamp += interval_type.seconds()
                 last.stamp = stamp
@@ -236,17 +240,8 @@ class StatsManager(metaclass=singleton.Singleton):
                 yield last
                 stamp += interval_type.seconds()
-        # Complete the serie until to
-        last = types.stats.AccumStat(
-            stamp,
-            0,
-            0,
-            0,
-            0,
-        )
         while stamp < to:
-            yield last
+            yield types.stats.AccumStat(stamp, 0, 0, 0, 0)
             stamp += interval_type.seconds()
             last.stamp = stamp
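
The net effect of the two hunks above: intervals with no stored samples ("holes") are now emitted as fresh zero-valued AccumStat entries instead of repeating the last accumulated value, both between records and when completing the series up to `to`. Below is a minimal, self-contained sketch of that gap-filling pattern; the AccumStat dataclass and the fill_gaps helper are simplified stand-ins for illustration, not the actual OpenUDS types or method.

import dataclasses
import typing

@dataclasses.dataclass
class AccumStat:
    # Stand-in for types.stats.AccumStat: a stamp plus count/sum/max/min
    stamp: int
    n: int = 0
    sum: int = 0
    max: int = 0
    min: int = 0

def fill_gaps(
    records: typing.Iterable[AccumStat], since: int, to: int, interval: int
) -> typing.Iterator[AccumStat]:
    # Yield one AccumStat per interval step in [since, to); steps without
    # stored data yield an empty (all-zero) stat instead of repeating the
    # previous value.
    stamp = since
    for rec in records:  # records assumed ordered by stamp
        while rec.stamp > stamp:
            yield AccumStat(stamp)  # hole: nothing was saved for this step
            stamp += interval
        yield rec
        stamp += interval
    while stamp < to:  # complete the series up to 'to' with empty values
        yield AccumStat(stamp)
        stamp += interval

# Data only at stamps 0 and 20, queried from 0 to 50 with 10-second steps:
data = [AccumStat(0, 3, 12, 7, 1), AccumStat(20, 1, 5, 5, 5)]
print([(s.stamp, s.n) for s in fill_gaps(data, since=0, to=50, interval=10)])
# -> [(0, 3), (10, 0), (20, 1), (30, 0), (40, 0)]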

File 2 of 3:

@@ -142,15 +142,13 @@ class StatsCountersAccum(models.Model):
             type['StatsCountersAccum'],
             type['StatsCounters'],
         ]
-        # If base interval, we will use StatsCounters to create the accum
+        # If base interval (that menas an inteval that must be readed from stats_c),
+        # we will use StatsCounters to create the accum
         # Else, we will use StatsCountersAccum to create the accum from previous interval
         # (for example, to create daily accum from hourly data)
-        if interval_type.is_base_interval():
-            model = StatsCounters
-        else:
-            model = StatsCountersAccum
-        # Accumulate HOURS from StatsCounters
+        model = StatsCounters if interval_type.is_base_interval() else StatsCountersAccum
+        # Accumulate INTERVAL from StatsCounters
         interval = interval_type.seconds()
         # Get last stamp in table for this interval_type
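
The hunk above collapses the if/else into a conditional expression: a base interval is accumulated straight from the raw StatsCounters rows, while larger intervals are built from the already-accumulated rows of the previous level (for example, daily buckets from hourly ones). A hedged sketch of that selection logic follows; the IntervalType enum and the two placeholder classes are simplified stand-ins, not the real OpenUDS models.

import enum

class IntervalType(enum.IntEnum):
    # Stand-in interval types; the value is the bucket size in seconds
    HOUR = 3600
    DAY = 86400

    def is_base_interval(self) -> bool:
        # Assumption for this sketch: hourly is the base interval,
        # i.e. it is filled directly from the raw counters table
        return self is IntervalType.HOUR

    def seconds(self) -> int:
        return int(self)

class StatsCounters: ...       # placeholder for the raw counters model
class StatsCountersAccum: ...  # placeholder for the accumulated model

def source_model(interval_type: IntervalType) -> type:
    # Base interval: read raw counters; otherwise accumulate from the
    # previous, already-accumulated level (e.g. daily from hourly)
    return StatsCounters if interval_type.is_base_interval() else StatsCountersAccum

assert source_model(IntervalType.HOUR) is StatsCounters
assert source_model(IntervalType.DAY) is StatsCountersAccum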
@@ -207,6 +205,7 @@ class StatsCountersAccum(models.Model):
                     'counter_type': 'counter_type',
                 },
             )
             .values('group_by_stamp', 'owner_id', 'owner_type', 'counter_type')
         )
@@ -226,6 +225,8 @@ class StatsCountersAccum(models.Model):
                 sum=models.Sum('v_sum'),
             )
+        logger.debug('Query: %s', query.query)
         # Stores accumulated data in StatsCountersAccum
         # Acummulate data, only register if there is data
         accumulated: list[StatsCountersAccum] = [
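
The functional addition in this hunk is the debug line logging `query.query`. In Django, a QuerySet's `query` attribute renders the (approximate) SQL when converted to a string, which is what the `%s` interpolation relies on here. A small illustration of the same idea, assuming an arbitrary Django model with `owner_id` and `v_sum` fields rather than the actual OpenUDS schema:

import logging

from django.db.models import Sum

logger = logging.getLogger(__name__)

def log_accumulation_query(model: type) -> None:
    # Build an aggregation queryset and log the SQL Django would execute.
    # 'model' is assumed to be any Django model exposing owner_id and v_sum.
    query = model.objects.values('owner_id').annotate(sum=Sum('v_sum'))
    # str(query.query) produces the approximate SQL, without bound parameters
    logger.debug('Query: %s', query.query)

Neither accessing `query.query` nor passing it to the logger executes the queryset, so the added line only costs anything when debug logging is enabled.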

File 3 of 3:

@@ -33,7 +33,7 @@ import logging
 from uds import models
 from uds.core.util import model
-from uds.core.types.states import State
+from uds.core import types
 from uds.core.util.stats import counters
 from uds.core.managers.stats import StatsManager
 from uds.core.jobs import Job
@@ -54,32 +54,32 @@ class DeployedServiceStatsCollector(Job):
     def run(self) -> None:
         logger.debug('Starting Deployed service stats collector')
-        service_pool_to_check = models.ServicePool.objects.filter(state=State.ACTIVE).iterator()
+        service_pool_to_check = models.ServicePool.objects.filter(state=types.states.State.ACTIVE).iterator()
         stamp = model.sql_now()
         # Global counters
         total_assigned, total_inuse, total_cached = 0, 0, 0
         for service_pool in service_pool_to_check:
             try:
                 fltr = service_pool.assigned_user_services().exclude(
-                    state__in=State.INFO_STATES
+                    state__in=types.states.State.INFO_STATES
                 )
                 assigned = fltr.count()
                 in_use = fltr.filter(in_use=True).count()
                 # Cached user services
                 cached = (
                     service_pool.cached_users_services()
-                    .exclude(state__in=State.INFO_STATES)
+                    .exclude(state__in=types.states.State.INFO_STATES)
                     .count()
                 )
                 total_assigned += assigned
                 total_inuse += in_use
                 total_cached += cached
                 counters.add_counter(
-                    service_pool, counters.types.stats.CounterType.ASSIGNED, assigned, stamp=stamp
+                    service_pool, types.stats.CounterType.ASSIGNED, assigned, stamp=stamp
                 )
-                counters.add_counter(service_pool, counters.types.stats.CounterType.INUSE, in_use, stamp=stamp)
+                counters.add_counter(service_pool, types.stats.CounterType.INUSE, in_use, stamp=stamp)
                 counters.add_counter(
-                    service_pool, counters.types.stats.CounterType.CACHED, cached, stamp=stamp
+                    service_pool, types.stats.CounterType.CACHED, cached, stamp=stamp
                 )
             except Exception:
                 logger.exception(
@@ -88,14 +88,14 @@ class DeployedServiceStatsCollector(Job):
         # Store a global "fake pool" with all stats
         sp = models.ServicePool()
         sp.id = -1
-        counters.add_counter(sp, counters.types.stats.CounterType.ASSIGNED, total_assigned, stamp=stamp)
-        counters.add_counter(sp, counters.types.stats.CounterType.INUSE, total_inuse, stamp=stamp)
-        counters.add_counter(sp, counters.types.stats.CounterType.CACHED, total_cached, stamp=stamp)
+        counters.add_counter(sp, types.stats.CounterType.ASSIGNED, total_assigned, stamp=stamp)
+        counters.add_counter(sp, types.stats.CounterType.INUSE, total_inuse, stamp=stamp)
+        counters.add_counter(sp, types.stats.CounterType.CACHED, total_cached, stamp=stamp)
         total_users, total_assigned, total_users_with_service = 0, 0, 0
         for auth in models.Authenticator.objects.all():
             fltr_user = auth.users.filter(userServices__isnull=False).exclude(
-                userServices__state__in=State.INFO_STATES
+                userServices__state__in=types.states.State.INFO_STATES
             ).order_by()
             users = auth.users.all().count()
             users_with_service = fltr_user.values('id').distinct().count()  # Use "values" to simplify query (only id)
@@ -105,23 +105,23 @@ class DeployedServiceStatsCollector(Job):
             total_assigned += number_assigned_services
             total_users_with_service += users_with_service
-            counters.add_counter(auth, counters.types.stats.CounterType.AUTH_USERS, users, stamp=stamp)
+            counters.add_counter(auth, types.stats.CounterType.AUTH_USERS, users, stamp=stamp)
             counters.add_counter(
-                auth, counters.types.stats.CounterType.AUTH_SERVICES, number_assigned_services, stamp=stamp
+                auth, types.stats.CounterType.AUTH_SERVICES, number_assigned_services, stamp=stamp
             )
             counters.add_counter(
                 auth,
-                counters.types.stats.CounterType.AUTH_USERS_WITH_SERVICES,
+                types.stats.CounterType.AUTH_USERS_WITH_SERVICES,
                 users_with_service,
                 stamp=stamp,
             )
         au = models.Authenticator()
         au.id = -1
-        counters.add_counter(au, counters.types.stats.CounterType.AUTH_USERS, total_users, stamp=stamp)
-        counters.add_counter(au, counters.types.stats.CounterType.AUTH_SERVICES, total_assigned, stamp=stamp)
+        counters.add_counter(au, types.stats.CounterType.AUTH_USERS, total_users, stamp=stamp)
+        counters.add_counter(au, types.stats.CounterType.AUTH_SERVICES, total_assigned, stamp=stamp)
         counters.add_counter(
-            au, counters.types.stats.CounterType.AUTH_USERS_WITH_SERVICES, total_users_with_service, stamp=stamp
+            au, types.stats.CounterType.AUTH_USERS_WITH_SERVICES, total_users_with_service, stamp=stamp
         )
         logger.debug('Done Deployed service stats collector')