mirror of https://github.com/dkmstr/openuds.git
synced 2025-01-03 01:17:56 +03:00
Refactor variable names for better readability and consistency
Refactor stats_collector.py for improved performance and readability
Add ITEMS_LIMIT constant to rest.py
Add AccumStat dataclass to core/types/stats.py
Update get_servicepools_counters method in system.py
Update CountersPoolAssigned class in pools_usage_day.py
Update actor_data method in linux_ad_osmanager.py
Remove AccumStat dataclass from core/managers/stats.py
This commit is contained in:
parent 66dcad2d8d
commit a8f8568c34
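The core of the change is that the AccumStat accumulator now lives in uds.core.types.stats and is consumed through counters.enumerate_accumulated_counters. Below is a minimal, self-contained sketch of how such accumulated samples can be merged by timestamp, in the spirit of the new Stats._usage_stats handler shown later in this diff; the dataclass mirrors the one added by this commit, while align_by_stamp and the sample data are illustrative only.

import dataclasses
import typing


@dataclasses.dataclass
class AccumStat:
    # Mirrors the dataclass this commit adds to uds/core/types/stats.py
    stamp: int
    count: int  # Number of elements in this interval
    sum: int  # Sum of elements in this interval
    max: int  # Max of elements in this interval
    min: int  # Min of elements in this interval


def align_by_stamp(
    primary: typing.Iterable[AccumStat],
    secondary: typing.Iterable[AccumStat],
) -> typing.Iterator[tuple[AccumStat, typing.Optional[AccumStat]]]:
    # Simplified variant of the merge loop in Stats._usage_stats: advance the
    # secondary iterator until its stamp catches up with the primary one and
    # record a match only when both stamps are equal.
    sec_iter = iter(secondary)
    sec: typing.Optional[AccumStat] = None
    for prim in primary:
        match: typing.Optional[AccumStat] = None
        try:
            while sec is None or sec.stamp < prim.stamp:
                sec = next(sec_iter)
            if sec.stamp == prim.stamp:
                match = sec
        except StopIteration:
            pass
        yield prim, match


if __name__ == '__main__':
    users = [AccumStat(3600, 2, 10, 7, 3), AccumStat(7200, 2, 12, 8, 4)]
    services = [AccumStat(3600, 2, 4, 3, 1)]  # no sample for the 7200 interval
    for user_stat, service_stat in align_by_stamp(users, services):
        print(user_stat.stamp, user_stat.sum, service_stat.sum if service_stat else None)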
server/src/uds/REST/methods/stats.py (new file, 141 lines)
@@ -0,0 +1,141 @@
# -*- coding: utf-8 -*-

#
# Copyright (c) 2014-2019 Virtual Cable S.L.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
#    * Redistributions of source code must retain the above copyright notice,
#      this list of conditions and the following disclaimer.
#    * Redistributions in binary form must reproduce the above copyright notice,
#      this list of conditions and the following disclaimer in the documentation
#      and/or other materials provided with the distribution.
#    * Neither the name of Virtual Cable S.L.U. nor the names of its contributors
#      may be used to endorse or promote products derived from this software
#      without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""
Author: Adolfo Gómez, dkmaster at dkmon dot com
"""
import logging
import datetime
import typing

from uds.core import types
from uds.REST import Handler
from uds import models
from uds.core.util.stats import counters

logger = logging.getLogger(__name__)

# Enclosed methods under /stats path
class Stats(Handler):
    authenticated = True
    needs_admin = True

    help_paths = [
        ('', 'Returns the last day usage statistics for all authenticators'),
    ]
    help_text = 'Provides access to usage statistics'

    def _usage_stats(self, since: datetime.datetime) -> dict[str, list[dict[str, typing.Any]]]:
        """
        Returns usage stats
        """
        auths: dict[str, list[dict[str, typing.Any]]] = {}
        for a in models.Authenticator.objects.all():
            services: typing.Optional[types.stats.AccumStat] = None
            userservices: typing.Optional[types.stats.AccumStat] = None
            stats: list[dict[str, typing.Any]] = []

            services_counter_iterator = counters.enumerate_accumulated_counters(
                interval_type=models.StatsCountersAccum.IntervalType.HOUR,
                counter_type=types.stats.CounterType.AUTH_SERVICES,
                owner_id=a.id,
                since=since,
                infer_owner_type_from=a,  # To infer the owner type
            )

            user_with_servicescount_iter = iter(
                counters.enumerate_accumulated_counters(
                    interval_type=models.StatsCountersAccum.IntervalType.HOUR,
                    counter_type=types.stats.CounterType.AUTH_USERS_WITH_SERVICES,
                    owner_id=a.id,
                    since=since,
                    infer_owner_type_from=a,  # To infer the owner type
                )
            )

            for user_counter in counters.enumerate_accumulated_counters(
                interval_type=models.StatsCountersAccum.IntervalType.HOUR,
                counter_type=types.stats.CounterType.AUTH_USERS,
                owner_id=a.id,
                since=since,
                infer_owner_type_from=a,  # To infer the owner type
            ):
                try:
                    while True:
                        services_counter = next(services_counter_iterator)
                        if services_counter.stamp >= user_counter.stamp:
                            break
                    if user_counter.stamp == services_counter.stamp:
                        services = services_counter
                except StopIteration:
                    pass

                try:
                    while True:
                        uservices_counter = next(user_with_servicescount_iter)
                        if uservices_counter.stamp >= user_counter.stamp:
                            break
                    if user_counter.stamp == uservices_counter.stamp:
                        userservices = uservices_counter
                except StopIteration:
                    pass

                # Append the accumulated values for this interval
                stats.append(
                    {
                        'stamp': user_counter.stamp,
                        'users': (
                            {'min': user_counter.min, 'max': user_counter.max, 'sum': user_counter.sum}
                            if user_counter
                            else None
                        ),
                        'services': (
                            {'min': services.min, 'max': services.max, 'sum': services.sum}
                            if services
                            else None
                        ),
                        'user_services': (
                            {'min': userservices.min, 'max': userservices.max, 'sum': userservices.sum}
                            if userservices
                            else None
                        ),
                    }
                )
                # print(len(stats), stats[-1], datetime.datetime.fromtimestamp(lastSeen), since)
            auths[a.uuid] = stats

        return auths

    def get(self) -> typing.Any:
        """
        Processes get method. Returns usage statistics for the last day.
        """
        # Default returns usage stats for last day
        return self._usage_stats(datetime.datetime.now() - datetime.timedelta(days=1))
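For reference, the structure that get() hands back to the REST layer is a mapping keyed by authenticator UUID, with one entry per hourly interval since the requested start time. The UUID placeholder and the numbers below are illustrative only, not output from a real deployment:

{
    '<authenticator-uuid>': [
        {
            'stamp': 1714003200,
            'users': {'min': 3, 'max': 7, 'sum': 10},
            'services': {'min': 1, 'max': 3, 'sum': 4},
            'user_services': None,  # no accumulated sample for this hour
        },
        # ... one entry per hourly sample since 'since'
    ],
}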
@@ -81,7 +81,7 @@ def get_servicepools_counters(
     else:
         us = servicepool

-    stats = counters.get_accumulated_counters(
+    stats = counters.enumerate_accumulated_counters(
         interval_type=models.StatsCountersAccum.IntervalType.DAY,
         counter_type=counter_type,
         owner_type=types.stats.CounterOwnerType.SERVICEPOOL,
@@ -159,7 +159,7 @@ class System(Handler):
     needs_staff = True

     help_paths = [
-        ('overview', ''),
+        ('', ''),
         ('stats/assigned', ''),
         ('stats/inuse', ''),
         ('stats/cached', ''),
@@ -47,3 +47,5 @@ class _NotFound:


 NOT_FOUND: typing.Final[_NotFound] = _NotFound()
+
+ITEMS_LIMIT: typing.Final[int] = 4400
@@ -30,7 +30,6 @@
 Author: Adolfo Gómez, dkmaster at dkmon dot com
 """
 import collections.abc
-import dataclasses
 import datetime
 import logging
 import time
@@ -59,15 +58,6 @@ _REVERSE_FLDS_EQUIV: typing.Final[collections.abc.Mapping[str, str]] = {
 }


-@dataclasses.dataclass
-class AccumStat:
-    stamp: int
-    count: int  # Number of elements in this interval
-    sum: int  # Sum of elements in this interval
-    max: int  # Max of elements in this interval
-    min: int  # Min of elements in this interval
-
-
 class StatsManager(metaclass=singleton.Singleton):
     """
     Manager for statistics, so we can provide usefull info about platform usage
@@ -76,6 +66,7 @@ class StatsManager(metaclass=singleton.Singleton):
     that has counters (such as how many users is at a time active at platform, how many services
     are assigned, are in use, in cache, etc...
     """
+
     @staticmethod
     def manager() -> 'StatsManager':
         return StatsManager()  # Singleton pattern will return always the same instance
@@ -135,8 +126,8 @@ class StatsManager(metaclass=singleton.Singleton):

     def enumerate_counters(
         self,
-        ownerType: int,
-        counterType: int,
+        owner_type: int,
+        counter_type: int,
         ownerIds: typing.Union[collections.abc.Iterable[int], int, None],
         since: datetime.datetime,
         to: datetime.datetime,
@@ -165,8 +156,8 @@ class StatsManager(metaclass=singleton.Singleton):
         to_stamp = int(time.mktime(to.timetuple()))

         return StatsCounters.get_grouped(
-            ownerType,
-            counterType,
+            owner_type,
+            counter_type,
             owner_id=ownerIds,
             since=since_stamp,
             to=to_stamp,
@@ -184,7 +175,7 @@ class StatsManager(metaclass=singleton.Singleton):
         owner_id: typing.Optional[int] = None,
         since: typing.Optional[typing.Union[datetime.datetime, int]] = None,
         points: typing.Optional[int] = None,
-    ) -> typing.Generator[AccumStat, None, None]:
+    ) -> typing.Generator[types.stats.AccumStat, None, None]:
         if since is None:
             if points is None:
                 points = 100  # If since is not specified, we need at least points, get a default
@@ -203,12 +194,13 @@ class StatsManager(metaclass=singleton.Singleton):
             query = query.filter(owner_type=owner_type)
         if owner_id is not None:
             query = query.filter(owner_id=owner_id)
+        # If points is NONE, we get all data
         query = query[:points]

         # Yields all data, stamp, n, sum, max, min (stamp, v_count,v_sum,v_max,v_min)
         # Now, get exactly the points we need
         stamp = since
-        last = AccumStat(stamp, 0, 0, 0, 0)
+        last = types.stats.AccumStat(stamp, 0, 0, 0, 0)
         for rec in query:
             # While query stamp is greater than stamp, repeat last AccumStat
             while rec.stamp > stamp:
@@ -218,7 +210,7 @@ class StatsManager(metaclass=singleton.Singleton):
                 last.stamp = stamp
             # The record to be emmitted is the current one, but replace record stamp with current stamp
             # The recor is for sure the first one previous to stamp (we have emmited last record until we reach this one)
-            last = AccumStat(
+            last = types.stats.AccumStat(
                 stamp,
                 rec.v_count,
                 rec.v_sum,
@@ -244,7 +236,11 @@ class StatsManager(metaclass=singleton.Singleton):

     # Event stats
     def add_event(
-        self, owner_type: types.stats.EventOwnerType, owner_id: int, event_type: types.stats.EventType, **kwargs: typing.Any
+        self,
+        owner_type: types.stats.EventOwnerType,
+        owner_id: int,
+        event_type: types.stats.EventType,
+        **kwargs: typing.Any,
     ) -> bool:
         """
         Adds a new event stat to database.
@@ -29,6 +29,7 @@
 """
 Author: Adolfo Gómez, dkmaster at dkmon dot com
 """
+import dataclasses
 import enum


@@ -135,3 +136,15 @@ class CounterOwnerType(enum.IntEnum):
     @property
     def owner_name(self) -> str:
         return self.name.capitalize()
+
+
+@dataclasses.dataclass
+class AccumStat:
+    """
+    Accumulated statistics for a given interval, as stored in the database
+    """
+    stamp: int
+    count: int  # Number of elements in this interval
+    sum: int  # Sum of elements in this interval
+    max: int  # Max of elements in this interval
+    min: int  # Min of elements in this interval
@@ -36,7 +36,8 @@ import collections.abc
 from django.utils.translation import gettext_lazy as _
 from django.db.models import Model

-from uds.core.managers.stats import StatsManager, AccumStat
+from uds.core.managers.stats import StatsManager
+from uds.core.types.stats import AccumStat
 from uds.models import (
     Provider,
     Service,
@@ -73,7 +74,10 @@ def _get_prov_serv_pool_ids(provider: 'Provider') -> tuple[int, ...]:


 _id_retriever: typing.Final[
-    collections.abc.Mapping[type[Model], collections.abc.Mapping[int, collections.abc.Callable[[typing.Any], typing.Any]]]
+    collections.abc.Mapping[
+        type[Model],
+        collections.abc.Mapping[types.stats.CounterType, collections.abc.Callable[[typing.Any], typing.Any]],
+    ]
 ] = {
     Provider: {
         types.stats.CounterType.LOAD: _get_id,
@@ -98,7 +102,9 @@ _id_retriever: typing.Final[
     },
 }

-_valid_model_for_counterype: typing.Final[collections.abc.Mapping[int, tuple[type[Model], ...]]] = {
+_valid_model_for_counterype: typing.Final[
+    collections.abc.Mapping[types.stats.CounterType, tuple[type[Model], ...]]
+] = {
     types.stats.CounterType.LOAD: (Provider,),
     types.stats.CounterType.STORAGE: (Service,),
     types.stats.CounterType.ASSIGNED: (ServicePool,),
@@ -119,8 +125,8 @@ _obj_type_from_model: typing.Final[collections.abc.Mapping[type[Model], types.st

 def add_counter(
     obj: CounterClass,
-    counterType: types.stats.CounterType,
-    counterValue: int,
+    counter_type: types.stats.CounterType,
+    value: int,
     stamp: typing.Optional[datetime.datetime] = None,
 ) -> bool:
     """
@@ -133,22 +139,31 @@ def add_counter(
     note: Runtime checks are done so if we try to insert an unssuported stat, this won't be inserted and it will be logged
     """
     type_ = type(obj)
-    if type_ not in _valid_model_for_counterype.get(counterType, ()):  # pylint: disable
+    if type_ not in _valid_model_for_counterype.get(counter_type, ()):  # pylint: disable
         logger.error(
             'Type %s does not accepts counter of type %s',
             type_,
-            counterValue,
+            value,
             exc_info=True,
         )
         return False

     return StatsManager.manager().add_counter(
-        _obj_type_from_model[type(obj)], obj.id, counterType, counterValue, stamp
+        _obj_type_from_model[type(obj)], obj.id, counter_type, value, stamp
     )


 def enumerate_counters(
-    obj: CounterClass, counterType: types.stats.CounterType, **kwargs: typing.Any
+    obj: CounterClass,
+    counter_type: types.stats.CounterType,
+    *,
+    since: typing.Optional[datetime.datetime] = None,
+    to: typing.Optional[datetime.datetime] = None,
+    interval: typing.Optional[int] = None,
+    max_intervals: typing.Optional[int] = None,
+    limit: typing.Optional[int] = None,
+    use_max: bool = False,
+    all: bool = False,
 ) -> typing.Generator[tuple[datetime.datetime, int], None, None]:
     """
     Get counters
@@ -164,10 +179,6 @@ def enumerate_counters(
     Returns:
         A generator, that contains pairs of (stamp, value) tuples
     """
-    since = kwargs.get('since') or consts.NEVER
-    to = kwargs.get('to') or datetime.datetime.now()
-    limit = kwargs.get('limit')
-    use_max = kwargs.get('use_max', False)
     type_ = type(obj)

     read_fnc_tbl = _id_retriever.get(type_)
@@ -176,39 +187,44 @@ def enumerate_counters(
         logger.error('Type %s has no registered stats', type_)
         return

-    fnc = read_fnc_tbl.get(counterType)
+    fnc = read_fnc_tbl.get(counter_type)

     if not fnc:
-        logger.error('Type %s has no registerd stats of type %s', type_, counterType)
+        logger.error('Type %s has no registerd stats of type %s', type_, counter_type)
         return

-    if not kwargs.get('all', False):
+    if not all:
         owner_ids = fnc(obj)  # pyright: ignore
     else:
         owner_ids = None

     for i in StatsManager.manager().enumerate_counters(
         _obj_type_from_model[type(obj)],
-        counterType,
+        counter_type,
         owner_ids,
-        since,
-        to,
-        kwargs.get('interval'),
-        kwargs.get('max_intervals'),
+        since or consts.NEVER,
+        to or datetime.datetime.now(),
+        interval,
+        max_intervals,
         limit,
         use_max,
     ):
         yield (datetime.datetime.fromtimestamp(i[0]), i[1])


-def get_accumulated_counters(
+def enumerate_accumulated_counters(
     interval_type: StatsCountersAccum.IntervalType,
     counter_type: types.stats.CounterType,
     owner_type: typing.Optional[types.stats.CounterOwnerType] = None,
     owner_id: typing.Optional[int] = None,
     since: typing.Optional[typing.Union[datetime.datetime, int]] = None,
     points: typing.Optional[int] = None,
+    *,
+    infer_owner_type_from: typing.Optional[CounterClass] = None,
 ) -> typing.Generator[AccumStat, None, None]:
+    if not owner_type and infer_owner_type_from:
+        owner_type = _obj_type_from_model[type(infer_owner_type_from)]
+
     yield from StatsManager.manager().get_accumulated_counters(
         intervalType=interval_type,
         counterType=counter_type,
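The practical effect of dropping **kwargs from enumerate_counters is that every option is now an explicit keyword-only parameter, so call sites read unambiguously and type checkers can verify them. A minimal sketch of a call under the new signature, assuming a ServicePool instance is already at hand (the helper name and the one-day window are illustrative, not part of the commit):

import datetime

from uds.core.util.stats import counters


def assigned_last_day(pool) -> list[tuple[datetime.datetime, int]]:
    # Collect one (stamp, value) pair per hour for the last 24 hours of 'pool'
    start = datetime.datetime.now() - datetime.timedelta(days=1)
    return list(
        counters.enumerate_counters(
            pool,
            counters.types.stats.CounterType.ASSIGNED,
            since=start,
            to=start + datetime.timedelta(days=1),
            interval=3600,  # one sample per hour
            use_max=True,
            all=False,
        )
    )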
@@ -157,11 +157,11 @@ class LinuxOsADManager(LinuxOsManager):
             raise exceptions.ui.ValidationError(_('Must provide a password for the account!'))
         self.ou.value = self.ou.value.strip()

-    def actor_data(self, userservice: 'UserService') -> dict[str, typing.Any]:
-        return {
-            'action': 'rename_ad',
-            'name': userservice.get_name(),
-            'custom': {
+    def actor_data(self, userservice: 'UserService') -> types.osmanagers.ActorData:
+        return types.osmanagers.ActorData(
+            action='rename_ad',
+            name=userservice.get_name(),
+            custom={
                 'domain': self.domain.as_str(),
                 'username': self.account.as_str(),
                 'password': self.password.as_str(),
@@ -173,4 +173,4 @@ class LinuxOsADManager(LinuxOsManager):
                 'ssl': self.use_ssl.as_bool(),
                 'automatic_id_mapping': self.automatic_id_mapping.as_bool(),
             },
-        }
+        )
@@ -81,13 +81,16 @@ class CountersPoolAssigned(StatsReport):
                 continue

            hours = [0] * 24

+            # Convert start to datetime
+            start_datetime = datetime.datetime.combine(start, datetime.time.min)
+
             for x in counters.enumerate_counters(
                 pool,
                 counters.types.stats.CounterType.ASSIGNED,
-                since=start,
-                to=start + datetime.timedelta(days=1),
-                intervals=3600,
+                since=start_datetime,
+                to=start_datetime + datetime.timedelta(days=1),
+                interval=3600,
                 use_max=True,
                 all=False,
             ):