Added stats to udsfs

Adolfo Gómez García 2021-11-08 11:55:59 +01:00
parent e3568bc580
commit 7f4453bd37
3 changed files with 75 additions and 10 deletions


@@ -5,10 +5,12 @@ import typing
 import logging

 from uds import models
-from uds.core.util.stats.events import EVENT_NAMES, getOwner
+from uds.core.util.stats.events import EVENT_NAMES
+from uds.core.util.cache import Cache

 from . import types

 logger = logging.getLogger(__name__)

 # Custom types
@@ -68,6 +70,7 @@ class StatsFS(types.UDSFSInterface):
     }
     _dispatchers: typing.Mapping[str, typing.Tuple[DispatcherType, bool]]
+    _cache: typing.ClassVar[Cache] = Cache('fsevents')

     def __init__(self) -> None:
         # Initialize _dispatchers, Second element of tuple is True if the dispatcher has "intervals"
@@ -105,7 +108,7 @@ class StatsFS(types.UDSFSInterface):
             range = self._interval[interval]
         else:
-            range = (StatsFS._interval['today'])  # Does not matter, it's just a placeholder
+            range = (StatsFS._interval['lastmonth'])  # Any value except "today" will do the trick
             extension = interval

         if extension != 'csv':
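Note: the placeholder choice matters more than the old comment implied. getattr() below
picks the short 10-second cache TTL when the interval equals StatsFS._interval['today'],
so keeping 'today' as the placeholder would have forced the short TTL for paths that
only carry an extension:

    # From the getattr() hunk below: 'today' selects the short TTL
    if interval == StatsFS._interval['today']:
        cacheTime = 10
    else:
        cacheTime = 60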
@@ -146,17 +149,33 @@ class StatsFS(types.UDSFSInterface):
         dispatcher, interval, extension = self.getFilenameComponents(path)

+        # if interval is today, cache time is 10 seconds, else cache time is 60 seconds
+        if interval == StatsFS._interval['today']:
+            cacheTime = 10
+        else:
+            cacheTime = 60
+
+        # Check if the file info is cached
+        cached = self._cache.get(path[0])
+        if cached is not None:
+            logger.debug('Cache hit for %s', path[0])
+            return cached
+
         # Calculate the size of the file
         size = len(dispatcher(interval, extension, 0, 0))
         logger.debug('Size of %s: %s', path[0], size)

-        return types.StatType(
+        data = types.StatType(
             st_mode=(stat.S_IFREG | 0o755),
             st_nlink=1,
             st_size=size,
             st_mtime=interval.start_poxix,
         )
+        # store in cache
+        self._cache.put(path[0], data, cacheTime)
+        return data

     def read(self, path: typing.List[str], size: int, offset: int) -> bytes:
         logger.debug('Reading data from %s: offset: %s, size: %s', path, offset, size)
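The read-through pattern above assumes the uds Cache acts as a TTL key-value store:
get() returns None on a miss or after expiry, and put() takes a validity time in
seconds, both of which match the calls in the hunk. A minimal standalone sketch of the
same idiom (cached_stat and compute are illustrative names, not part of the commit):

    from uds.core.util.cache import Cache

    _cache = Cache('fsevents')

    def cached_stat(key, compute, ttl: int):
        # Return the cached value while it is still valid...
        value = _cache.get(key)
        if value is None:
            # ...otherwise recompute it and store it for ttl seconds
            value = compute()
            _cache.put(key, value, ttl)
        return value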
@@ -192,5 +211,9 @@ class StatsFS(types.UDSFSInterface):
         self, interval: StatInterval, extension: str, size: int, offset: int
     ) -> bytes:
         logger.debug('Reading pools. Interval=%s, extension=%s, offset: %s, size: %s', interval, extension, offset, size)
-        return b'xxxx'
+        # Compose the csv file from what we know of service pools
+        virtualFile = models.ServicePool.getCSVHeader().encode() + b'\n'
+        # First, get the list of service pools
+        for pool in models.ServicePool.objects.all().order_by('name'):
+            virtualFile += pool.toCsv().encode() + b'\n'
+        return virtualFile
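Review note: readPools() receives size and offset but builds and returns the whole
virtual file; presumably the caller slices the result. If it does not, a slicing step
along these lines would be needed (an assumption, not part of this commit):

    data = self.readPools(interval, extension, size, offset)
    return data[offset : offset + size]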


@@ -502,9 +502,7 @@ class ServicePool(UUIDModel, TaggingMixin):  # type: ignore
         raise InvalidServiceException()

     def validateTransport(self, transport) -> None:
-        try:
-            self.transports.get(id=transport.id)
-        except:
-            raise InvalidServiceException()
+        if self.transports.filter(id=transport.id).count() == 0:
+            raise InvalidServiceException()

     def validateUser(self, user: 'User') -> None:
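Review note: dropping the try/except also removes a bare except that could swallow
unrelated errors. A marginally cheaper Django idiom (a suggestion, not in the commit)
is exists(), which lets the database stop at the first match instead of counting:

    def validateTransport(self, transport) -> None:
        # exists() issues a LIMIT 1 query instead of COUNT(*)
        if not self.transports.filter(id=transport.id).exists():
            raise InvalidServiceException()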
@@ -659,6 +657,7 @@ class ServicePool(UUIDModel, TaggingMixin):  # type: ignore
         """
         Returns the % used services, related to "maximum" user services
         If no "maximum" number of services, will return 0% ofc
+        cachedValue is used to optimize (if the number of assigned services is known, we can avoid querying the db)
         """
         maxs = self.max_srvs
         if maxs == 0:
@@ -711,6 +710,35 @@ class ServicePool(UUIDModel, TaggingMixin):  # type: ignore
         # Clears related permissions
         clean(toDelete)

+    # returns CSV header
+    @staticmethod
+    def getCSVHeader(sep: str = ',') -> str:
+        return sep.join(
+            [
+                'name',
+                'initial',
+                'cache_l1',
+                'cache_l2',
+                'max',
+                'assigned_services',
+                'cached_services',
+            ]
+        )
+
+    # Return record as csv line using separator (default: ',')
+    def toCsv(self, sep: str = ',') -> str:
+        return sep.join(
+            [
+                self.name,
+                str(self.initial_srvs),
+                str(self.cache_l1_srvs),
+                str(self.cache_l2_srvs),
+                str(self.max_srvs),
+                str(self.assignedUserServices().count()),
+                str(self.cachedUserServices().count()),
+            ]
+        )
+
     def __str__(self):
         return 'Deployed service {}({}) with {} as initial, {} as L1 cache, {} as L2 cache, {} as max'.format(
             self.name,
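Together with the udsfs change above, these two helpers produce the virtual pools.csv
content. Illustrative output (the pool name and counts are made-up sample values):

    ServicePool.getCSVHeader()  # 'name,initial,cache_l1,cache_l2,max,assigned_services,cached_services'
    pool.toCsv()                # e.g. 'Win10-pool,2,1,0,10,4,1'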


@@ -42,6 +42,7 @@ from .util import getSqlDatetimeAsUnix

 logger = logging.getLogger(__name__)

+
 class StatsEvents(models.Model):
     """
     Statistics about events (login, logout, whatever...)
@@ -135,8 +136,21 @@ class StatsEvents(models.Model):
     # returns CSV header
     @staticmethod
-    def getCSVHeader() -> str:
-        return 'owner_type,owner_id,event_type,stamp,field_1,field_2,field_3,field_4'
+    def getCSVHeader(
+        sep: str = '',
+    ) -> str:
+        return sep.join(
+            [
+                'owner_type',
+                'owner_id',
+                'event_type',
+                'stamp',
+                'field_1',
+                'field_2',
+                'field_3',
+                'field_4',
+            ]
+        )

     # Return record as csv line using separator (default: ',')
     def toCsv(self, sep: str = ',') -> str:
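Review note: getCSVHeader() now defaults to sep='' while toCsv() defaults to sep=',',
so calling it without an argument concatenates the column names into a single token:

    StatsEvents.getCSVHeader()     # 'owner_typeowner_idevent_typestamp...'
    StatsEvents.getCSVHeader(',')  # 'owner_type,owner_id,event_type,stamp,...'

Callers composing the events csv should pass the separator explicitly, or the default
should probably be ',' to match toCsv().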