Added stats to udsfs

Adolfo Gómez García 2021-11-08 11:55:59 +01:00
parent e3568bc580
commit 7f4453bd37
3 changed files with 75 additions and 10 deletions

View File

@@ -5,10 +5,12 @@ import typing
import logging
from uds import models
-from uds.core.util.stats.events import EVENT_NAMES, getOwner
+from uds.core.util.stats.events import EVENT_NAMES
+from uds.core.util.cache import Cache
from . import types
logger = logging.getLogger(__name__)
# Custom types
@@ -68,6 +70,7 @@ class StatsFS(types.UDSFSInterface):
}
_dispatchers: typing.Mapping[str, typing.Tuple[DispatcherType, bool]]
+_cache: typing.ClassVar[Cache] = Cache('fsevents')
def __init__(self) -> None:
# Initialize _dispatchers, Second element of tuple is True if the dispatcher has "intervals"
@@ -105,7 +108,7 @@ class StatsFS(types.UDSFSInterface):
range = self._interval[interval]
else:
-range = (StatsFS._interval['today']) # Does not matter, it's just a placeholder
+range = (StatsFS._interval['lastmonth']) # Any value except "today" will do the trick
extension = interval
if extension != 'csv':
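For context, this hunk lives inside getFilenameComponents: when the token after the dispatcher name is not a known interval, it is really the extension (hence extension = interval), and the placeholder range is changed to anything but 'today', presumably so the new 10-second cache branch in getattr below does not mis-fire for files without an explicit interval. A hypothetical sketch of that parsing under those assumptions (the function is only partially visible, so the name and fallbacks here are guesses):

    # Hypothetical reconstruction; only the else-branch above is visible in the diff.
    def split_stats_filename(name: str, intervals: dict):
        dispatcher, rest = name.split('.', 1)
        token, _, ext = rest.partition('.')
        if token in intervals:
            return dispatcher, intervals[token], ext      # e.g. 'events.lastmonth.csv'
        # no interval given: the token is really the extension ('events.csv')
        return dispatcher, intervals['lastmonth'], token  # any non-"today" placeholder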
@@ -146,17 +149,33 @@ class StatsFS(types.UDSFSInterface):
dispatcher, interval, extension = self.getFilenameComponents(path)
+# if interval is today, cache time is 10 seconds, else cache time is 60 seconds
+if interval == StatsFS._interval['today']:
+cacheTime = 10
+else:
+cacheTime = 60
+# Check if the file info is cached
+cached = self._cache.get(path[0])
+if cached is not None:
+logger.debug('Cache hit for %s', path[0])
+return cached
# Calculate the size of the file
size = len(dispatcher(interval, extension, 0, 0))
logger.debug('Size of %s: %s', path[0], size)
-return types.StatType(
+data = types.StatType(
st_mode=(stat.S_IFREG | 0o755),
st_nlink=1,
st_size=size,
st_mtime=interval.start_poxix,
)
+# store in cache
+self._cache.put(path[0], data, cacheTime)
+return data
def read(self, path: typing.List[str], size: int, offset: int) -> bytes:
logger.debug('Reading data from %s: offset: %s, size: %s', path, offset, size)
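The getattr caching added above follows a common look-up/compute/store pattern: a short TTL for "today" (still changing) and a longer one for closed intervals. A condensed sketch of just that pattern, assuming only the Cache API visible in the hunk (get(key) returning None on a miss, put(key, value, ttl)); the helper name is invented:

    # Generic sketch of the pattern, not the committed code verbatim.
    def cached_stat(cache, key: str, compute, is_today: bool):
        ttl = 10 if is_today else 60      # fresher data for still-changing intervals
        value = cache.get(key)
        if value is not None:
            return value                  # cache hit: skip the expensive render
        value = compute()                 # renders the whole CSV just to measure it
        cache.put(key, value, ttl)
        return value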
@@ -192,5 +211,9 @@ class StatsFS(types.UDSFSInterface):
self, interval: StatInterval, extension: str, size: int, offset: int
) -> bytes:
logger.debug('Reading pools. Interval=%s, extension=%s, offset: %s, size: %s', interval, extension, offset, size)
-return b'xxxx'
+# Compose the csv file from what we know of service pools
+virtualFile = models.ServicePool.getCSVHeader().encode() + b'\n'
+# First, get the list of service pools
+for pool in models.ServicePool.objects.all().order_by('name'):
+virtualFile += pool.toCsv().encode() + b'\n'
+return virtualFile
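read_pools now renders the complete CSV in memory on every call, and the read() entry point above it serves window reads from that buffer. The slicing itself is not shown in this diff, but for a FUSE-style filesystem it is typically just this (a sketch; the helper name is invented):

    def read_slice(virtual_file: bytes, size: int, offset: int) -> bytes:
        # FUSE passes (size, offset); reads past EOF simply return b''
        return virtual_file[offset:offset + size]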

View File

@@ -502,9 +502,7 @@ class ServicePool(UUIDModel, TaggingMixin): # type: ignore
raise InvalidServiceException()
def validateTransport(self, transport) -> None:
-try:
-self.transports.get(id=transport.id)
-except:
+if self.transports.filter(id=transport.id).count() == 0:
raise InvalidServiceException()
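The rewritten validateTransport replaces the exception-driven get() with an explicit emptiness test. Django's exists() expresses the same check and lets the database stop at the first matching row instead of counting them all; an equivalent sketch:

    def validateTransport(self, transport) -> None:
        # same semantics as the committed filter(...).count() == 0 version
        if not self.transports.filter(id=transport.id).exists():
            raise InvalidServiceException()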
def validateUser(self, user: 'User') -> None:
@@ -659,6 +657,7 @@ class ServicePool(UUIDModel, TaggingMixin): # type: ignore
"""
Returns the % used services, related to "maximum" user services
If no "maximum" number of services, will return 0% ofc
cachedValue is used to optimize (if known the number of assigned services, we can avoid to query the db)
"""
maxs = self.max_srvs
if maxs == 0:
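The body of usage() is cut off after the zero-max guard. Going by the amended docstring, the cachedValue parameter short-circuits the assigned-services query; a plausible sketch only (the sentinel default and the integer arithmetic are assumptions, not shown in the diff):

    def usage(self, cachedValue: int = -1) -> int:
        maxs = self.max_srvs
        if maxs == 0:
            return 0  # no maximum configured: 0%, as the docstring says
        if cachedValue == -1:  # assumed sentinel for "not provided"
            cachedValue = self.assignedUserServices().count()
        return 100 * cachedValue // maxs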
@@ -711,6 +710,35 @@ class ServicePool(UUIDModel, TaggingMixin): # type: ignore
# Clears related permissions
clean(toDelete)
+# returns CSV header
+@staticmethod
+def getCSVHeader(sep: str = ',') -> str:
+return sep.join(
+[
+'name',
+'initial',
+'cache_l1',
+'cache_l2',
+'max',
+'assigned_services',
+'cached_services',
+]
+)
+# Return record as csv line using separator (default: ',')
+def toCsv(self, sep: str = ',') -> str:
+return sep.join(
+[
+self.name,
+str(self.initial_srvs),
+str(self.cache_l1_srvs),
+str(self.cache_l2_srvs),
+str(self.max_srvs),
+str(self.assignedUserServices().count()),
+str(self.cachedUserServices().count()),
+]
+)
def __str__(self):
return 'Deployed service {}({}) with {} as initial, {} as L1 cache, {} as L2 cache, {} as max'.format(
self.name,
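Taken together, the two new ServicePool helpers are exactly what read_pools stitches into the virtual file. A quick usage sketch of the pair:

    lines = [models.ServicePool.getCSVHeader()]
    for pool in models.ServicePool.objects.all().order_by('name'):
        lines.append(pool.toCsv())
    csv_text = '\n'.join(lines) + '\n'

Note that neither helper quotes its fields, so a pool name containing the separator would shift the columns; callers needing robust CSV would have to escape fields or pick another separator.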

View File

@@ -42,6 +42,7 @@ from .util import getSqlDatetimeAsUnix
logger = logging.getLogger(__name__)
class StatsEvents(models.Model):
"""
Statistics about events (login, logout, whatever...)
@@ -135,8 +136,21 @@ class StatsEvents(models.Model):
# returns CSV header
@staticmethod
-def getCSVHeader() -> str:
-return 'owner_type,owner_id,event_type,stamp,field_1,field_2,field_3,field_4'
+def getCSVHeader(
+sep: str = ',',
+) -> str:
+return sep.join(
+[
+'owner_type',
+'owner_id',
+'event_type',
+'stamp',
+'field_1',
+'field_2',
+'field_3',
+'field_4',
+]
+)
# Return record as csv line using separator (default: ',')
def toCsv(self, sep: str = ',') -> str:
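The diff ends before the body of StatsEvents.toCsv. By symmetry with ServicePool.toCsv and the header fields above, it presumably joins the same eight columns in order; a sketch of that shape only (the attribute names are assumptions):

    def toCsv(self, sep: str = ',') -> str:
        # assumed attributes matching the header: owner_type ... field_4
        return sep.join(
            str(v)
            for v in (
                self.owner_type, self.owner_id, self.event_type, self.stamp,
                self.field_1, self.field_2, self.field_3, self.field_4,
            )
        )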