1
0
mirror of https://github.com/dkmstr/openuds.git synced 2024-12-25 23:21:41 +03:00

advancing on refactoring and cache improvements

This commit is contained in:
Adolfo Gómez García 2024-01-08 00:44:44 +01:00
parent 4cf261678f
commit bc61814ec4
No known key found for this signature in database
GPG Key ID: DD1ABF20724CDA23
128 changed files with 706 additions and 670 deletions

View File

@ -282,7 +282,7 @@ class Dispatcher(View):
return not issubclass(x, DetailHandler) and not x.__subclasses__()
# Register all subclasses of Handler
modfinder.dynamicLoadAndRegisterPackages(
modfinder.dynamically_load_and_register_packages(
Dispatcher.registerClass,
Handler,
modName=modName,

View File

@ -37,7 +37,7 @@ import collections.abc
from django.utils.translation import gettext as _
from uds.core.util import ensure, permissions
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.models import Account, AccountUsage
from uds.REST import RequestError
from uds.REST.model import DetailHandler
@ -84,7 +84,7 @@ class AccountsUsage(DetailHandler): # pylint: disable=too-many-public-methods
try:
if not item:
return [AccountsUsage.usageToDict(k, perm) for k in parent.usages.all()]
k = parent.usages.get(uuid=processUuid(item))
k = parent.usages.get(uuid=process_uuid(item))
return AccountsUsage.usageToDict(k, perm)
except Exception:
logger.exception('itemId %s', item)
@ -111,7 +111,7 @@ class AccountsUsage(DetailHandler): # pylint: disable=too-many-public-methods
parent = ensure.is_instance(parent, Account)
logger.debug('Deleting account usage %s from %s', item, parent)
try:
usage = parent.usages.get(uuid=processUuid(item))
usage = parent.usages.get(uuid=process_uuid(item))
usage.delete()
except Exception:
logger.exception('Exception')

View File

@ -669,7 +669,7 @@ class Logout(ActorV3Action):
if userService.in_use: # If already logged out, do not add a second logout (windows does this i.e.)
osmanagers.OSManager.logged_out(userService, username)
if osManager:
if osManager.isRemovableOnLogout(userService):
if osManager.is_removableOnLogout(userService):
logger.debug('Removable on logout: %s', osManager)
userService.remove()
else:

View File

@ -43,7 +43,7 @@ from uds.core.environment import Environment
from uds.REST import NotFound
from uds.REST.model import ModelHandler
from uds.core.util import permissions, ensure
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.core.ui import gui
from .users_groups import Users, Groups
@ -235,7 +235,7 @@ class Authenticators(ModelHandler):
logger.debug(self._params)
if fields.get('mfa_id'):
try:
mfa = MFA.objects.get(uuid=processUuid(fields['mfa_id']))
mfa = MFA.objects.get(uuid=process_uuid(fields['mfa_id']))
fields['mfa_id'] = mfa.id
except MFA.DoesNotExist:
pass # will set field to null

View File

@ -39,7 +39,7 @@ from django.db import IntegrityError
from django.utils.translation import gettext as _
from uds.core.util import ensure, permissions
from uds.core.util.model import sql_datetime, processUuid
from uds.core.util.model import sql_datetime, process_uuid
from uds.models.calendar_rule import CalendarRule, freqs
from uds.models.calendar import Calendar
from uds.REST import RequestError
@ -86,7 +86,7 @@ class CalendarRules(DetailHandler): # pylint: disable=too-many-public-methods
try:
if item is None:
return [CalendarRules.ruleToDict(k, perm) for k in parent.rules.all()]
k = parent.rules.get(uuid=processUuid(item))
k = parent.rules.get(uuid=process_uuid(item))
return CalendarRules.ruleToDict(k, perm)
except Exception as e:
logger.exception('itemId %s', item)
@ -143,7 +143,7 @@ class CalendarRules(DetailHandler): # pylint: disable=too-many-public-methods
if item is None: # Create new
calRule = parent.rules.create(**fields)
else:
calRule = parent.rules.get(uuid=processUuid(item))
calRule = parent.rules.get(uuid=process_uuid(item))
calRule.__dict__.update(fields)
calRule.save()
except CalendarRule.DoesNotExist:
@ -158,7 +158,7 @@ class CalendarRules(DetailHandler): # pylint: disable=too-many-public-methods
parent = ensure.is_instance(parent, Calendar)
logger.debug('Deleting rule %s from %s', item, parent)
try:
calRule = parent.rules.get(uuid=processUuid(item))
calRule = parent.rules.get(uuid=process_uuid(item))
calRule.calendar.modified = sql_datetime()
calRule.calendar.save()
calRule.delete()

View File

@ -52,5 +52,5 @@ class Config(Handler):
for section, secDict in self._params.items():
for key, vals in secDict.items():
logger.info('Updating config value %s.%s to %s by %s', section, key, vals['value'], self._user.name)
CfgConfig.update(CfgConfig.SectionType.fromStr(section), key, vals['value'])
CfgConfig.update(CfgConfig.SectionType.from_str(section), key, vals['value'])
return 'done'

View File

@ -125,7 +125,7 @@ class Connection(Handler):
}
if itrans: # only will be available id doNotCheck is False
connectionInfoDict.update(
itrans.getConnectionInfo(userService, self._user, 'UNKNOWN').as_dict()
itrans.get_connection_info(userService, self._user, 'UNKNOWN').as_dict()
)
return Connection.result(result=connectionInfoDict)
except ServiceNotReadyError as e:

View File

@ -44,7 +44,7 @@ from uds.core.auths.auth import authenticate
from uds.core.managers.crypto import CryptoManager
from uds.core.util.cache import Cache
from uds.core.util.config import GlobalConfig
from uds.core.util.model import sql_stamp_seconds, processUuid
from uds.core.util.model import sql_stamp_seconds, process_uuid
from uds.models import Authenticator
from uds.REST import AccessDenied, Handler, RequestError
from uds.REST.utils import rest_result
@ -166,7 +166,7 @@ class Login(Handler):
# Will raise an exception if no auth found
if authId:
auth = Authenticator.objects.get(uuid=processUuid(authId))
auth = Authenticator.objects.get(uuid=process_uuid(authId))
elif authName:
auth = Authenticator.objects.get(name=authName)
else:

View File

@ -41,7 +41,7 @@ from uds.core import types
from uds.core.consts.images import DEFAULT_THUMB_BASE64
from uds.core.ui import gui
from uds.core.util import ensure, permissions
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.core.util.state import State
from uds.models import Image, MetaPool, ServicePoolGroup
from uds.REST import RequestError, ResponseError
@ -263,7 +263,7 @@ class MetaPools(ModelHandler):
logger.debug('Image id: %s', imgId)
try:
if imgId != '-1':
image = Image.objects.get(uuid=processUuid(imgId))
image = Image.objects.get(uuid=process_uuid(imgId))
fields['image_id'] = image.id
except Exception:
logger.exception('At image recovering')
@ -274,7 +274,7 @@ class MetaPools(ModelHandler):
logger.debug('servicesPoolGroup_id: %s', spgrpId)
try:
if spgrpId != '-1':
spgrp = ServicePoolGroup.objects.get(uuid=processUuid(spgrpId))
spgrp = ServicePoolGroup.objects.get(uuid=process_uuid(spgrpId))
fields['servicesPoolGroup_id'] = spgrp.id
except Exception:
logger.exception('At service pool group recovering')

View File

@ -43,7 +43,7 @@ from uds import models
# from uds.models.user import User
from uds.core.util.state import State
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.core.util import log, ensure
from uds.REST.model import DetailHandler
from .user_services import AssignedService
@ -77,7 +77,7 @@ class MetaServicesPool(DetailHandler):
try:
if not item:
return [MetaServicesPool.as_dict(i) for i in parent.members.all()]
i = parent.members.get(uuid=processUuid(item))
i = parent.members.get(uuid=process_uuid(item))
return MetaServicesPool.as_dict(i)
except Exception:
logger.exception('err: %s', item)
@ -96,9 +96,9 @@ class MetaServicesPool(DetailHandler):
def saveItem(self, parent: 'Model', item: typing.Optional[str]):
parent = ensure.is_instance(parent, models.MetaPool)
# If already exists
uuid = processUuid(item) if item else None
uuid = process_uuid(item) if item else None
pool = models.ServicePool.objects.get(uuid=processUuid(self._params['pool_id']))
pool = models.ServicePool.objects.get(uuid=process_uuid(self._params['pool_id']))
enabled = self._params['enabled'] not in ('false', False, '0', 0)
priority = int(self._params['priority'])
priority = priority if priority >= 0 else 0
@ -124,7 +124,7 @@ class MetaServicesPool(DetailHandler):
def deleteItem(self, parent: 'Model', item: str):
parent = ensure.is_instance(parent, models.MetaPool)
member = parent.members.get(uuid=processUuid(self._args[0]))
member = parent.members.get(uuid=process_uuid(self._args[0]))
logStr = "Removed meta pool member {} by {}".format(member.pool.name, self._user.pretty_name)
member.delete()
@ -155,7 +155,7 @@ class MetaAssignedService(DetailHandler):
"""
try:
return models.UserService.objects.filter(
uuid=processUuid(userServiceId),
uuid=process_uuid(userServiceId),
cache_level=0,
deployed_service__in=[i.pool for i in metaPool.members.all()],
)[0]
@ -198,7 +198,7 @@ class MetaAssignedService(DetailHandler):
props={
k: v
for k, v in models.Properties.objects.filter(
owner_type='userservice', owner_id=processUuid(item)
owner_type='userservice', owner_id=process_uuid(item)
).values_list('key', 'value')
},
)
@ -277,7 +277,7 @@ class MetaAssignedService(DetailHandler):
fields = self.readFieldsFromParams(['auth_id', 'user_id'])
service = self._getAssignedService(parent, item)
user = models.User.objects.get(uuid=processUuid(fields['user_id']))
user = models.User.objects.get(uuid=process_uuid(fields['user_id']))
logStr = 'Changing ownership of service from {} to {} by {}'.format(
service.user.pretty_name if service.user else 'unknown', user.pretty_name, self._user.pretty_name

View File

@ -39,7 +39,7 @@ from django.utils.translation import gettext as _
from uds.core import types
from uds.core.util import log, ensure
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.models import Calendar, CalendarAction, CalendarAccess, ServicePool
from uds.models.calendar_action import CALENDAR_ACTION_DICT
from uds.REST.model import DetailHandler
@ -71,7 +71,7 @@ class AccessCalendars(DetailHandler):
if not item:
return [AccessCalendars.as_dict(i) for i in parent.calendarAccess.all()]
return AccessCalendars.as_dict(
parent.calendarAccess.get(uuid=processUuid(item))
parent.calendarAccess.get(uuid=process_uuid(item))
)
except Exception as e:
logger.exception('err: %s', item)
@ -90,11 +90,11 @@ class AccessCalendars(DetailHandler):
def saveItem(self, parent: 'Model', item: typing.Optional[str]) -> None:
parent = ensure.is_instance(parent, ServicePool)
# If already exists
uuid = processUuid(item) if item is not None else None
uuid = process_uuid(item) if item is not None else None
try:
calendar: Calendar = Calendar.objects.get(
uuid=processUuid(self._params['calendarId'])
uuid=process_uuid(self._params['calendarId'])
)
access: str = self._params['access'].upper()
if access not in (ALLOW, DENY):
@ -126,7 +126,7 @@ class AccessCalendars(DetailHandler):
def deleteItem(self, parent: 'Model', item: str) -> None:
parent = ensure.is_instance(parent, ServicePool)
calendarAccess = parent.calendarAccess.get(uuid=processUuid(self._args[0]))
calendarAccess = parent.calendarAccess.get(uuid=process_uuid(self._args[0]))
logStr = f'Removed access calendar {calendarAccess.calendar.name} by {self._user.pretty_name}'
calendarAccess.delete()
@ -167,7 +167,7 @@ class ActionsCalendars(DetailHandler):
return [
ActionsCalendars.as_dict(i) for i in parent.calendaraction_set.all()
]
i = parent.calendaraction_set.get(uuid=processUuid(item))
i = parent.calendaraction_set.get(uuid=process_uuid(item))
return ActionsCalendars.as_dict(i)
except Exception as e:
raise self.invalidItemException() from e
@ -189,9 +189,9 @@ class ActionsCalendars(DetailHandler):
def saveItem(self, parent: 'Model', item: typing.Optional[str]) -> None:
parent = ensure.is_instance(parent, ServicePool)
# If already exists
uuid = processUuid(item) if item is not None else None
uuid = process_uuid(item) if item is not None else None
calendar = Calendar.objects.get(uuid=processUuid(self._params['calendarId']))
calendar = Calendar.objects.get(uuid=process_uuid(self._params['calendarId']))
action = self._params['action'].upper()
if action not in CALENDAR_ACTION_DICT:
raise self.invalidRequestException()
@ -229,7 +229,7 @@ class ActionsCalendars(DetailHandler):
def deleteItem(self, parent: 'Model', item: str) -> None:
parent = ensure.is_instance(parent, ServicePool)
calendarAction = CalendarAction.objects.get(uuid=processUuid(self._args[0]))
calendarAction = CalendarAction.objects.get(uuid=process_uuid(self._args[0]))
logStr = (
f'Removed scheduled action "{calendarAction.calendar.name},'
f'{calendarAction.action},{calendarAction.events_offset},'
@ -244,7 +244,7 @@ class ActionsCalendars(DetailHandler):
def execute(self, parent: 'Model', item: str):
parent = ensure.is_instance(parent, ServicePool)
logger.debug('Launching action')
uuid = processUuid(item)
uuid = process_uuid(item)
calendarAction: CalendarAction = CalendarAction.objects.get(uuid=uuid)
self.ensureAccess(calendarAction, types.permissions.PermissionType.MANAGEMENT)

View File

@ -137,19 +137,19 @@ class Permissions(Handler):
cls = Permissions.getClass(cls_param)
obj = cls.objects.get(uuid=obj_param)
user = models.User.objects.get(uuid=user_param)
permissions.addUserPermission(user, obj, perm)
permissions.add_user_permission(user, obj, perm)
return Permissions.permsToDict(permissions.getPermissions(obj))
def add_group_permission(cls_param: str, obj_param: str, group_param: str) -> list[dict]:
cls = Permissions.getClass(cls_param)
obj = cls.objects.get(uuid=obj_param)
group = models.Group.objects.get(uuid=group_param)
permissions.addGroupPermission(group, obj, perm)
permissions.add_group_permission(group, obj, perm)
return Permissions.permsToDict(permissions.getPermissions(obj))
def revoke() -> list[dict]:
for permId in self._params.get('items', []):
permissions.revokePermissionById(permId)
permissions.revoke_permission_by_id(permId)
return []
def no_match() -> None:
@ -171,17 +171,17 @@ class Permissions(Handler):
if self._args[2] == 'users':
user = models.User.objects.get(uuid=self._args[4])
permissions.addUserPermission(user, obj, perm)
permissions.add_user_permission(user, obj, perm)
elif self._args[2] == 'groups':
group = models.Group.objects.get(uuid=self._args[4])
permissions.addGroupPermission(group, obj, perm)
permissions.add_group_permission(group, obj, perm)
else:
raise RequestError('Ivalid request')
return Permissions.permsToDict(permissions.getPermissions(obj))
if la == 1 and self._args[0] == 'revoke':
for permId in self._params.get('items', []):
permissions.revokePermissionById(permId)
permissions.revoke_permission_by_id(permId)
return []
raise RequestError('Invalid request')

View File

@ -39,7 +39,7 @@ from django.utils.translation import gettext_lazy as _
from uds import models
from uds.core import consts, types, ui
from uds.core.util import permissions, ensure
from uds.core.util.model import sql_datetime, processUuid
from uds.core.util.model import sql_datetime, process_uuid
from uds.REST.exceptions import NotFound, RequestError
from uds.REST.model import DetailHandler, ModelHandler
@ -100,7 +100,7 @@ class ServersTokens(ModelHandler):
) # Must have write permissions to delete
try:
self.model.objects.get(uuid=processUuid(self._args[0])).delete()
self.model.objects.get(uuid=process_uuid(self._args[0])).delete()
except self.model.DoesNotExist:
raise NotFound('Element do not exists') from None
@ -119,7 +119,7 @@ class ServersServers(DetailHandler):
multi = True
q = parent.servers.all()
else:
q = parent.servers.filter(uuid=processUuid(item))
q = parent.servers.filter(uuid=process_uuid(item))
res = []
i = None
for i in q:
@ -259,7 +259,7 @@ class ServersServers(DetailHandler):
elif parent.type == types.servers.ServerType.SERVER:
# Get server
try:
server = models.Server.objects.get(uuid=processUuid(self._params['server']))
server = models.Server.objects.get(uuid=process_uuid(self._params['server']))
# Check server type is also SERVER
if server and server.type != types.servers.ServerType.SERVER:
logger.error('Server type for %s is not SERVER', server.host)
@ -270,7 +270,7 @@ class ServersServers(DetailHandler):
pass
else:
try:
server = models.Server.objects.get(uuid=processUuid(item))
server = models.Server.objects.get(uuid=process_uuid(item))
parent.servers.add(server)
except Exception:
raise self.invalidItemException() from None
@ -280,7 +280,7 @@ class ServersServers(DetailHandler):
def deleteItem(self, parent: 'Model', item: str) -> None:
parent = ensure.is_instance(parent, models.ServerGroup)
try:
server = models.Server.objects.get(uuid=processUuid(item))
server = models.Server.objects.get(uuid=process_uuid(item))
if parent.server_type == types.servers.ServerType.UNMANAGED:
parent.servers.remove(server) # Remove reference
server.delete() # and delete server
@ -296,7 +296,7 @@ class ServersServers(DetailHandler):
Custom method that swaps maintenance mode state for a tunnel server
:param item:
"""
item = models.Server.objects.get(uuid=processUuid(id))
item = models.Server.objects.get(uuid=process_uuid(id))
self.ensureAccess(item, types.permissions.PermissionType.MANAGEMENT)
item.maintenance_mode = not item.maintenance_mode
item.save()

View File

@ -42,7 +42,7 @@ from uds import models
from uds.core import exceptions, types
import uds.core.types.permissions
from uds.core.util import log, permissions, ensure
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.core.environment import Environment
from uds.core.consts.images import DEFAULT_THUMB_BASE64
from uds.core.ui import gui
@ -125,7 +125,7 @@ class Services(DetailHandler): # pylint: disable=too-many-public-methods
try:
if item is None:
return [Services.serviceToDict(k, perm) for k in parent.services.all()]
k = parent.services.get(uuid=processUuid(item))
k = parent.services.get(uuid=process_uuid(item))
val = Services.serviceToDict(k, perm, full=True)
return self.fillIntanceFields(k, val)
except Exception as e:
@ -170,7 +170,7 @@ class Services(DetailHandler): # pylint: disable=too-many-public-methods
if not item: # Create new
service = parent.services.create(**fields)
else:
service = parent.services.get(uuid=processUuid(item))
service = parent.services.get(uuid=process_uuid(item))
service.__dict__.update(fields)
if service is None:
@ -216,7 +216,7 @@ class Services(DetailHandler): # pylint: disable=too-many-public-methods
def deleteItem(self, parent: 'Model', item: str) -> None:
parent = ensure.is_instance(parent, models.Provider)
try:
service = parent.services.get(uuid=processUuid(item))
service = parent.services.get(uuid=process_uuid(item))
if service.deployedServices.count() == 0:
service.delete()
return
@ -330,7 +330,7 @@ class Services(DetailHandler): # pylint: disable=too-many-public-methods
def getLogs(self, parent: 'Model', item: str) -> list[typing.Any]:
parent = ensure.is_instance(parent, models.Provider)
try:
service = parent.services.get(uuid=processUuid(item))
service = parent.services.get(uuid=process_uuid(item))
logger.debug('Getting logs for %s', item)
return log.get_logs(service)
except Exception:
@ -338,7 +338,7 @@ class Services(DetailHandler): # pylint: disable=too-many-public-methods
def servicesPools(self, parent: 'Model', item: str) -> typing.Any:
parent = ensure.is_instance(parent, models.Provider)
service = parent.services.get(uuid=processUuid(item))
service = parent.services.get(uuid=process_uuid(item))
logger.debug('Got parameters for servicepools: %s, %s', parent, item)
res = []
for i in service.deployedServices.all():

View File

@ -41,7 +41,7 @@ from uds.core import types
from uds.core.consts.images import DEFAULT_THUMB_BASE64
from uds.core.ui import gui
from uds.core.util import ensure
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.models import Image, ServicePoolGroup
from uds.REST.model import ModelHandler
@ -85,7 +85,7 @@ class ServicesPoolGroups(ModelHandler):
logger.debug('Image id: %s', imgId)
try:
if imgId != '-1':
image = Image.objects.get(uuid=processUuid(imgId))
image = Image.objects.get(uuid=process_uuid(imgId))
fields['image_id'] = image.id
except Exception:
logger.exception('At image recovering')

View File

@ -45,7 +45,7 @@ from uds.core.ui import gui
from uds.core.consts.images import DEFAULT_THUMB_BASE64
from uds.core.util import log, permissions, ensure
from uds.core.util.config import GlobalConfig
from uds.core.util.model import sql_datetime, processUuid
from uds.core.util.model import sql_datetime, process_uuid
from uds.core.util.state import State
from uds.models import (Account, Image, OSManager, Service, ServicePool,
ServicePoolGroup, User)
@ -485,7 +485,7 @@ class ServicesPools(ModelHandler):
try:
try:
service = Service.objects.get(uuid=processUuid(fields['service_id']))
service = Service.objects.get(uuid=process_uuid(fields['service_id']))
fields['service_id'] = service.id
except Exception:
raise RequestError(gettext('Base service does not exist anymore')) from None
@ -500,7 +500,7 @@ class ServicesPools(ModelHandler):
self._params['allow_users_reset'] = False
if serviceType.needs_manager is True:
osmanager = OSManager.objects.get(uuid=processUuid(fields['osmanager_id']))
osmanager = OSManager.objects.get(uuid=process_uuid(fields['osmanager_id']))
fields['osmanager_id'] = osmanager.id
else:
del fields['osmanager_id']
@ -549,7 +549,7 @@ class ServicesPools(ModelHandler):
if accountId != '-1':
try:
fields['account_id'] = Account.objects.get(uuid=processUuid(accountId)).id
fields['account_id'] = Account.objects.get(uuid=process_uuid(accountId)).id
except Exception:
logger.exception('Getting account ID')
@ -559,7 +559,7 @@ class ServicesPools(ModelHandler):
logger.debug('Image id: %s', imgId)
try:
if imgId != '-1':
image = Image.objects.get(uuid=processUuid(imgId))
image = Image.objects.get(uuid=process_uuid(imgId))
fields['image_id'] = image.id
except Exception:
logger.exception('At image recovering')
@ -571,7 +571,7 @@ class ServicesPools(ModelHandler):
logger.debug('pool_group_id: %s', spgrpId)
try:
if spgrpId != '-1':
spgrp = ServicePoolGroup.objects.get(uuid=processUuid(spgrpId))
spgrp = ServicePoolGroup.objects.get(uuid=process_uuid(spgrpId))
fields['servicesPoolGroup_id'] = spgrp.id
except Exception:
logger.exception('At service pool group recovering')
@ -670,7 +670,7 @@ class ServicesPools(ModelHandler):
logger.debug('Creating from assignable: %s', self._params)
UserServiceManager().create_from_assignable(
item,
User.objects.get(uuid=processUuid(self._params['user_id'])),
User.objects.get(uuid=process_uuid(self._params['user_id'])),
self._params['assignable_id'],
)

View File

@ -39,7 +39,7 @@ from django.utils.translation import gettext as _
from uds.models import UserService, Provider
from uds.core.util.state import State
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.REST.model import DetailHandler
from uds.core.util import ensure
@ -99,7 +99,7 @@ class ServicesUsage(DetailHandler):
)
else:
userServicesQuery = UserService.objects.filter(
deployed_service__service_uuid=processUuid(item)
deployed_service__service_uuid=process_uuid(item)
)
return [
@ -138,7 +138,7 @@ class ServicesUsage(DetailHandler):
userService: UserService
try:
userService = UserService.objects.get(
uuid=processUuid(item), deployed_service__service__provider=parent
uuid=process_uuid(item), deployed_service__service__provider=parent
)
except Exception:
raise self.invalidItemException()

View File

@ -41,7 +41,7 @@ from uds import models
from uds.core import types
from uds.core.util.model import sql_datetime
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.core.util.stats import counters
from uds.core.util.cache import Cache
from uds.core.util.state import State
@ -182,7 +182,7 @@ class System(Handler):
if len(self._args) == 3:
try:
pool = models.ServicePool.objects.get(
uuid=processUuid(self._args[2])
uuid=process_uuid(self._args[2])
)
except Exception:
pool = None
@ -190,7 +190,7 @@ class System(Handler):
if not pool and not self._user.is_admin:
raise AccessDenied()
# Check permission for pool..
if not permissions.hasAccess(
if not permissions.has_access(
self._user, typing.cast('Model', pool), types.permissions.PermissionType.READ
):
raise AccessDenied()

View File

@ -40,7 +40,7 @@ from uds.REST import Handler
from uds.REST import RequestError
from uds import models
from uds.core.managers.crypto import CryptoManager
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.core.util import ensure
logger = logging.getLogger(__name__)
@ -167,7 +167,7 @@ class Tickets(Handler):
# Will raise an exception if no auth found
if authId:
auth = models.Authenticator.objects.get(
uuid=processUuid(authId.lower())
uuid=process_uuid(authId.lower())
)
elif authName:
auth = models.Authenticator.objects.get(name=authName)
@ -213,7 +213,7 @@ class Tickets(Handler):
poolUuid = self.getParam('servicePool')
if poolUuid:
# Check if is pool or metapool
poolUuid = processUuid(poolUuid)
poolUuid = process_uuid(poolUuid)
pool: typing.Union[models.ServicePool, models.MetaPool]
try:

View File

@ -39,7 +39,7 @@ from django.utils.translation import gettext_lazy as _
import uds.core.types.permissions
from uds.core import types, consts, ui
from uds.core.util import permissions, validators, ensure
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds import models
from uds.REST.model import DetailHandler, ModelHandler
@ -63,7 +63,7 @@ class TunnelServers(DetailHandler):
multi = True
q = parent.servers.all().order_by('hostname')
else:
q = parent.servers.filter(uuid=processUuid(item))
q = parent.servers.filter(uuid=process_uuid(item))
res = []
i = None
for i in q:
@ -119,7 +119,7 @@ class TunnelServers(DetailHandler):
def deleteItem(self, parent: 'Model', item: str) -> None:
parent = ensure.is_instance(parent, models.ServerGroup)
try:
parent.servers.remove(models.Server.objects.get(uuid=processUuid(item)))
parent.servers.remove(models.Server.objects.get(uuid=process_uuid(item)))
except Exception:
raise self.invalidItemException() from None
@ -130,7 +130,7 @@ class TunnelServers(DetailHandler):
Custom method that swaps maintenance mode state for a tunnel server
:param item:
"""
item = models.Server.objects.get(uuid=processUuid(id))
item = models.Server.objects.get(uuid=process_uuid(id))
self.ensureAccess(item, uds.core.types.permissions.PermissionType.MANAGEMENT)
item.maintenance_mode = not item.maintenance_mode
item.save()
@ -221,7 +221,7 @@ class Tunnels(ModelHandler):
raise self.invalidItemException('No server specified')
try:
server = models.Server.objects.get(uuid=processUuid(item))
server = models.Server.objects.get(uuid=process_uuid(item))
self.ensureAccess(server, uds.core.types.permissions.PermissionType.READ)
parent.servers.add(server)
except Exception:

View File

@ -39,7 +39,7 @@ from django.utils.translation import gettext as _
from uds import models
import uds.core.types.permissions
from uds.core.util.state import State
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.core.util import log, permissions, ensure
from uds.core.managers.user_service import UserServiceManager
from uds.REST.model import DetailHandler
@ -138,11 +138,11 @@ class AssignedService(DetailHandler):
.prefetch_related('deployed_service', 'publication', 'user')
]
return AssignedService.itemToDict(
parent.assigned_user_services().get(processUuid(uuid=processUuid(item))),
parent.assigned_user_services().get(process_uuid(uuid=process_uuid(item))),
props={
k: v
for k, v in models.Properties.objects.filter(
owner_type='userservice', owner_id=processUuid(item)
owner_type='userservice', owner_id=process_uuid(item)
).values_list('key', 'value')
},
)
@ -181,7 +181,7 @@ class AssignedService(DetailHandler):
def getLogs(self, parent: 'Model', item: str) -> list[typing.Any]:
parent = ensure.is_instance(parent, models.ServicePool)
try:
userService: models.UserService = parent.assigned_user_services().get(uuid=processUuid(item))
userService: models.UserService = parent.assigned_user_services().get(uuid=process_uuid(item))
logger.debug('Getting logs for %s', userService)
return log.get_logs(userService)
except Exception as e:
@ -191,7 +191,7 @@ class AssignedService(DetailHandler):
def deleteItem(self, parent: 'Model', item: str) -> None:
parent = ensure.is_instance(parent, models.ServicePool)
try:
userService: models.UserService = parent.userServices.get(uuid=processUuid(item))
userService: models.UserService = parent.userServices.get(uuid=process_uuid(item))
except Exception as e:
logger.exception('deleteItem')
raise self.invalidItemException() from e
@ -218,8 +218,8 @@ class AssignedService(DetailHandler):
if not item:
raise self.invalidItemException('Only modify is allowed')
fields = self.readFieldsFromParams(['auth_id', 'user_id'])
userService = parent.userServices.get(uuid=processUuid(item))
user = models.User.objects.get(uuid=processUuid(fields['user_id']))
userService = parent.userServices.get(uuid=process_uuid(item))
user = models.User.objects.get(uuid=process_uuid(fields['user_id']))
logStr = f'Changed ownership of service {userService.friendly_name} from {userService.user} to {user.pretty_name} by {self._user.pretty_name}'
@ -242,7 +242,7 @@ class AssignedService(DetailHandler):
log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
def reset(self, parent: 'models.ServicePool', item: str) -> typing.Any:
userService = parent.userServices.get(uuid=processUuid(item))
userService = parent.userServices.get(uuid=process_uuid(item))
UserServiceManager().reset(userService)
@ -264,7 +264,7 @@ class CachedService(AssignedService):
.all()
.prefetch_related('deployed_service', 'publication')
]
cachedService: models.UserService = parent.cached_users_services().get(uuid=processUuid(item))
cachedService: models.UserService = parent.cached_users_services().get(uuid=process_uuid(item))
return AssignedService.itemToDict(cachedService, is_cache=True)
except Exception as e:
logger.exception('getItems')
@ -294,7 +294,7 @@ class CachedService(AssignedService):
def getLogs(self, parent: 'Model', item: str) -> list[typing.Any]:
parent = ensure.is_instance(parent, models.ServicePool)
try:
userService = parent.cached_users_services().get(uuid=processUuid(item))
userService = parent.cached_users_services().get(uuid=process_uuid(item))
logger.debug('Getting logs for %s', item)
return log.get_logs(userService)
except Exception:
@ -357,7 +357,7 @@ class Groups(DetailHandler):
def saveItem(self, parent: 'Model', item: typing.Optional[str]) -> None:
parent = ensure.is_instance(parent, models.ServicePool)
group: models.Group = models.Group.objects.get(uuid=processUuid(self._params['id']))
group: models.Group = models.Group.objects.get(uuid=process_uuid(self._params['id']))
parent.assignedGroups.add(group)
log.log(
parent,
@ -368,7 +368,7 @@ class Groups(DetailHandler):
def deleteItem(self, parent: 'Model', item: str) -> None:
parent = ensure.is_instance(parent, models.ServicePool)
group: models.Group = models.Group.objects.get(uuid=processUuid(self._args[0]))
group: models.Group = models.Group.objects.get(uuid=process_uuid(self._args[0]))
parent.assignedGroups.remove(group)
log.log(
parent,
@ -418,7 +418,7 @@ class Transports(DetailHandler):
def saveItem(self, parent: 'Model', item: typing.Optional[str]) -> None:
parent = ensure.is_instance(parent, models.ServicePool)
transport: models.Transport = models.Transport.objects.get(uuid=processUuid(self._params['id']))
transport: models.Transport = models.Transport.objects.get(uuid=process_uuid(self._params['id']))
parent.transports.add(transport)
log.log(
parent,
@ -429,7 +429,7 @@ class Transports(DetailHandler):
def deleteItem(self, parent: 'Model', item: str) -> None:
parent = ensure.is_instance(parent, models.ServicePool)
transport: models.Transport = models.Transport.objects.get(uuid=processUuid(self._args[0]))
transport: models.Transport = models.Transport.objects.get(uuid=process_uuid(self._args[0]))
parent.transports.remove(transport)
log.log(
parent,
@ -455,7 +455,7 @@ class Publications(DetailHandler):
changeLog = self._params['changelog'] if 'changelog' in self._params else None
if (
permissions.hasAccess(self._user, parent, uds.core.types.permissions.PermissionType.MANAGEMENT)
permissions.has_access(self._user, parent, uds.core.types.permissions.PermissionType.MANAGEMENT)
is False
):
logger.debug('Management Permission failed for user %s', self._user)
@ -482,14 +482,14 @@ class Publications(DetailHandler):
"""
parent = ensure.is_instance(parent, models.ServicePool)
if (
permissions.hasAccess(self._user, parent, uds.core.types.permissions.PermissionType.MANAGEMENT)
permissions.has_access(self._user, parent, uds.core.types.permissions.PermissionType.MANAGEMENT)
is False
):
logger.debug('Management Permission failed for user %s', self._user)
raise self.accessDenied()
try:
ds = models.ServicePoolPublication.objects.get(uuid=processUuid(uuid))
ds = models.ServicePoolPublication.objects.get(uuid=process_uuid(uuid))
ds.cancel()
except Exception as e:
raise ResponseError(str(e)) from e

View File

@ -42,7 +42,7 @@ from uds.core.util.state import State
from uds.core.auths.user import User as aUser
from uds.core.util import log, ensure
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.models import Authenticator, User, Group, ServicePool
from uds.core.managers.crypto import CryptoManager
from uds.REST import RequestError
@ -122,7 +122,7 @@ class Users(DetailHandler):
or _('User')
)
return values
u = parent.users.get(uuid=processUuid(item))
u = parent.users.get(uuid=process_uuid(item))
res = model_to_dict(
u,
fields=(
@ -152,7 +152,7 @@ class Users(DetailHandler):
def getTitle(self, parent: 'Model') -> str:
try:
return _('Users of {0}').format(
Authenticator.objects.get(uuid=processUuid(self._kwargs['parent_id'])).name
Authenticator.objects.get(uuid=process_uuid(self._kwargs['parent_id'])).name
)
except Exception:
return _('Current users')
@ -187,7 +187,7 @@ class Users(DetailHandler):
parent = ensure.is_instance(parent, Authenticator)
user = None
try:
user = parent.users.get(uuid=processUuid(item))
user = parent.users.get(uuid=process_uuid(item))
except Exception:
raise self.invalidItemException() from None
@ -231,7 +231,7 @@ class Users(DetailHandler):
user = parent.users.create(**fields)
else:
auth.modify_user(fields) # Notifies authenticator
user = parent.users.get(uuid=processUuid(item))
user = parent.users.get(uuid=process_uuid(item))
user.__dict__.update(fields)
user.save()
@ -260,7 +260,7 @@ class Users(DetailHandler):
def deleteItem(self, parent: 'Model', item: str):
parent = ensure.is_instance(parent, Authenticator)
try:
user = parent.users.get(uuid=processUuid(item))
user = parent.users.get(uuid=process_uuid(item))
if not self._user.is_admin and (user.is_admin or user.staff_member):
logger.warning(
'Removal of user %s denied due to insufficients rights',
@ -292,8 +292,8 @@ class Users(DetailHandler):
def servicesPools(self, parent: 'Model', item: str):
parent = ensure.is_instance(parent, Authenticator)
uuid = processUuid(item)
user = parent.users.get(uuid=processUuid(uuid))
uuid = process_uuid(item)
user = parent.users.get(uuid=process_uuid(uuid))
res = []
groups = list(user.getGroups())
for i in getPoolsForGroups(groups):
@ -313,8 +313,8 @@ class Users(DetailHandler):
def userServices(self, parent: 'Authenticator', item: str) -> list[dict]:
parent = ensure.is_instance(parent, Authenticator)
uuid = processUuid(item)
user = parent.users.get(uuid=processUuid(uuid))
uuid = process_uuid(item)
user = parent.users.get(uuid=process_uuid(uuid))
res = []
for i in user.userServices.all():
if i.state == State.USABLE:
@ -326,8 +326,8 @@ class Users(DetailHandler):
return res
def cleanRelated(self, parent: 'Authenticator', item: str) -> dict[str, str]:
uuid = processUuid(item)
user = parent.users.get(uuid=processUuid(uuid))
uuid = process_uuid(item)
user = parent.users.get(uuid=process_uuid(uuid))
user.cleanRelated()
return {'status': 'ok'}
@ -343,7 +343,7 @@ class Groups(DetailHandler):
multi = True
q = parent.groups.all().order_by('name')
else:
q = parent.groups.filter(uuid=processUuid(item))
q = parent.groups.filter(uuid=process_uuid(item))
res = []
i = None
for i in q:
@ -456,7 +456,7 @@ class Groups(DetailHandler):
toSave['comments'] = fields['comments'][:255]
toSave['meta_if_any'] = meta_if_any
group = parent.groups.get(uuid=processUuid(item))
group = parent.groups.get(uuid=process_uuid(item))
group.__dict__.update(toSave)
if is_meta:
@ -493,8 +493,8 @@ class Groups(DetailHandler):
def servicesPools(self, parent: 'Model', item: str) -> list[collections.abc.Mapping[str, typing.Any]]:
parent = ensure.is_instance(parent, Authenticator)
uuid = processUuid(item)
group = parent.groups.get(uuid=processUuid(uuid))
uuid = process_uuid(item)
group = parent.groups.get(uuid=process_uuid(uuid))
res: list[collections.abc.Mapping[str, typing.Any]] = []
for i in getPoolsForGroups((group,)):
res.append(
@ -512,9 +512,9 @@ class Groups(DetailHandler):
return res
def users(self, parent: 'Model', item: str) -> list[collections.abc.Mapping[str, typing.Any]]:
uuid = processUuid(item)
uuid = process_uuid(item)
parent = ensure.is_instance(parent, Authenticator)
group = parent.groups.get(uuid=processUuid(uuid))
group = parent.groups.get(uuid=process_uuid(uuid))
def info(user):
return {

View File

@ -47,7 +47,7 @@ from uds.core import exceptions as udsExceptions
from uds.core import types
from uds.core.module import Module
from uds.core.util import log, permissions
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.models import ManagedObjectModel, Network, Tag, TaggingMixin
from uds.REST.utils import rest_result
@ -263,7 +263,7 @@ class BaseModelHandler(Handler):
permission: 'types.permissions.PermissionType',
root: bool = False,
) -> None:
if not permissions.hasAccess(self._user, obj, permission, root):
if not permissions.has_access(self._user, obj, permission, root):
raise self.accessDenied()
def getPermissions(self, obj: models.Model, root: bool = False) -> int:
@ -510,7 +510,7 @@ class DetailHandler(BaseModelHandler):
)
# try to get id
return self.getItems(parent, processUuid(self._args[0]))
return self.getItems(parent, process_uuid(self._args[0]))
if nArgs == 2:
if self._args[0] == GUI:
@ -878,7 +878,7 @@ class ModelHandler(BaseModelHandler):
else:
requiredPermission = types.permissions.PermissionType.READ
if permissions.hasAccess(self._user, item, requiredPermission) is False:
if permissions.has_access(self._user, item, requiredPermission) is False:
logger.debug(
'Permission for user %s does not comply with %s',
self._user,
@ -938,7 +938,7 @@ class ModelHandler(BaseModelHandler):
for item in query:
try:
if (
permissions.hasAccess(
permissions.has_access(
typing.cast('User', self._user),
item,
types.permissions.PermissionType.READ,

View File

@ -103,15 +103,6 @@ class UDSAppConfig(AppConfig):
# pylint: disable=unused-import,import-outside-toplevel
from . import REST
# Ensure notifications table exists on local sqlite db (called "persistent" on settings.py)
# Note: On Notification model change, we must ensure that the table is removed on the migration itself
try:
with connections['persistent'].schema_editor() as schema_editor:
schema_editor.create_model(self.get_model('Notification'))
except Exception: # nosec: intentionally catching all exceptions
# If it fails, it's ok, it just means that it already exists
pass
default_app_config = 'uds.UDSAppConfig'

View File

@ -232,7 +232,7 @@ class OAuth2Authenticator(auths.Authenticator):
if self.publicKey.value.strip() == '':
return []
return [cert.public_key() for cert in fields.getCertificatesFromField(self.publicKey)]
return [cert.public_key() for cert in fields.get_vertificates_from_field(self.publicKey)]
def _codeVerifierAndChallenge(self) -> tuple[str, str]:
"""Generate a code verifier and a code challenge for PKCE

View File

@ -419,7 +419,7 @@ class RegexLdap(auths.Authenticator):
if self._mfaAttr:
attributes = attributes + self.__getAttrsFromField(self._mfaAttr)
user = ldaputil.getFirst(
user = ldaputil.first(
con=self.__connection(),
base=self._ldapBase,
objectClass=self._userClass,
@ -434,7 +434,7 @@ class RegexLdap(auths.Authenticator):
# For example, you can have authentication in an "user" object class and attributes in an "user_attributes" object class.
# Note: This is very rare situation, but it ocurrs :)
if user and self._altClass:
for usr in ldaputil.getAsDict(
for usr in ldaputil.as_dict(
con=self.__connection(),
base=self._ldapBase,
ldapFilter=f'(&(objectClass={self._altClass})({self._userIdAttr}={ldaputil.escape(username)}))',
@ -567,7 +567,7 @@ class RegexLdap(auths.Authenticator):
def search_users(self, pattern: str) -> collections.abc.Iterable[dict[str, str]]:
try:
res = []
for r in ldaputil.getAsDict(
for r in ldaputil.as_dict(
con=self.__connection(),
base=self._ldapBase,
ldapFilter=f'(&(&(objectClass={self._userClass})({self._userIdAttr}={ldaputil.escape(pattern)}*)))',

View File

@ -455,9 +455,9 @@ class SAMLAuthenticator(auths.Authenticator):
}
@decorators.cached(
cachePrefix='idpm',
cachingKeyFnc=CACHING_KEY_FNC,
cacheTimeout=3600 * 24 * 365, # 1 year
prefix='idpm',
key_fnc=CACHING_KEY_FNC,
timeout=3600 * 24 * 365, # 1 year
)
def getIdpMetadataDict(self) -> dict[str, typing.Any]:
if self.idpMetadata.value.startswith('http'):
@ -527,9 +527,9 @@ class SAMLAuthenticator(auths.Authenticator):
}
@decorators.cached(
cachePrefix='spm',
cachingKeyFnc=CACHING_KEY_FNC,
cacheTimeout=3600, # 1 hour
prefix='spm',
key_fnc=CACHING_KEY_FNC,
timeout=3600, # 1 hour
)
def getSpMetadata(self) -> str:
saml_settings = OneLogin_Saml2_Settings(settings=self.oneLoginSettings())

View File

@ -363,7 +363,7 @@ class SimpleLDAPAuthenticator(auths.Authenticator):
if self._mfaAttr:
attributes = attributes + [self._mfaAttr]
return ldaputil.getFirst(
return ldaputil.first(
con=self.__connection(),
base=self._ldapBase,
objectClass=self._userClass,
@ -379,7 +379,7 @@ class SimpleLDAPAuthenticator(auths.Authenticator):
@param groupName: group name to search, using user provided parameters at configuration to map search entries.
@return: None if group name is not found, an dictionary of LDAP entry attributes if found.
"""
return ldaputil.getFirst(
return ldaputil.first(
con=self.__connection(),
base=self._ldapBase,
objectClass=self._groupClass,
@ -394,7 +394,7 @@ class SimpleLDAPAuthenticator(auths.Authenticator):
groups: list[str] = []
filter_ = f'(&(objectClass={self._groupClass})(|({self._memberAttr}={user["_id"]})({self._memberAttr}={user["dn"]})))'
for d in ldaputil.getAsDict(
for d in ldaputil.as_dict(
con=self.__connection(),
base=self._ldapBase,
ldapFilter=filter_,
@ -528,7 +528,7 @@ class SimpleLDAPAuthenticator(auths.Authenticator):
def search_users(self, pattern: str) -> collections.abc.Iterable[dict[str, str]]:
try:
res = []
for r in ldaputil.getAsDict(
for r in ldaputil.as_dict(
con=self.__connection(),
base=self._ldapBase,
ldapFilter=f'(&(objectClass={self._userClass})({self._userIdAttr}={pattern}*))',
@ -550,7 +550,7 @@ class SimpleLDAPAuthenticator(auths.Authenticator):
def search_groups(self, pattern: str) -> collections.abc.Iterable[dict[str, str]]:
try:
res = []
for r in ldaputil.getAsDict(
for r in ldaputil.as_dict(
con=self.__connection(),
base=self._ldapBase,
ldapFilter=f'(&(objectClass={self._groupClass})({self._groupIdAttr}={pattern}*))',

View File

@ -50,7 +50,7 @@ def __loadModules__():
"""
from uds.core import auths
modfinder.dynamicLoadAndRegisterModules(auths.factory(), auths.Authenticator, __name__)
modfinder.dynamically_load_and_register_modules(auths.factory(), auths.Authenticator, __name__)
__loadModules__()

View File

@ -517,13 +517,13 @@ def authenticate_log_login(
request: 'ExtendedHttpRequest',
authenticator: models.Authenticator,
userName: str,
logStr: str = '',
log_string: str = '',
) -> None:
"""
Logs authentication
"""
if logStr == '':
logStr = 'Logged in'
if log_string == '':
log_string = 'Logged in'
authLogger.info(
'|'.join(
@ -532,16 +532,16 @@ def authenticate_log_login(
userName,
request.ip,
request.os.os.name,
logStr,
log_string,
request.META.get('HTTP_USER_AGENT', 'Undefined'),
]
)
)
level = log.LogLevel.INFO if logStr == 'Logged in' else log.LogLevel.ERROR
level = log.LogLevel.INFO if log_string == 'Logged in' else log.LogLevel.ERROR
log.log(
authenticator,
level,
f'user {userName} has {logStr} from {request.ip} where os is {request.os.os.name}',
f'user {userName} has {log_string} from {request.ip} where os is {request.os.os.name}',
log.LogSource.WEB,
)
@ -551,11 +551,11 @@ def authenticate_log_login(
log.log(
user,
level,
f'{logStr} from {request.ip} where OS is {request.os.os.name}',
f'{log_string} from {request.ip} where OS is {request.os.os.name}',
log.LogSource.WEB,
)
except Exception: # nosec: root user is not on any authenticator, will fail with an exception we can ingore
logger.info('Root {logStr} from %s where OS is %s', request.ip, request.os.os.name)
logger.info('Root %s from %s where OS is %s', log_string, request.ip, request.os.os.name)
def auth_log_logout(request: 'ExtendedHttpRequest') -> None:

View File

@ -33,6 +33,9 @@ import logging
import typing
import collections.abc
from django.apps import apps
from django.db import connections
from uds.core.util import singleton
from uds.core.util.log import LogLevel
@ -47,9 +50,29 @@ class NotificationsManager(metaclass=singleton.Singleton):
This class manages alerts and notifications
"""
_initialized: bool = False
def __init__(self):
pass
def _ensure_local_db_exists(self) -> bool:
if not apps.ready:
return False
if self._initialized:
return True
# Ensure notifications table exists on local sqlite db (called "persistent" on settings.py)
# Note: On Notification model change, we must ensure that the table is removed on the migration itself
from uds.models.notifications import Notification # pylint: disable=import-outside-toplevel
try:
with connections['persistent'].schema_editor() as schema_editor:
schema_editor.create_model(Notification)
except Exception: # nosec: intentionally catching all exceptions
# If it fails, it's ok, it just means that it already exists
pass
self._initialized = True
return True
@staticmethod
def manager() -> 'NotificationsManager':
return NotificationsManager() # Singleton pattern will return always the same instance
@ -57,6 +80,10 @@ class NotificationsManager(metaclass=singleton.Singleton):
def notify(self, group: str, identificator: str, level: LogLevel, message: str, *args) -> None:
from uds.models.notifications import Notification # pylint: disable=import-outside-toplevel
# Due to use of local db, we must ensure that it exists (and cannot do it on ready)
if self._ensure_local_db_exists() is False:
return # Not initialized apps yet, so we cannot do anything
# logger.debug(
# 'Notify: %s, %s, %s, %s, [%s]', group, identificator, level, message, args
# )

View File

@ -72,7 +72,7 @@ class ServerManager(metaclass=singleton.Singleton):
def counter_storage(self) -> 'StorageAccess':
# If counters are too old, restart them
if datetime.datetime.now() - self.last_counters_clean > self.MAX_COUNTERS_AGE:
self.clearUnmanagedUsage()
self.clear_unmanaged_usage()
return Storage(self.STORAGE_NAME).map(atomic=True, group='counters')
def property_name(self, user: typing.Optional[typing.Union[str, 'models.User']]) -> str:
@ -81,7 +81,7 @@ class ServerManager(metaclass=singleton.Singleton):
return ServerManager.BASE_PROPERTY_NAME + user
return ServerManager.BASE_PROPERTY_NAME + (str(user.uuid) if user else '_')
def clearUnmanagedUsage(self) -> None:
def clear_unmanaged_usage(self) -> None:
with self.counter_storage() as counters:
counters.clear()
self.last_counters_clean = datetime.datetime.now()

View File

@ -50,13 +50,13 @@ def process_log(server: 'models.Server', data: dict[str, typing.Any]) -> typing.
try:
userService = models.UserService.objects.get(uuid=data['userservice_uuid'])
log.log(
userService, log.LogLevel.fromStr(data['level']), data['message'], source=log.LogSource.SERVER
userService, log.LogLevel.from_str(data['level']), data['message'], source=log.LogSource.SERVER
)
return rest_result(consts.OK)
except models.UserService.DoesNotExist:
pass # If not found, log on server
log.log(server, log.LogLevel.fromStr(data['level']), data['message'], source=log.LogSource.SERVER)
log.log(server, log.LogLevel.from_str(data['level']), data['message'], source=log.LogSource.SERVER)
return rest_result(consts.OK)
@ -139,7 +139,7 @@ def process_logout(server: 'models.Server', data: dict[str, typing.Any]) -> typi
if userService.in_use: # If already logged out, do not add a second logout (windows does this i.e.)
osmanagers.OSManager.logged_out(userService, data['username'])
osManager: typing.Optional[osmanagers.OSManager] = userService.getOsManagerInstance()
if not osManager or osManager.isRemovableOnLogout(userService):
if not osManager or osManager.is_removableOnLogout(userService):
logger.debug('Removable on logout: %s', osManager)
userService.remove()

View File

@ -246,7 +246,9 @@ class StatsManager(metaclass=singleton.Singleton):
return _REVERSE_FLDS_EQUIV[fld]
# Event stats
def add_event(self, owner_type: types.stats.EventOwnerType, owner_id: int, event_type: types.stats.EventType, **kwargs):
def add_event(
self, owner_type: types.stats.EventOwnerType, owner_id: int, event_type: types.stats.EventType, **kwargs
):
"""
Adds a new event stat to database.
@ -271,7 +273,7 @@ class StatsManager(metaclass=singleton.Singleton):
try:
def getKwarg(fld: str) -> str:
def get_kwarg(fld: str) -> str:
val = kwargs.get(fld)
if val is None:
for i in _FLDS_EQUIV[fld]:
@ -280,10 +282,10 @@ class StatsManager(metaclass=singleton.Singleton):
break
return val or ''
fld1 = getKwarg('fld1')
fld2 = getKwarg('fld2')
fld3 = getKwarg('fld3')
fld4 = getKwarg('fld4')
fld1 = get_kwarg('fld1')
fld2 = get_kwarg('fld2')
fld3 = get_kwarg('fld3')
fld4 = get_kwarg('fld4')
StatsEvents.objects.create(
owner_type=owner_type,
@ -302,7 +304,9 @@ class StatsManager(metaclass=singleton.Singleton):
def enumerate_events(
self,
owner_type: typing.Union[types.stats.EventOwnerType, collections.abc.Iterable[types.stats.EventOwnerType]],
owner_type: typing.Union[
types.stats.EventOwnerType, collections.abc.Iterable[types.stats.EventOwnerType]
],
event_type: typing.Union[types.stats.EventType, collections.abc.Iterable[types.stats.EventType]],
**kwargs
) -> 'models.QuerySet[StatsEvents]':

View File

@ -79,7 +79,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
@staticmethod
def get_state_filter(service: 'models.Service') -> Q:
if service.oldMaxAccountingMethod: # If no limits and accounting method is not old one
if service.old_max_accounting_method: # If no limits and accounting method is not old one
# Valid states are: PREPARING, USABLE
states = [State.PREPARING, State.USABLE]
else: # New accounting method selected
@ -290,7 +290,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
State.as_str(cache.state),
State.as_str(cache.os_state),
)
if State.is_runing(state) and cache.isUsable():
if State.is_runing(state) and cache.is_usable():
cache.set_state(State.PREPARING)
# Data will be serialized on makeUnique process
@ -303,7 +303,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
"""
userService.refresh_from_db()
if userService.isPreparing() is False:
if userService.is_preparing() is False:
logger.debug('Cancel requested for a non running operation, performing removal instead')
return self.remove(userService)
@ -334,7 +334,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
with transaction.atomic():
userService = UserService.objects.select_for_update().get(id=userService.id)
operationsLogger.info('Removing userService %a', userService.name)
if userService.isUsable() is False and State.is_removable(userService.state) is False:
if userService.is_usable() is False and State.is_removable(userService.state) is False:
raise OperationException(_('Can\'t remove a non active element'))
userService.set_state(State.REMOVING)
logger.debug("***** The state now is %s *****", State.as_str(userService.state))
@ -350,10 +350,10 @@ class UserServiceManager(metaclass=singleton.Singleton):
return userService
def remove_or_cancel(self, userService: UserService):
if userService.isUsable() or State.is_removable(userService.state):
if userService.is_usable() or State.is_removable(userService.state):
return self.remove(userService)
if userService.isPreparing():
if userService.is_preparing():
return self.cancel(userService)
raise OperationException(
@ -442,7 +442,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
# Out of atomic transaction
if cache:
# Early assign
cache.assignToUser(user)
cache.assign_to(user)
logger.debug(
'Found a cached-ready service from %s for user %s, item %s',
@ -485,7 +485,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
# Out of atomic transaction
if cache:
cache.assignToUser(user)
cache.assign_to(user)
logger.debug(
'Found a cached-preparing service from %s for user %s, item %s',
@ -753,7 +753,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
if (
typeTrans
and t.is_ip_allowed(src_ip)
and typeTrans.supportsOs(os.os)
and typeTrans.supports_os(os.os)
and t.is_os_allowed(os.os)
):
transport_id = t.uuid
@ -818,11 +818,11 @@ class UserServiceManager(metaclass=singleton.Singleton):
if ip:
serviceNotReadyCode = 0x0003
transportInstance = transport.get_instance()
if transportInstance.isAvailableFor(userService, ip):
if transportInstance.is_ip_allowed(userService, ip):
log.log(userService, log.LogLevel.INFO, "User service ready", log.LogSource.WEB)
self.notify_preconnect(
userService,
transportInstance.getConnectionInfo(userService, user, ''),
transportInstance.get_connection_info(userService, user, ''),
)
traceLogger.info(
'READY on service "%s" for user "%s" with transport "%s" (ip:%s)',
@ -839,7 +839,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
transportInstance,
)
message = transportInstance.getCustomAvailableErrorMsg(userService, ip)
message = transportInstance.get_available_error_msg(userService, ip)
log.log(userService, log.LogLevel.WARNING, message, log.LogSource.TRANSPORT)
logger.debug(
'Transport is not ready for user service %s: %s',
@ -864,7 +864,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
transport.name,
ip,
)
raise ServiceNotReadyError(code=serviceNotReadyCode, userService=userService, transport=transport)
raise ServiceNotReadyError(code=serviceNotReadyCode, user_service=userService, transport=transport)
def is_meta_service(self, metaId: str) -> bool:
return metaId[0] == 'M'
@ -968,7 +968,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
typeTrans
and t.get_type()
and t.is_ip_allowed(srcIp)
and typeTrans.supportsOs(os.os)
and typeTrans.supports_os(os.os)
and t.is_os_allowed(os.os)
):
found = (pool, t)

View File

@ -30,13 +30,14 @@
"""
@author: Adolfo Gómez, dkmaster at dkmon dot com
"""
import typing
from django.utils.translation import gettext_lazy as _
from uds.core.util import config as cfg
# Globals for messagging
DO_NOT_REPEAT = cfg.Config.section(cfg.Config.SectionType.GLOBAL).value(
DO_NOT_REPEAT: typing.Final[cfg.Config.Value] = cfg.Config.section(cfg.Config.SectionType.GLOBAL).value(
'Uniqueness',
'10',
help=_('Number of seconds to ignore repeated messages'),

View File

@ -44,6 +44,7 @@ from .config import DO_NOT_REPEAT
logger = logging.getLogger(__name__)
# Note that this thread will be running on the scheduler process
class MessageProcessorThread(BaseThread):
_keep_running: bool = True

View File

@ -163,7 +163,7 @@ class OSManager(Module):
This function can update userService values. Normal operation will be remove machines if this state is not valid
"""
def isRemovableOnLogout(self, userService: 'UserService') -> bool: # pylint: disable=unused-argument
def is_removableOnLogout(self, userService: 'UserService') -> bool: # pylint: disable=unused-argument
"""
If returns true, when actor notifies "logout", UDS will mark service for removal
can be overriden

View File

@ -43,6 +43,7 @@ class ServiceException(UDSException):
"""
Base class for all service exceptions
"""
def __init__(self, *args, **kwargs):
# Eats "kwargs" to avoid "unexpected keyword argument" error
super().__init__(*args)
@ -110,11 +111,17 @@ class ServiceNotReadyError(ServiceException):
"""
code: int
userService: typing.Optional['UserService']
user_service: typing.Optional['UserService']
transport: typing.Optional['Transport']
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.code = kwargs.get('code', 0x0000)
self.userService = kwargs.get('userService', None)
self.transport = kwargs.get('transport', None)
def __init__(
self,
*,
code: int = 0x0000,
user_service: typing.Optional['UserService'] = None,
transport: typing.Optional['Transport'] = None
):
super().__init__()
self.code = code
self.user_service = user_service
self.transport = transport

View File

@ -70,10 +70,10 @@ class Transport(Module):
# Windows
# Macintosh
# Linux
supportedOss: tuple = consts.os.desktopOss # Supported operating systems
supported_oss: tuple = consts.os.desktopOss # Supported operating systems
# If the link to use transport is provided by transport itself
ownLink: bool = False
own_link: bool = False
# Protocol "type". This is not mandatory, but will help
protocol: types.transports.Protocol = types.transports.Protocol.NONE
@ -115,14 +115,14 @@ class Transport(Module):
) -> bool:
return net.test_connectivity(ip, int(port), timeout)
def isAvailableFor(self, userService: 'models.UserService', ip: str) -> bool:
def is_ip_allowed(self, userService: 'models.UserService', ip: str) -> bool:
"""
Checks if the transport is available for the requested destination ip
Override this in yours transports
"""
return False
def getCustomAvailableErrorMsg(self, userService: 'models.UserService', ip: str) -> str:
def get_available_error_msg(self, userService: 'models.UserService', ip: str) -> str:
"""
Returns a customized error message, that will be used when a service fails to check "isAvailableFor"
Override this in yours transports if needed
@ -130,31 +130,31 @@ class Transport(Module):
return f'Not accessible (using service ip {ip})'
@classmethod
def supportsProtocol(cls, protocol: typing.Union[collections.abc.Iterable, str]):
def supports_protocol(cls, protocol: typing.Union[collections.abc.Iterable, str]):
if isinstance(protocol, str):
return protocol.lower() == cls.protocol.lower()
# Not string group of strings
for v in protocol:
if cls.supportsProtocol(v):
if cls.supports_protocol(v):
return True
return False
@classmethod
def supportsOs(cls, osType: types.os.KnownOS) -> bool:
def supports_os(cls, osType: types.os.KnownOS) -> bool:
"""
Helper method to check if transport supports requested operating system.
Class method
"""
return osType in cls.supportedOss
return osType in cls.supported_oss
@classmethod
def providesConnetionInfo(cls) -> bool:
def provides_connetion_info(cls) -> bool:
"""
Helper method to check if transport provides information about connection
"""
return cls.getConnectionInfo is not Transport.getConnectionInfo
return cls.get_connection_info is not Transport.get_connection_info
def getConnectionInfo(
def get_connection_info(
self,
userService: typing.Union['models.UserService', 'models.ServicePool'],
user: 'models.User',
@ -257,7 +257,7 @@ class Transport(Module):
parameters=transport_script.parameters,
)
def getRelativeScript(
def get_relative_script(
self, scriptName: str, params: collections.abc.Mapping[str, typing.Any]
) -> types.transports.TransportScript:
"""Returns a script that will be executed on client, but will be downloaded from server
@ -285,7 +285,7 @@ class Transport(Module):
parameters=params,
)
def getScript(
def get_script(
self,
osName: str,
type: typing.Literal['tunnel', 'direct'],
@ -294,7 +294,7 @@ class Transport(Module):
"""
Returns a script for the given os and type
"""
return self.getRelativeScript(f'scripts/{osName.lower()}/{type}.py', params)
return self.get_relative_script(f'scripts/{osName.lower()}/{type}.py', params)
def getLink(
self,

View File

@ -47,7 +47,7 @@ class Tab(enum.StrEnum):
MFA = gettext_noop('MFA')
@staticmethod
def fromStr(value: typing.Optional[str]) -> typing.Union['Tab', str, None]:
def from_str(value: typing.Optional[str]) -> typing.Union['Tab', str, None]:
"""Returns a Tab from a string
If value is not a valid Tab, returns Tab.PARAMETERS
@ -77,7 +77,7 @@ class FieldType(enum.StrEnum):
INFO = 'internal-info'
@staticmethod
def fromStr(value: str) -> 'FieldType':
def from_str(value: str) -> 'FieldType':
"""Returns a FieldType from a string
If value is not a valid FieldType, returns FieldType.TEXT
@ -105,7 +105,7 @@ class FieldPatternType(enum.StrEnum):
class Filler(typing.TypedDict):
callbackName: str
callback_name: str
parameters: list[str]
function: typing.NotRequired[collections.abc.Callable[..., typing.Any]]

View File

@ -49,7 +49,6 @@ from django.utils.translation import gettext as _
from uds.core import consts, exceptions, types
from uds.core.managers.crypto import UDSK, CryptoManager
from uds.core.util import serializer, validators, ensure
from uds.core.util.decorators import deprecatedClassValue
logger = logging.getLogger(__name__)
@ -305,7 +304,7 @@ class gui:
default=default if not callable(default) else default,
readonly=kwargs.get('readonly'),
value=kwargs.get('value') if kwargs.get('value') is not None else default,
tab=types.ui.Tab.fromStr(kwargs.get('tab')),
tab=types.ui.Tab.from_str(kwargs.get('tab')),
)
@property
@ -951,7 +950,7 @@ class gui:
There is an extra option available for this kind of field:
fills: This options is a dictionary that contains this fields:
* 'callbackName' : Callback name for invocation via the specific
* 'callback_name' : Callback name for invocation via the specific
method xml-rpc. This name is a name we assign to this callback,
and is used to locate the method when callback is invoked from
admin interface.
@ -1003,7 +1002,7 @@ class gui:
resourcePool = gui.ChoiceField(
label=_("Resource Pool"), readonly = False, order = 5,
fills = {
'callbackName' : 'vcFillMachinesFromResource',
'callback_name' : 'vcFillMachinesFromResource',
'function' : VCHelpers.getMachines,
'parameters' : ['vc', 'ev', 'resourcePool']
},
@ -1052,14 +1051,14 @@ class gui:
self._fieldsInfo.choices = gui.convertToChoices(choices or [])
# if has fillers, set them
if fills:
if 'function' not in fills or 'callbackName' not in fills:
if 'function' not in fills or 'callback_name' not in fills:
raise ValueError('Invalid fills parameters')
fnc = fills['function']
fills.pop('function')
self._fieldsInfo.fills = fills
# Store it only if not already present
if fills['callbackName'] not in gui.callbacks:
gui.callbacks[fills['callbackName']] = fnc
if fills['callback_name'] not in gui.callbacks:
gui.callbacks[fills['callback_name']] = fnc
def setChoices(self, values: collections.abc.Iterable[typing.Union[str, types.ui.ChoiceItem]]):
"""

View File

@ -40,11 +40,11 @@ from uds.core.util import ensure
logger = logging.getLogger(__name__)
def validateRegexField(field: ui.gui.TextField, fieldValue: typing.Optional[str] = None):
def validateRegexField(field: ui.gui.TextField, field_value: typing.Optional[str] = None):
"""
Validates the multi line fields refering to attributes
"""
value: str = fieldValue or field.value
value: str = field_value or field.value
if value.strip() == '':
return # Ok, empty

View File

@ -41,6 +41,7 @@ from django.db.utils import OperationalError
from uds.models.cache import Cache as DBCache
from uds.core.util.model import sql_datetime
from uds.core.util import serializer
from uds.core import consts
from .hash import hash_key
@ -52,11 +53,6 @@ class Cache:
hits = 0
misses = 0
# Some aliases
DEFAULT_VALIDITY = 60
SHORT_VALIDITY = 5
LONG_VALIDITY = 3600
_owner: str
_bowner: bytes
@ -155,7 +151,8 @@ class Cache:
self.remove(key)
def clear(self) -> None:
Cache.delete(self._owner)
with transaction.atomic():
Cache.delete(self._owner)
def put(
self,
@ -165,30 +162,26 @@ class Cache:
) -> None:
# logger.debug('Saving key "%s" for cache "%s"' % (skey, self._owner,))
if validity is None:
validity = Cache.DEFAULT_VALIDITY
validity = consts.system.DEFAULT_CACHE_TIMEOUT
key = self.__getKey(skey)
strValue = Cache._serializer(value)
now = sql_datetime()
try:
DBCache.objects.create(
owner=self._owner,
key=key,
value=strValue,
created=now,
validity=validity,
) # @UndefinedVariable
except Exception:
# Remove existing if any and create a new one
with transaction.atomic():
try:
# Already exists, modify it
c: DBCache = DBCache.objects.get(pk=key) # @UndefinedVariable
c.owner = self._owner
c.key = key
c.value = strValue
c.created = now
c.validity = validity
c.save()
except transaction.TransactionManagementError:
logger.debug('Transaction in course, cannot store value')
# Remove if existing
DBCache.objects.filter(pk=key).delete()
# And create a new one
DBCache.objects.create(
owner=self._owner,
key=key,
value=strValue,
created=now,
validity=validity,
) # @UndefinedVariable
return # And return
except Exception as e:
logger.debug('Transaction in course, cannot store value: %s', e)
def __setitem__(self, key: typing.Union[str, bytes], value: typing.Any) -> None:
"""
@ -200,26 +193,29 @@ class Cache:
# logger.debug('Refreshing key "%s" for cache "%s"' % (skey, self._owner,))
try:
key = self.__getKey(skey)
c = DBCache.objects.get(pk=key) # @UndefinedVariable
c = DBCache.objects.get(pk=key)
c.created = sql_datetime()
c.save()
except DBCache.DoesNotExist: # @UndefinedVariable
except DBCache.DoesNotExist:
logger.debug('Can\'t refresh cache key %s because it doesn\'t exists', skey)
return
@staticmethod
def purge() -> None:
DBCache.objects.all().delete() # @UndefinedVariable
with transaction.atomic():
DBCache.objects.all().delete()
@staticmethod
def cleanUp() -> None:
DBCache.cleanUp() # @UndefinedVariable
def purge_outdated() -> None:
# purge_outdated has a transaction.atomic() inside
DBCache.purge_outdated()
@staticmethod
def delete(owner: typing.Optional[str] = None) -> None:
# logger.info("Deleting cache items")
if owner is None:
objects = DBCache.objects.all() # @UndefinedVariable
else:
objects = DBCache.objects.filter(owner=owner) # @UndefinedVariable
objects.delete()
with transaction.atomic():
# logger.info("Deleting cache items")
if owner is None:
objects = DBCache.objects.all()
else:
objects = DBCache.objects.filter(owner=owner)
objects.delete()

View File

@ -68,7 +68,7 @@ class CalendarChecker:
def __init__(self, calendar: Calendar) -> None:
self.calendar = calendar
def _updateData(self, dtime: datetime.datetime) -> bitarray.bitarray:
def _gen_state_on_minute(self, dtime: datetime.datetime) -> bitarray.bitarray:
logger.debug('Updating %s', dtime)
CalendarChecker.updates += 1
@ -115,19 +115,19 @@ class CalendarChecker:
return data
def _updateEvents(
self, checkFrom: datetime.datetime, startEvent: bool = True
def _update_events(
self, check_from: datetime.datetime, startEvent: bool = True
) -> typing.Optional[datetime.datetime]:
next_event = None
for rule in self.calendar.rules.all():
# logger.debug('RULE: start = {}, checkFrom = {}, end'.format(rule.start.date(), checkFrom.date()))
if rule.end is not None and rule.end < checkFrom.date():
if rule.end is not None and rule.end < check_from.date():
continue
# logger.debug('Rule in check interval...')
if startEvent:
event = rule.as_rrule().after(checkFrom) # At start
event = rule.as_rrule().after(check_from) # At start
else:
event = rule.as_rrule_end().after(checkFrom) # At end
event = rule.as_rrule_end().after(check_from) # At end
if event and (next_event is None or next_event > event):
next_event = event
@ -143,66 +143,67 @@ class CalendarChecker:
dtime = sql_datetime()
# memcached access
memCache = caches['memory']
memcache_storage = caches['memory']
# First, try to get data from cache if it is valid
cacheKey = CalendarChecker._cacheKey(
cache_key = CalendarChecker._gen_cache_key(
str(self.calendar.modified) + str(dtime.date()) + (self.calendar.uuid or '') + 'checker'
)
# First, check "local memory cache", and if not found, from DB cache
cached = memCache.get(cacheKey)
cached = memcache_storage.get(cache_key)
if not cached:
cached = CalendarChecker.cache.get(cacheKey, None)
cached = CalendarChecker.cache.get(cache_key, None)
if cached:
memCache.set(cacheKey, cached, ONE_DAY)
memcache_storage.set(cache_key, cached, ONE_DAY)
# state_per_minute is a bitarray with 24*60 bits, one for each minute of the day
if cached:
data = bitarray.bitarray() # Empty bitarray
data.frombytes(cached)
state_on_minute = bitarray.bitarray() # Empty bitarray
state_on_minute.frombytes(cached)
CalendarChecker.cache_hit += 1
else:
data = self._updateData(dtime)
state_on_minute = self._gen_state_on_minute(dtime)
# Now data can be accessed as an array of booleans.
# Store data on persistent cache
CalendarChecker.cache.put(cacheKey, data.tobytes(), ONE_DAY)
memCache.set(cacheKey, data.tobytes(), ONE_DAY)
CalendarChecker.cache.put(cache_key, state_on_minute.tobytes(), ONE_DAY)
memcache_storage.set(cache_key, state_on_minute.tobytes(), ONE_DAY)
return bool(data[dtime.hour * 60 + dtime.minute])
return bool(state_on_minute[dtime.hour * 60 + dtime.minute])
def nextEvent(
def next_event(
self,
checkFrom: typing.Optional[datetime.datetime] = None,
startEvent: bool = True,
check_from: typing.Optional[datetime.datetime] = None,
start_event: bool = True,
offset: typing.Optional[datetime.timedelta] = None,
) -> typing.Optional[datetime.datetime]:
"""
Returns next event for this interval
"""
logger.debug('Obtaining nextEvent')
if not checkFrom:
checkFrom = sql_datetime()
if not check_from:
check_from = sql_datetime()
if not offset:
offset = datetime.timedelta(minutes=0)
cacheKey = CalendarChecker._cacheKey(
cache_key = CalendarChecker._gen_cache_key(
str(self.calendar.modified)
+ (self.calendar.uuid or '')
+ str(offset.seconds)
+ str(checkFrom)
+ str(check_from)
+ 'event'
+ ('x' if startEvent else '_')
+ ('x' if start_event else '_')
)
next_event: typing.Optional[datetime.datetime] = CalendarChecker.cache.get(cacheKey, None)
next_event: typing.Optional[datetime.datetime] = CalendarChecker.cache.get(cache_key, None)
if not next_event:
logger.debug('Regenerating cached nextEvent')
next_event = self._updateEvents(
checkFrom + offset, startEvent
next_event = self._update_events(
check_from + offset, start_event
) # We subtract on check-in, so we can take into account for next execution the "offset" on start & end (just the inverse of current, so we subtract it)
if next_event:
next_event += offset
CalendarChecker.cache.put(cacheKey, next_event, 3600)
CalendarChecker.cache.put(cache_key, next_event, 3600)
else:
logger.debug('nextEvent cache hit')
CalendarChecker.hits += 1
@ -213,7 +214,7 @@ class CalendarChecker:
return f'Calendar checker for {self.calendar}'
@staticmethod
def _cacheKey(key: str) -> str:
def _gen_cache_key(key: str) -> str:
# Returns a valid cache key for all caching backends (memcached, redis, or whatever)
# Simple, fastest algorithm is to use md5
return hashlib.md5(key.encode('utf-8')).hexdigest() # nosec simple fast algorithm for cache keys

View File

@ -88,7 +88,7 @@ class Config:
OTHER = 'Other'
@staticmethod
def fromStr(value: str) -> 'Config.SectionType':
def from_str(value: str) -> 'Config.SectionType':
if value in list(Config.SectionType.values()):
return Config.SectionType(value)
return Config.SectionType(Config.SectionType.OTHER)
@ -309,9 +309,9 @@ class Config:
continue
logger.debug('%s.%s:%s,%s', cfg.section, cfg.key, cfg.value, cfg.field_type)
if cfg.crypt:
val = Config.section(Config.SectionType.fromStr(cfg.section)).valueCrypt(cfg.key)
val = Config.section(Config.SectionType.from_str(cfg.section)).valueCrypt(cfg.key)
else:
val = Config.section(Config.SectionType.fromStr(cfg.section)).value(cfg.key)
val = Config.section(Config.SectionType.from_str(cfg.section)).value(cfg.key)
yield val
@staticmethod

View File

@ -28,6 +28,7 @@
"""
Author: Adolfo Gómez, dkmaster at dkmon dot com
"""
import dataclasses
import functools
import hashlib
import inspect
@ -51,8 +52,9 @@ RT = typing.TypeVar('RT')
blockCache = Cache('uds:blocker') # One year
# Caching statistics
class StatsType:
class CacheStats:
__slots__ = ('hits', 'misses', 'total', 'start_time', 'saving_time')
hits: int
@ -97,10 +99,11 @@ class StatsType:
)
stats = StatsType()
stats = CacheStats()
class CacheInfo(typing.NamedTuple):
@dataclasses.dataclass
class CacheInfo:
"""
Cache info
"""
@ -134,7 +137,7 @@ def deprecated(func: collections.abc.Callable[..., RT]) -> collections.abc.Calla
return new_func
def deprecatedClassValue(newVarName: str) -> collections.abc.Callable:
def deprecated_class_value(newVarName: str) -> collections.abc.Callable:
"""
Decorator to make a class value deprecated and warn about it
@ -170,7 +173,7 @@ def deprecatedClassValue(newVarName: str) -> collections.abc.Callable:
return functools.partial(innerDeprecated, newVarName=newVarName)
def ensureConnected(func: collections.abc.Callable[..., RT]) -> collections.abc.Callable[..., RT]:
def ensure_connected(func: collections.abc.Callable[..., RT]) -> collections.abc.Callable[..., RT]:
"""This decorator calls "connect" method of the class of the wrapped object"""
@functools.wraps(func)
@ -184,43 +187,39 @@ def ensureConnected(func: collections.abc.Callable[..., RT]) -> collections.abc.
# Decorator for caching
# This decorator will cache the result of the function for a given time, and given parameters
def cached(
cachePrefix: str,
cacheTimeout: typing.Union[collections.abc.Callable[[], int], int] = -1,
cachingArgs: typing.Optional[typing.Union[collections.abc.Iterable[int], int]] = None,
cachingKWArgs: typing.Optional[typing.Union[collections.abc.Iterable[str], str]] = None,
cachingKeyFnc: typing.Optional[collections.abc.Callable[[typing.Any], str]] = None,
prefix: str,
timeout: typing.Union[collections.abc.Callable[[], int], int] = -1,
args: typing.Optional[typing.Union[collections.abc.Iterable[int], int]] = None,
kwargs: typing.Optional[typing.Union[collections.abc.Iterable[str], str]] = None,
key_fnc: typing.Optional[collections.abc.Callable[[typing.Any], str]] = None,
) -> collections.abc.Callable[[collections.abc.Callable[..., RT]], collections.abc.Callable[..., RT]]:
"""Decorator that give us a "quick& clean" caching feature on db.
The "cached" element must provide a "cache" variable, which is a cache object
Parameters:
cachePrefix: Prefix to use for cache key
cacheTimeout: Timeout for cache
cachingArgs: List of arguments to use for cache key (i.e. [0, 1] will use first and second argument for cache key, 0 will use "self" if a method, and 1 will use first argument)
cachingKWArgs: List of keyword arguments to use for cache key (i.e. ['a', 'b'] will use "a" and "b" keyword arguments for cache key)
cachingKeyFnc: Function to use for cache key. If provided, this function will be called with the same arguments as the wrapped function, and must return a string to use as cache key
prefix: Prefix to use for cache key
timeout: Timeout for cache
args: List of arguments to use for cache key (i.e. [0, 1] will use first and second argument for cache key, 0 will use "self" if a method, and 1 will use first argument)
kwargs: List of keyword arguments to use for cache key (i.e. ['a', 'b'] will use "a" and "b" keyword arguments for cache key)
key_fnc: Function to use for cache key. If provided, this function will be called with the same arguments as the wrapped function, and must return a string to use as cache key
Note:
If cachingArgs and cachingKWArgs are not provided, the whole arguments will be used for cache key
If args and kwargs are not provided, all the arguments will be used for the cache key
"""
cacheTimeout = Cache.DEFAULT_VALIDITY if cacheTimeout == -1 else cacheTimeout
cachingArgList: list[int] = (
[cachingArgs] if isinstance(cachingArgs, int) else list(cachingArgs or [])
)
cachingKwargList: list[str] = (
isinstance(cachingKWArgs, str) and [cachingKWArgs] or list(cachingKWArgs or [])
)
timeout = consts.system.DEFAULT_CACHE_TIMEOUT if timeout == -1 else timeout
args_list: list[int] = [args] if isinstance(args, int) else list(args or [])
kwargs_list: list[str] = isinstance(kwargs, str) and [kwargs] or list(kwargs or [])
lock = threading.Lock()
hits = misses = exec_time = 0
def allowCacheDecorator(fnc: collections.abc.Callable[..., RT]) -> collections.abc.Callable[..., RT]:
def allow_cache_decorator(fnc: collections.abc.Callable[..., RT]) -> collections.abc.Callable[..., RT]:
# If no caching args and no caching kwargs, we will cache the whole call
# If no parameters provider, try to infer them from function signature
try:
if cachingArgList is None and cachingKwargList is None:
if args_list is None and kwargs_list is None:
for pos, (paramName, param) in enumerate(inspect.signature(fnc).parameters.items()):
if paramName == 'self':
continue
@ -229,42 +228,43 @@ def cached(
inspect.Parameter.POSITIONAL_OR_KEYWORD,
inspect.Parameter.POSITIONAL_ONLY,
):
cachingArgList.append(pos)
args_list.append(pos)
elif param.kind in (
inspect.Parameter.KEYWORD_ONLY,
inspect.Parameter.POSITIONAL_OR_KEYWORD,
):
cachingKwargList.append(paramName)
kwargs_list.append(paramName)
# *args and **kwargs are not supported
except Exception: # nosec
pass # Not inspectable, no caching
keyFnc = cachingKeyFnc or (lambda x: fnc.__name__)
lkey_fnc = key_fnc or (lambda x: fnc.__name__)
@functools.wraps(fnc)
def wrapper(*args, **kwargs) -> RT:
with transaction.atomic(): # On its own transaction (for cache operations, that are on DB)
nonlocal hits, misses, exec_time
keyHash = hashlib.sha256(usedforsecurity=False)
for i in cachingArgList:
nonlocal hits, misses, exec_time
with transaction.atomic(): # On its own transaction (for cache operations, that are on DB)
key_hash = hashlib.sha256(usedforsecurity=False)
for i in args_list:
if i < len(args):
keyHash.update(str(args[i]).encode('utf-8'))
for s in cachingKwargList:
keyHash.update(str(kwargs.get(s, '')).encode('utf-8'))
key_hash.update(str(args[i]).encode('utf-8'))
for s in kwargs_list:
key_hash.update(str(kwargs.get(s, '')).encode('utf-8'))
# Append key data
keyHash.update(keyFnc(args[0] if len(args) > 0 else fnc.__name__).encode('utf-8'))
key_hash.update(lkey_fnc(args[0] if len(args) > 0 else fnc.__name__).encode('utf-8'))
# compute cache key
cacheKey = f'{cachePrefix}-{keyHash.hexdigest()}'
cache_key = f'{prefix}-{key_hash.hexdigest()}'
# Get cache from object, or create a new one (generic, common to all objects)
cache = getattr(args[0], 'cache', None) or Cache('functionCache')
cache: 'Cache' = getattr(args[0], 'cache', None) or Cache('functionCache')
# if cacheTimeout is a function, call it
timeout = cacheTimeout() if callable(cacheTimeout) else cacheTimeout
# if timeout is a function, call it
ltimeout = timeout() if callable(timeout) else timeout
data: typing.Any = None
if not kwargs.get('force', False) and timeout > 0:
data = cache.get(cacheKey)
# If misses is 0, we are starting, so we will not try to get from cache
if not kwargs.get('force', False) and ltimeout > 0 and misses > 0:
data = cache.get(cache_key)
if data:
with lock:
hits += 1
@ -287,11 +287,11 @@ def cached(
try:
# Maybe returned data is not serializable. In that case, cache will fail but no harm is done with this
cache.put(cacheKey, data, timeout)
cache.put(cache_key, data, ltimeout)
except Exception as e:
logger.debug(
'Data for %s is not serializable on call to %s, not cached. %s (%s)',
cacheKey,
cache_key,
fnc.__name__,
data,
e,
@ -315,7 +315,7 @@ def cached(
return wrapper
return allowCacheDecorator
return allow_cache_decorator
# Decorator to execute method in a thread
@ -351,6 +351,7 @@ def blocker(
"""
from uds.REST.exceptions import AccessDenied # To avoid circular references
max_failures = max_failures or consts.system.ALLOWED_FAILS
def decorator(f: collections.abc.Callable[..., RT]) -> collections.abc.Callable[..., RT]:
@ -392,7 +393,9 @@ def blocker(
return decorator
def profile(log_file: typing.Optional[str] = None) -> collections.abc.Callable[[collections.abc.Callable[..., RT]], collections.abc.Callable[..., RT]]:
def profile(
log_file: typing.Optional[str] = None,
) -> collections.abc.Callable[[collections.abc.Callable[..., RT]], collections.abc.Callable[..., RT]]:
"""
Decorator that will profile the wrapped function and log the results to the provided file
@ -402,6 +405,7 @@ def profile(log_file: typing.Optional[str] = None) -> collections.abc.Callable[[
Returns:
Decorator
"""
def decorator(f: collections.abc.Callable[..., RT]) -> collections.abc.Callable[..., RT]:
@functools.wraps(f)
def wrapper(*args: typing.Any, **kwargs: typing.Any) -> RT:
@ -422,4 +426,4 @@ def profile(log_file: typing.Optional[str] = None) -> collections.abc.Callable[[
return wrapper
return decorator
return decorator

View File

@ -51,7 +51,7 @@ logger = logging.getLogger(__name__)
# ******************************************************
def _serverGroupValues(
def _server_group_values(
types_: collections.abc.Iterable[types.servers.ServerType], subtype: typing.Optional[str] = None
) -> list[types.ui.ChoiceItem]:
fltr = models.ServerGroup.objects.filter(
@ -66,7 +66,7 @@ def _serverGroupValues(
]
def _serverGrpFromField(fld: ui.gui.ChoiceField) -> models.ServerGroup:
def _server_group_from_field(fld: ui.gui.ChoiceField) -> models.ServerGroup:
try:
return models.ServerGroup.objects.get(uuid=fld.value)
except Exception:
@ -74,25 +74,25 @@ def _serverGrpFromField(fld: ui.gui.ChoiceField) -> models.ServerGroup:
# Tunnel server field
def tunnelField() -> ui.gui.ChoiceField:
def tunnel_field() -> ui.gui.ChoiceField:
"""Returns a field to select a tunnel server"""
return ui.gui.ChoiceField(
label=_('Tunnel server'),
order=1,
tooltip=_('Tunnel server to use'),
required=True,
choices=functools.partial(_serverGroupValues, [types.servers.ServerType.TUNNEL]),
choices=functools.partial(_server_group_values, [types.servers.ServerType.TUNNEL]),
tab=types.ui.Tab.TUNNEL,
)
def getTunnelFromField(fld: ui.gui.ChoiceField) -> models.ServerGroup:
def get_tunnel_from_field(fld: ui.gui.ChoiceField) -> models.ServerGroup:
"""Returns a tunnel server from a field"""
return _serverGrpFromField(fld)
return _server_group_from_field(fld)
# Server group field
def serverGroupField(
def server_group_field(
type: typing.Optional[list[types.servers.ServerType]] = None,
subtype: typing.Optional[str] = None,
tab: typing.Optional[types.ui.Tab] = None,
@ -112,22 +112,22 @@ def serverGroupField(
order=2,
tooltip=_('Server group to use'),
required=True,
choices=functools.partial(_serverGroupValues, type, subtype), # So it gets evaluated at runtime
choices=functools.partial(_server_group_values, type, subtype), # So it gets evaluated at runtime
tab=tab,
)
def getServerGroupFromField(fld: ui.gui.ChoiceField) -> models.ServerGroup:
def get_server_group_from_field(fld: ui.gui.ChoiceField) -> models.ServerGroup:
"""Returns a server group from a field
Args:
fld: Field to get server group from
"""
return _serverGrpFromField(fld)
return _server_group_from_field(fld)
# Ticket validity time field (for http related tunnels)
def tunnelTicketValidityField() -> ui.gui.NumericField:
def tunnel_ricket_validity_field() -> ui.gui.NumericField:
return ui.gui.NumericField(
length=3,
label=_('Ticket Validity'),
@ -143,7 +143,7 @@ def tunnelTicketValidityField() -> ui.gui.NumericField:
# Tunnel wait time (for uds client related tunnels)
def tunnelTunnelWait(order: int = 2) -> ui.gui.NumericField:
def tunnel_runnel_wait(order: int = 2) -> ui.gui.NumericField:
return ui.gui.NumericField(
length=3,
label=_('Tunnel wait time'),
@ -158,12 +158,12 @@ def tunnelTunnelWait(order: int = 2) -> ui.gui.NumericField:
# Get certificates from field
def getCertificatesFromField(
field: ui.gui.TextField, fieldValue: typing.Optional[str] = None
def get_vertificates_from_field(
field: ui.gui.TextField, field_value: typing.Optional[str] = None
) -> list['Certificate']:
# Get certificates in self.publicKey.value, encoded as PEM
# Return a list of certificates in DER format
value = (fieldValue or field.value).strip()
value = (field_value or field.value).strip()
if value == '':
return []

View File

@ -43,7 +43,7 @@ if typing.TYPE_CHECKING:
logger = logging.getLogger(__name__)
def udsLink(request: 'HttpRequest', ticket: str, scrambler: str) -> str:
def uds_link(request: 'HttpRequest', ticket: str, scrambler: str) -> str:
# Removed http support, so only udss:// links are generated
# If we have a scheme, remove it
@ -54,7 +54,7 @@ def udsLink(request: 'HttpRequest', ticket: str, scrambler: str) -> str:
return f'udss://{rel}{ticket}/{scrambler}'
def udsAccessLink(
def uds_access_link(
request: 'HttpRequest', # pylint: disable=unused-argument
serviceId: str,
transportId: typing.Optional[str],
@ -65,7 +65,7 @@ def udsAccessLink(
return f'udsa://{serviceId}/{transportId or "meta"}'
def parseDate(dateToParse) -> datetime.date:
def parse_date(dateToParse) -> datetime.date:
if get_language() == 'fr':
date_format = '%d/%m/%Y'
else:
@ -76,7 +76,7 @@ def parseDate(dateToParse) -> datetime.date:
return datetime.datetime.strptime(dateToParse, date_format).date()
def dateToLiteral(date) -> str:
def date_to_literal(date) -> str:
# Fix for FR lang for datepicker
if get_language() == 'fr':
date = date.strftime('%d/%m/%Y')
@ -86,12 +86,14 @@ def dateToLiteral(date) -> str:
return date
def extractKey(dictionary: dict, key: typing.Any, **kwargs) -> str:
format_ = kwargs.get('format', '{0}')
default = kwargs.get('default', '')
def extract_key(
dictionary: dict, key: typing.Any, fmt: typing.Optional[str] = None, default: typing.Any = None
):
fmt = fmt or '{0}'
default = default or ''
if key in dictionary:
value = format_.format(dictionary[key])
value = fmt.format(dictionary[key])
del dictionary[key]
else:
value = default

View File

@ -193,7 +193,7 @@ def connection(
raise LDAPError(_('Unknown error'))
def getAsDict(
def as_dict(
con: 'LDAPObject',
base: str,
ldapFilter: str,
@ -249,7 +249,7 @@ def getAsDict(
yield dct
def getFirst(
def first(
con: 'LDAPObject',
base: str,
objectClass: str,
@ -271,7 +271,7 @@ def getFirst(
ldapFilter = f'(&(objectClass={objectClass})({field}={value}))'
try:
obj = next(getAsDict(con, base, ldapFilter, attrList, sizeLimit))
obj = next(as_dict(con, base, ldapFilter, attrList, sizeLimit))
except StopIteration:
return None # None found
@ -292,14 +292,14 @@ def recursive_delete(con: 'LDAPObject', base_dn: str) -> None:
con.delete_s(base_dn)
def getRootDSE(con: 'LDAPObject') -> typing.Optional[LDAPResultType]:
def get_root_dse(con: 'LDAPObject') -> typing.Optional[LDAPResultType]:
"""
Gets the root DSE of the LDAP server
@param cont: Connection to LDAP server
@return: None if root DSE is not found, an dictionary of LDAP entry attributes if found (all in unicode on py2, str on py3).
"""
return next(
getAsDict(
as_dict(
con=con,
base='',
ldapFilter='(objectClass=*)',

View File

@ -73,7 +73,7 @@ class LogLevel(enum.IntEnum):
return self.name
@staticmethod
def fromStr(level: str) -> 'LogLevel':
def from_str(level: str) -> 'LogLevel':
try:
return LogLevel[level.upper()]
except KeyError:

View File

@ -61,7 +61,7 @@ class TimeTrack:
misses: typing.ClassVar[int] = 0
@staticmethod
def _fetchSqlDatetime() -> datetime.datetime:
def _fetch_sql_datetime() -> datetime.datetime:
"""Returns the current date/time of the database server.
We use this time as a method to keep all operations between different servers in sync.
@ -97,7 +97,7 @@ class TimeTrack:
if diff > datetime.timedelta(seconds=CACHE_TIME_TIMEOUT) or diff < datetime.timedelta(seconds=0):
TimeTrack.last_check = now
TimeTrack.misses += 1
TimeTrack.cached_time = TimeTrack._fetchSqlDatetime()
TimeTrack.cached_time = TimeTrack._fetch_sql_datetime()
else:
TimeTrack.hits += 1
return TimeTrack.cached_time + (now - TimeTrack.last_check)
@ -135,7 +135,7 @@ def generate_uuid(obj: typing.Any = None) -> str:
return CryptoManager().uuid(obj=obj).lower()
def processUuid(uuid: str) -> str:
def process_uuid(uuid: str) -> str:
if isinstance(uuid, bytes):
uuid = uuid.decode('utf8') # type: ignore
return uuid.lower()

View File

@ -55,7 +55,7 @@ if typing.TYPE_CHECKING:
from uds.core.util.factory import ModuleFactory
def loadModulesUrls() -> list[typing.Any]:
def get_urlpatterns_from_modules() -> list[typing.Any]:
"""Loads dipatcher modules urls to add to django urlpatterns
Returns:
@ -86,7 +86,7 @@ def loadModulesUrls() -> list[typing.Any]:
return patterns
def importModules(modName: str, *, packageName: typing.Optional[str] = None) -> None:
def import_modules(modName: str, *, packageName: typing.Optional[str] = None) -> None:
"""Dinamycally import children of package
Args:
@ -117,7 +117,7 @@ def importModules(modName: str, *, packageName: typing.Optional[str] = None) ->
importlib.invalidate_caches()
def dynamicLoadAndRegisterPackages(
def dynamically_load_and_register_packages(
adder: collections.abc.Callable[[type[V]], None],
type_: type[V],
modName: str,
@ -143,7 +143,7 @@ def dynamicLoadAndRegisterPackages(
return cls.__name__.startswith('MyClass')
'''
# Ensures all modules under modName (and optionally packageName) are imported
importModules(modName, packageName=packageName)
import_modules(modName, packageName=packageName)
checkFnc = checker or (lambda x: True)
@ -172,8 +172,8 @@ def dynamicLoadAndRegisterPackages(
logger.info('* Done Registering %s', modName)
def dynamicLoadAndRegisterModules(
factory: 'ModuleFactory',
def dynamically_load_and_register_modules(
factory: 'ModuleFactory[T]',
type_: type[T],
modName: str,
) -> None:
@ -186,6 +186,6 @@ def dynamicLoadAndRegisterModules(
type_ (type[T]): Type of the objects to load
modName (str): Name of the package to load
'''
dynamicLoadAndRegisterPackages(
dynamically_load_and_register_packages(
factory.insert, type_, modName, checker=lambda x: not x.is_base
)

View File

@ -30,12 +30,12 @@
"""
@author: Adolfo Gómez, dkmaster at dkmon dot com
"""
import re
import socket
import logging
import typing
import collections.abc
import ipaddress
import logging
import re
import socket
import typing
class IpType(typing.NamedTuple):

View File

@ -55,14 +55,14 @@ def detect_os(
# First, try to detect from Sec-Ch-Ua-Platform
# Remember all Sec... headers are only available on secure connections
secChUaPlatform = headers.get('Sec-Ch-Ua-Platform')
sec_ch_ua_platform = headers.get('Sec-Ch-Ua-Platform')
found = types.os.KnownOS.UNKNOWN
if secChUaPlatform is not None:
if sec_ch_ua_platform is not None:
# Strip initial and final " chars if present
secChUaPlatform = secChUaPlatform.strip('"')
sec_ch_ua_platform = sec_ch_ua_platform.strip('"')
for os in consts.os.KNOWN_OS_LIST:
if secChUaPlatform in os.value:
if sec_ch_ua_platform in os.value:
found = os
break
else: # Try to detect from User-Agent
@ -77,10 +77,10 @@ def detect_os(
res.os = found
# Try to detect browser from Sec-Ch-Ua first
secChUa = headers.get('Sec-Ch-Ua')
if secChUa is not None:
sec_ch_ua = headers.get('Sec-Ch-Ua')
if sec_ch_ua is not None:
for browser in consts.os.known_browsers:
if browser in secChUa:
if browser in sec_ch_ua:
res.browser = browser
break
else:

View File

@ -87,7 +87,7 @@ def effective_permissions(
return PermissionType.NONE
def addUserPermission(
def add_user_permission(
user: 'models.User',
obj: 'Model',
permission: PermissionType = PermissionType.READ,
@ -101,7 +101,7 @@ def addUserPermission(
)
def addGroupPermission(
def add_group_permission(
group: 'models.Group',
obj: 'Model',
permission: PermissionType = PermissionType.READ,
@ -114,7 +114,7 @@ def addGroupPermission(
)
def hasAccess(
def has_access(
user: 'models.User',
obj: 'Model',
permission: PermissionType = PermissionType.ALL,
@ -123,7 +123,7 @@ def hasAccess(
return effective_permissions(user, obj, for_type).includes(permission)
def revokePermissionById(permUUID: str) -> None:
def revoke_permission_by_id(permUUID: str) -> None:
"""Revokes a permision by its uuid
Arguments:

View File

@ -28,6 +28,7 @@
"""
Author: Adolfo Gómez, dkmaster at dkmon dot com
"""
import time
import typing
import collections.abc
import functools
@ -35,8 +36,9 @@ import functools
import dns.resolver
import dns.reversename
from uds.core.util.decorators import cached
@functools.lru_cache(maxsize=512) # Limit the memory used by this cache (512 items)
@cached(prefix='resolver.resolve', timeout=60) # Cache for 1 hour
def resolve(hostname: str) -> list[str]:
"""
Resolves a hostname to a list of ips
@ -50,7 +52,7 @@ def resolve(hostname: str) -> list[str]:
pass
return ips
@functools.lru_cache(maxsize=512) # Limit the memory used by this cache (512 items)
@cached(prefix='resolver.reverse', timeout=60) # Cache for 1 hour
def reverse(ip: str) -> list[str]:
"""
Resolves an ip to a list of hostnames

View File

@ -68,27 +68,27 @@ def match(
And the literals will be ignored
"""
arg_list = list(arg_list) # ensure it is a list
for matcher in args:
if len(arg_list) != len(matcher[0]):
for pattern, function in args:
if len(arg_list) != len(pattern):
continue
# Check if all the arguments match
doMatch = True
matched = True
for i, arg in enumerate(arg_list):
if matcher[0][i].startswith('<') and matcher[0][i].endswith('>'):
if pattern[i].startswith('<') and pattern[i].endswith('>'):
continue
if arg != matcher[0][i]:
doMatch = False
if arg != pattern[i]:
matched = False
break
if doMatch:
if matched:
# All the arguments match, call the callback
return matcher[1](
return function(
*[
arg
for i, arg in enumerate(arg_list)
if matcher[0][i].startswith('<') and matcher[0][i].endswith('>')
if pattern[i].startswith('<') and pattern[i].endswith('>')
]
)

View File

@ -86,59 +86,59 @@ VALID_STATES = [USABLE, PREPARING]
PUBLISH_STATES = [LAUNCHING, PREPARING]
def isActive(state):
def is_active(state):
return state == ACTIVE
def isInactive(state):
def is_inactive(state):
return state == INACTIVE
def isBlocked(state):
def is_blocked(state):
return state == BLOCKED
def isPreparing(state):
def is_preparing(state):
return state == PREPARING
def isUsable(state):
def is_usable(state):
return state == USABLE
def isRemovable(state):
def is_removable(state):
return state == REMOVABLE
def isRemoving(state):
def is_removing(state):
return state == REMOVING
def isRemoved(state):
def is_removed(state):
return state == REMOVED
def isCanceling(state):
def is_canceling(state):
return state == CANCELING
def isCanceled(state):
def is_canceled(state):
return state == CANCELED
def isErrored(state):
def is_errored(state):
return state == ERROR
def isFinished(state):
def is_finished(state):
return state == FINISHED
def isRuning(state):
def is_runing(state):
return state == RUNNING
def isForExecute(state):
def is_for_execute(state):
return state == FOR_EXECUTE

View File

@ -54,22 +54,22 @@ CounterClass = typing.TypeVar('CounterClass', Provider, Service, ServicePool, Au
# Helpers
def _get_Id(obj):
def _get_id(obj):
return obj.id if obj.id != -1 else None
def _get_P_S_Ids(provider) -> tuple:
def _get_prov_serv_ids(provider) -> tuple:
return tuple(i.id for i in provider.services.all())
def _get_S_DS_Ids(service) -> tuple:
def _get_serv_pool_ids(service) -> tuple:
return tuple(i.id for i in service.deployedServices.all())
def _get_P_S_DS_Ids(provider) -> tuple:
def _get_prov_serv_pool_ids(provider) -> tuple:
res: tuple = ()
for i in provider.services.all():
res += _get_S_DS_Ids(i)
res += _get_serv_pool_ids(i)
return res
@ -77,25 +77,25 @@ _id_retriever: typing.Final[
collections.abc.Mapping[type[Model], collections.abc.Mapping[int, collections.abc.Callable]]
] = {
Provider: {
types.stats.CounterType.LOAD: _get_Id,
types.stats.CounterType.STORAGE: _get_P_S_Ids,
types.stats.CounterType.ASSIGNED: _get_P_S_DS_Ids,
types.stats.CounterType.INUSE: _get_P_S_DS_Ids,
types.stats.CounterType.LOAD: _get_id,
types.stats.CounterType.STORAGE: _get_prov_serv_ids,
types.stats.CounterType.ASSIGNED: _get_prov_serv_pool_ids,
types.stats.CounterType.INUSE: _get_prov_serv_pool_ids,
},
Service: {
types.stats.CounterType.STORAGE: _get_Id,
types.stats.CounterType.ASSIGNED: _get_S_DS_Ids,
types.stats.CounterType.INUSE: _get_S_DS_Ids,
types.stats.CounterType.STORAGE: _get_id,
types.stats.CounterType.ASSIGNED: _get_serv_pool_ids,
types.stats.CounterType.INUSE: _get_serv_pool_ids,
},
ServicePool: {
types.stats.CounterType.ASSIGNED: _get_Id,
types.stats.CounterType.INUSE: _get_Id,
types.stats.CounterType.CACHED: _get_Id,
types.stats.CounterType.ASSIGNED: _get_id,
types.stats.CounterType.INUSE: _get_id,
types.stats.CounterType.CACHED: _get_id,
},
Authenticator: {
types.stats.CounterType.AUTH_USERS: _get_Id,
types.stats.CounterType.AUTH_SERVICES: _get_Id,
types.stats.CounterType.AUTH_USERS_WITH_SERVICES: _get_Id,
types.stats.CounterType.AUTH_USERS: _get_id,
types.stats.CounterType.AUTH_SERVICES: _get_id,
types.stats.CounterType.AUTH_USERS_WITH_SERVICES: _get_id,
},
}

View File

@ -30,6 +30,7 @@
"""
@author: Adolfo Gómez, dkmaster at dkmon dot com
"""
import dataclasses
import datetime
import stat
import time
@ -125,8 +126,8 @@ def get_owner(
return OSManager.objects.get(pk=ownerId)
return None
class EventTupleType(typing.NamedTuple):
@dataclasses.dataclass
class EventTupleType:
stamp: datetime.datetime
fld1: str
fld2: str

View File

@ -52,7 +52,7 @@ def initialize() -> None:
logger.debug('Registering job: %s', cls.__module__)
task_manager().register_job(cls)
modfinder.dynamicLoadAndRegisterPackages(
modfinder.dynamically_load_and_register_packages(
registerer,
jobs.Job,
__name__

View File

@ -49,7 +49,7 @@ class CacheCleaner(Job):
def run(self) -> None:
logger.debug('Starting cache cleanup')
Cache.cleanUp()
Cache.purge_outdated()
logger.debug('Done cache cleanup')

View File

@ -79,7 +79,7 @@ def guacamole(request: ExtendedHttpRequestWithUser, token: str, tunnelId: str) -
try:
userService = UserService.objects.get(uuid=ti['userService'])
if not userService.isUsable():
if not userService.is_usable():
raise Exception() # Not usable, so we will not use it :)
user = userService.user
# check service owner is the same as the one that requested the ticket

View File

@ -38,7 +38,7 @@ from django.http import HttpResponse, HttpRequest
from uds.REST.methods import actor_v3
from uds.core.auths import auth
from uds.models import UserService
from uds.core.util.model import processUuid
from uds.core.util.model import process_uuid
from uds.core.util import states
logger = logging.getLogger(__name__)
@ -58,7 +58,7 @@ def opengnsys(
def getUserService() -> typing.Optional[UserService]:
try:
userService = UserService.objects.get(uuid=processUuid(uuid), state=states.userService.USABLE)
userService = UserService.objects.get(uuid=process_uuid(uuid), state=states.userService.USABLE)
if userService.properties.get('token') == token:
return userService
logger.warning(

View File

@ -55,7 +55,7 @@ class Command(BaseCommand):
sys.stdout.write("Cache...\n")
# UDSs cache
Cache.cleanUp()
Cache.purge_outdated()
# Django caches
cache.clear()

View File

@ -60,7 +60,7 @@ class Command(BaseCommand):
first, value = config.split('=', 1) # Only first = is separator :)
first = first.split('.')
if len(first) == 2:
mod, name = Config.SectionType.fromStr(first[0]), first[1]
mod, name = Config.SectionType.from_str(first[0]), first[1]
else:
mod, name = Config.SectionType.GLOBAL, first[0]
if (

View File

@ -153,7 +153,7 @@ class Command(BaseCommand):
fltr = fltr.filter(state=State.ERROR)
for item in fltr[:max_items]: # at most max_items items
logs = [
f'{l["date"]}: {log.LogLevel.fromStr(l["level"])} [{l["source"]}] - {l["message"]}'
f'{l["date"]}: {log.LogLevel.from_str(l["level"])} [{l["source"]}] - {l["message"]}'
for l in log.get_logs(item)
]
userServices[item.friendly_name] = {

View File

@ -61,7 +61,7 @@ def __loadModules():
"""
from uds.core import mfas # pylint: disable=import-outside-toplevel
modfinder.dynamicLoadAndRegisterModules(mfas.factory(), mfas.MFA, __name__)
modfinder.dynamically_load_and_register_modules(mfas.factory(), mfas.MFA, __name__)
__loadModules()

View File

@ -65,7 +65,7 @@ class Cache(models.Model):
app_label = 'uds'
@staticmethod
def cleanUp() -> None:
def purge_outdated() -> None:
"""
Purges the cache items that are no longer vaild.
"""

View File

@ -444,8 +444,8 @@ class CalendarAction(UUIDModel):
def save(self, *args, **kwargs):
lastExecution = self.last_execution or sql_datetime()
possibleNext = calendar.CalendarChecker(self.calendar).nextEvent(
checkFrom=lastExecution - self.offset, startEvent=self.at_start
possibleNext = calendar.CalendarChecker(self.calendar).next_event(
check_from=lastExecution - self.offset, start_event=self.at_start
)
if possibleNext:
self.next_execution = possibleNext + self.offset

View File

@ -56,7 +56,7 @@ class ManagedObjectModel(UUIDModel):
data = models.TextField(default='')
comments = models.CharField(max_length=256)
_cachedInstance: typing.Optional[Module] = None
_cached_instance: typing.Optional[Module] = None
class Meta(UUIDModel.Meta): # pylint: disable=too-few-public-methods
"""
@ -82,7 +82,7 @@ class ManagedObjectModel(UUIDModel):
if not values and self.data:
obj.deserialize(self.data)
self._cachedInstance = None # Ensures returns correct value on get_instance
self._cached_instance = None # Ensures returns correct value on get_instance
def get_instance(
self, values: typing.Optional[dict[str, str]] = None
@ -101,8 +101,8 @@ class ManagedObjectModel(UUIDModel):
Notes:
Can be overriden
"""
if self._cachedInstance and values is None:
return self._cachedInstance
if self._cached_instance and values is None:
return self._cached_instance
klass = self.get_type()
@ -110,7 +110,7 @@ class ManagedObjectModel(UUIDModel):
obj = klass(env, values)
self.deserialize(obj, values)
self._cachedInstance = obj
self._cached_instance = obj
return obj

View File

@ -91,7 +91,7 @@ class Provider(ManagedObjectModel, TaggingMixin): # type: ignore
prov.set_uuid(self.uuid)
return prov
def isInMaintenance(self) -> bool:
def is_in_maintenance(self) -> bool:
return self.maintenance_mode
@staticmethod

View File

@ -83,7 +83,7 @@ class Service(ManagedObjectModel, TaggingMixin): # type: ignore
max_services_count_type = models.PositiveIntegerField(default=ServicesCountingType.STANDARD)
_cachedInstance: typing.Optional['services.Service'] = None
_cached_instance: typing.Optional['services.Service'] = None
# "fake" declarations for type checking
# objects: 'models.manager.Manager["Service"]'
@ -128,9 +128,9 @@ class Service(ManagedObjectModel, TaggingMixin): # type: ignore
Raises:
"""
if self._cachedInstance and values is None:
if self._cached_instance and values is None:
# logger.debug('Got cached instance instead of deserializing a new one for {}'.format(self.name))
return self._cachedInstance
return self._cached_instance
prov: 'services.ServiceProvider' = self.provider.get_instance()
sType = prov.get_service_by_type(self.data_type)
@ -141,7 +141,7 @@ class Service(ManagedObjectModel, TaggingMixin): # type: ignore
else:
raise Exception(f'Service type of {self.data_type} is not recognized by provider {prov.name}')
self._cachedInstance = obj
self._cached_instance = obj
return obj
@ -165,9 +165,9 @@ class Service(ManagedObjectModel, TaggingMixin): # type: ignore
def maxServicesCountType(self) -> ServicesCountingType:
return ServicesCountingType.from_int(self.max_services_count_type)
def isInMaintenance(self) -> bool:
def is_in_maintenance(self) -> bool:
# orphaned services?
return self.provider.isInMaintenance() if self.provider else True
return self.provider.is_in_maintenance() if self.provider else True
def test_connectivity(self, host: str, port: typing.Union[str, int], timeout: float = 4) -> bool:
return net.test_connectivity(host, int(port), timeout)
@ -187,13 +187,13 @@ class Service(ManagedObjectModel, TaggingMixin): # type: ignore
logger.warning('No actor notification available for user service %s', userService.friendly_name)
@property
def oldMaxAccountingMethod(self) -> bool:
def old_max_accounting_method(self) -> bool:
# Compatibility with old accounting method
# Counts only "creating and running" instances for max limit checking
return self.maxServicesCountType == ServicesCountingType.STANDARD
@property
def newMaxAccountingMethod(self) -> bool:
def new_max_accounting_method(self) -> bool:
# Compatibility with new accounting method,
# Counts EVERYTHING for max limit checking
return self.maxServicesCountType == ServicesCountingType.CONSERVATIVE

View File

@ -271,7 +271,7 @@ class ServicePool(UUIDModel, TaggingMixin): # type: ignore
return False
def is_in_maintenance(self) -> bool:
return self.service.isInMaintenance() if self.service else True
return self.service.is_in_maintenance() if self.service else True
def is_visible(self) -> bool:
return self.visible # type: ignore
@ -345,11 +345,11 @@ class ServicePool(UUIDModel, TaggingMixin): # type: ignore
for ac in self.calendarAccess.all():
if ac.access == states.action.ALLOW and self.fallbackAccess == states.action.DENY:
nextE = calendar.CalendarChecker(ac.calendar).nextEvent(chkDateTime, False)
nextE = calendar.CalendarChecker(ac.calendar).next_event(chkDateTime, False)
if not deadLine or (nextE and deadLine > nextE):
deadLine = nextE
elif ac.access == states.action.DENY: # DENY
nextE = calendar.CalendarChecker(ac.calendar).nextEvent(chkDateTime, True)
nextE = calendar.CalendarChecker(ac.calendar).next_event(chkDateTime, True)
if not deadLine or (nextE and deadLine > nextE):
deadLine = nextE

View File

@ -256,7 +256,7 @@ class User(UUIDModel, properties.PropertiesMixin):
# Removes all user services assigned to this user (unassign it and mark for removal)
for us in to_delete.userServices.all():
us.assignToUser(None)
us.assign_to(None)
us.remove()
logger.debug('Deleted user %s', to_delete)

View File

@ -416,7 +416,7 @@ class UserService(UUIDModel, properties.PropertiesMixin):
self.os_state = state
self.save(update_fields=['os_state', 'state_date'])
def assignToUser(self, user: typing.Optional[User]) -> None:
def assign_to(self, user: typing.Optional[User]) -> None:
"""
Assigns this user deployed service to an user.
@ -494,13 +494,13 @@ class UserService(UUIDModel, properties.PropertiesMixin):
except Exception: # Does not exists, log it and ignore it
logger.warning('Session %s does not exists for user deployed service', self.id)
def isUsable(self) -> bool:
def is_usable(self) -> bool:
"""
Returns if this service is usable
"""
return State.is_usable(self.state)
def isPreparing(self) -> bool:
def is_preparing(self) -> bool:
"""
Returns if this service is in preparation (not ready to use, but in its way to be so...)
"""
@ -544,7 +544,7 @@ class UserService(UUIDModel, properties.PropertiesMixin):
"""
Marks for removal or cancels it, depending on state
"""
if self.isUsable():
if self.is_usable():
self.remove()
else:
self.cancel()

View File

@ -55,7 +55,7 @@ def __loadModules():
from uds.core import messaging
# We need to import all modules that are descendant of this package
modfinder.dynamicLoadAndRegisterModules(messaging.factory(), messaging.Notifier, __name__)
modfinder.dynamically_load_and_register_modules(messaging.factory(), messaging.Notifier, __name__)
__loadModules()

View File

@ -119,7 +119,7 @@ class LinuxOsManager(osmanagers.OSManager):
def ignore_deadline(self) -> bool:
return not self._deadLine
def isRemovableOnLogout(self, userService: 'UserService') -> bool:
def is_removableOnLogout(self, userService: 'UserService') -> bool:
'''
Says if a machine is removable on logout
'''
@ -142,7 +142,7 @@ class LinuxOsManager(osmanagers.OSManager):
try:
msg, slevel = data.split('\t')
try:
level = log.LogLevel.fromStr(slevel)
level = log.LogLevel.from_str(slevel)
except Exception:
logger.debug('Do not understand level %s', slevel)
level = log.LogLevel.INFO
@ -160,7 +160,7 @@ class LinuxOsManager(osmanagers.OSManager):
This will be invoked for every assigned and unused user service that has been in this state at least 1/2 of Globalconfig.CHECK_UNUSED_TIME
This function can update userService values. Normal operation will be remove machines if this state is not valid
"""
if self.isRemovableOnLogout(userService):
if self.is_removableOnLogout(userService):
log.log(
userService,
log.LogLevel.INFO,

View File

@ -91,7 +91,7 @@ class TestOSManager(osmanagers.OSManager):
def release(self, userService: 'UserService') -> None:
logger.debug('User service %s released', userService)
def isRemovableOnLogout(self, userService: 'UserService') -> bool:
def is_removableOnLogout(self, userService: 'UserService') -> bool:
'''
Says if a machine is removable on logout
'''
@ -114,7 +114,7 @@ class TestOSManager(osmanagers.OSManager):
try:
msg, slevel = data.split('\t')
try:
level = log.LogLevel.fromStr(slevel)
level = log.LogLevel.from_str(slevel)
except Exception:
logger.debug('Do not understand level %s', slevel)
level = log.LogLevel.INFO
@ -132,7 +132,7 @@ class TestOSManager(osmanagers.OSManager):
This will be invoked for every assigned and unused user service that has been in this state at least 1/2 of Globalconfig.CHECK_UNUSED_TIME
This function can update userService values. Normal operation will be remove machines if this state is not valid
"""
if self.isRemovableOnLogout(userService):
if self.is_removableOnLogout(userService):
log.log(
userService,
log.LogLevel.INFO,

View File

@ -122,7 +122,7 @@ class WindowsOsManager(osmanagers.OSManager):
self.__setProcessUnusedMachines()
def isRemovableOnLogout(self, userService: 'UserService') -> bool:
def is_removableOnLogout(self, userService: 'UserService') -> bool:
"""
Says if a machine is removable on logout
"""
@ -148,7 +148,7 @@ class WindowsOsManager(osmanagers.OSManager):
try:
msg, levelStr = data.split('\t')
try:
level = log.LogLevel.fromStr(levelStr)
level = log.LogLevel.from_str(levelStr)
except Exception:
logger.debug('Do not understand level %s', levelStr)
level = log.LogLevel.INFO
@ -191,7 +191,7 @@ class WindowsOsManager(osmanagers.OSManager):
This will be invoked for every assigned and unused user service that has been in this state at least 1/2 of Globalconfig.CHECK_UNUSED_TIME
This function can update userService values. Normal operation will be remove machines if this state is not valid
"""
if self.isRemovableOnLogout(userService):
if self.is_removableOnLogout(userService):
log.log(
userService,
log.LogLevel.INFO,

View File

@ -235,7 +235,7 @@ class WinDomainOsManager(WindowsOsManager):
obj: typing.Optional[collections.abc.MutableMapping[str, typing.Any]]
try:
obj = next(
ldaputil.getAsDict(
ldaputil.as_dict(
ldapConnection,
base,
f'(&(objectClass=group)(|(cn={group})(sAMAccountName={group})))',
@ -260,7 +260,7 @@ class WinDomainOsManager(WindowsOsManager):
fltr = f'(&(objectClass=computer)(sAMAccountName={ldaputil.escape(machineName)}$))'
obj: typing.Optional[collections.abc.MutableMapping[str, typing.Any]]
try:
obj = next(ldaputil.getAsDict(ldapConnection, base, fltr, ['dn'], sizeLimit=50))
obj = next(ldaputil.as_dict(ldapConnection, base, fltr, ['dn'], sizeLimit=50))
except StopIteration:
obj = None

View File

@ -51,7 +51,7 @@ def __loadModules():
from uds.core import osmanagers
# OSManagers registers everything
modfinder.dynamicLoadAndRegisterModules(
modfinder.dynamically_load_and_register_modules(
osmanagers.factory(), osmanagers.OSManager, __name__
)

View File

@ -42,6 +42,6 @@ def __loadPlugins():
logger.debug('Initializing plugins...')
# Load all modules
modfinder.importModules(__name__)
modfinder.import_modules(__name__)
__loadPlugins()

View File

@ -60,7 +60,7 @@ def __loadModules() -> None:
alreadyAdded.add(cls.uuid)
availableReports.append(cls)
modfinder.dynamicLoadAndRegisterPackages(
modfinder.dynamically_load_and_register_packages(
addReportCls,
reports.Report,
__name__,

View File

@ -35,7 +35,7 @@ import collections.abc
from django.utils.translation import gettext_noop as _
from uds.core import services, types
from uds.core import services, types, consts
from uds.core.ui import gui
from uds.core.util import validators
from uds.core.util.cache import Cache
@ -510,7 +510,7 @@ class OVirtProvider(
) -> typing.Optional[collections.abc.MutableMapping[str, typing.Any]]:
return self.__getApi().getConsoleConnection(machineId)
@cached('reachable', Cache.SHORT_VALIDITY)
@cached('reachable', consts.system.SHORT_CACHE_TIMEOUT)
def isAvailable(self) -> bool:
"""
Check if aws provider is reachable

View File

@ -110,7 +110,7 @@ class OVirtLinkedService(services.Service): # pylint: disable=too-many-public-m
label=_("Cluster"),
order=100,
fills={
'callbackName': 'ovFillResourcesFromCluster',
'callback_name': 'ovFillResourcesFromCluster',
'function': helpers.getResources,
'parameters': ['cluster', 'ov', 'ev'],
},

View File

@ -157,7 +157,7 @@ class OGDeployment(services.UserService):
return State.ERROR
# Machine powered off, check what to do...
if self.service().isRemovableIfUnavailable():
if self.service().is_removableIfUnavailable():
return self.__error(
'Machine is unavailable and service has "Remove if unavailable" flag active.'
)

View File

@ -37,7 +37,7 @@ import collections.abc
from django.utils.translation import gettext_noop as _
from uds.core import types
from uds.core import types, consts
from uds.core.services import ServiceProvider
from uds.core.ui import gui
from uds.core.util import validators
@ -290,7 +290,7 @@ class OGProvider(ServiceProvider):
def status(self, machineId: str) -> typing.Any:
return self.api.status(machineId)
@cached('reachable', Cache.SHORT_VALIDITY)
@cached('reachable', consts.system.SHORT_CACHE_TIMEOUT)
def isAvailable(self) -> bool:
"""
Check if aws provider is reachable

View File

@ -101,7 +101,7 @@ class OGService(services.Service):
label=_("OU"),
order=100,
fills={
'callbackName': 'osFillData',
'callback_name': 'osFillData',
'function': helpers.getResources,
'parameters': ['ov', 'ev', 'ou'],
},
@ -222,7 +222,7 @@ class OGService(services.Service):
def getReleaseURL(self, uuid: str, token: str) -> str:
return self._notifyURL(uuid, token, 'release')
def isRemovableIfUnavailable(self):
def is_removableIfUnavailable(self):
return self.startIfUnavailable.isTrue()
def is_avaliable(self) -> bool:

View File

@ -36,7 +36,7 @@ import collections.abc
from django.utils.translation import gettext_noop as _
from uds.core import types
from uds.core import types, consts
from uds.core.services import ServiceProvider
from uds.core.ui import gui
from uds.core.util import validators
@ -340,7 +340,7 @@ class OpenNebulaProvider(ServiceProvider): # pylint: disable=too-many-public-me
def test(env: 'Environment', data: 'Module.ValuesType') -> list[typing.Any]:
return OpenNebulaProvider(env, data).testConnection()
@cached('reachable', Cache.SHORT_VALIDITY)
@cached('reachable', consts.system.SHORT_CACHE_TIMEOUT)
def isAvailable(self) -> bool:
"""
Check if aws provider is reachable

View File

@ -296,7 +296,7 @@ class OpenStackProvider(ServiceProvider):
"""
return OpenStackProvider(env, data).testConnection()
@cached('reachable', Cache.SHORT_VALIDITY)
@cached('reachable', consts.system.SHORT_CACHE_TIMEOUT)
def isAvailable(self) -> bool:
"""
Check if aws provider is reachable

View File

@ -38,7 +38,7 @@ import collections.abc
from django.utils.translation import gettext_noop as _
from uds.core import types
from uds.core import types, consts
from uds.core.services import ServiceProvider
from uds.core.ui import gui
from uds.core.util import validators
@ -279,7 +279,7 @@ class ProviderLegacy(ServiceProvider):
"""
return ProviderLegacy(env, data).testConnection()
@cached('reachable', Cache.SHORT_VALIDITY)
@cached('reachable', consts.system.SHORT_CACHE_TIMEOUT)
def isAvailable(self) -> bool:
"""
Check if aws provider is reachable

View File

@ -117,7 +117,7 @@ class LiveService(services.Service):
label=_('Project'),
order=2,
fills={
'callbackName': 'osFillResources',
'callback_name': 'osFillResources',
'function': helpers.getResources,
'parameters': ['ov', 'ev', 'project', 'region', 'legacy'],
},
@ -129,7 +129,7 @@ class LiveService(services.Service):
label=_('Availability Zones'),
order=3,
fills={
'callbackName': 'osFillVolumees',
'callback_name': 'osFillVolumees',
'function': helpers.getVolumes,
'parameters': [
'ov',

View File

@ -44,7 +44,7 @@ from . import types
from uds.core.util import security
from uds.core.util.decorators import cached, ensureConnected
from uds.core.util.decorators import cached, ensure_connected
# DEFAULT_PORT = 8006
@ -256,16 +256,16 @@ class ProxmoxClient:
return False
return True
@ensureConnected
@cached('cluster', CACHE_DURATION, cachingKeyFnc=cachingKeyHelper)
@ensure_connected
@cached('cluster', CACHE_DURATION, key_fnc=cachingKeyHelper)
def getClusterInfo(self, **kwargs) -> types.ClusterStatus:
return types.ClusterStatus.fromJson(self._get('cluster/status'))
@ensureConnected
@ensure_connected
def getNextVMId(self) -> int:
return int(self._get('cluster/nextid')['data'])
@ensureConnected
@ensure_connected
def isVMIdAvailable(self, vmId: int) -> bool:
try:
self._get(f'cluster/nextid?vmid={vmId}')
@ -273,30 +273,30 @@ class ProxmoxClient:
return False
return True
@ensureConnected
@ensure_connected
@cached(
'nodeNets',
CACHE_DURATION,
cachingArgs=1,
cachingKWArgs=['node'],
cachingKeyFnc=cachingKeyHelper,
args=1,
kwargs=['node'],
key_fnc=cachingKeyHelper,
)
def getNodeNetworks(self, node: str, **kwargs):
return self._get('nodes/{}/network'.format(node))['data']
# pylint: disable=unused-argument
@ensureConnected
@ensure_connected
@cached(
'nodeGpuDevices',
CACHE_DURATION_LONG,
cachingArgs=1,
cachingKWArgs=['node'],
cachingKeyFnc=cachingKeyHelper,
args=1,
kwargs=['node'],
key_fnc=cachingKeyHelper,
)
def nodeGpuDevices(self, node: str, **kwargs) -> list[str]:
return [device['id'] for device in self._get(f'nodes/{node}/hardware/pci')['data'] if device.get('mdev')]
@ensureConnected
@ensure_connected
def getNodeVGPUs(self, node: str, **kwargs) -> list[typing.Any]:
return [
{
@ -310,11 +310,11 @@ class ProxmoxClient:
for gpu in self._get(f'nodes/{node}/hardware/pci/{device}/mdev')['data']
]
@ensureConnected
@ensure_connected
def nodeHasFreeVGPU(self, node: str, vgpu_type: str, **kwargs) -> bool:
return any(gpu['available'] and gpu['type'] == vgpu_type for gpu in self.getNodeVGPUs(node))
@ensureConnected
@ensure_connected
def getBestNodeForVm(
self,
minMemory: int = 0,
@ -352,7 +352,7 @@ class ProxmoxClient:
return best if best.status == 'online' else None
@ensureConnected
@ensure_connected
def cloneVm(
self,
vmId: int,
@ -422,12 +422,12 @@ class ProxmoxClient:
upid=types.UPID.from_dict(self._post('nodes/{}/qemu/{}/clone'.format(fromNode, vmId), data=params)),
)
@ensureConnected
@cached('hagrps', CACHE_DURATION, cachingKeyFnc=cachingKeyHelper)
@ensure_connected
@cached('hagrps', CACHE_DURATION, key_fnc=cachingKeyHelper)
def listHAGroups(self) -> list[str]:
return [g['group'] for g in self._get('cluster/ha/groups')['data']]
@ensureConnected
@ensure_connected
def enableVmHA(self, vmId: int, started: bool = False, group: typing.Optional[str] = None) -> None:
self._post(
'cluster/ha/resources',
@ -441,14 +441,14 @@ class ProxmoxClient:
+ ([('group', group)] if group else []),
)
@ensureConnected
@ensure_connected
def disableVmHA(self, vmId: int) -> None:
try:
self._delete('cluster/ha/resources/vm%3A{}'.format(vmId))
except Exception:
logger.exception('removeFromHA')
@ensureConnected
@ensure_connected
def setProtection(self, vmId: int, node: typing.Optional[str] = None, protection: bool = False) -> None:
params: list[tuple[str, str]] = [
('protection', str(int(protection))),
@ -456,24 +456,24 @@ class ProxmoxClient:
node = node or self.getVmInfo(vmId).node
self._post('nodes/{}/qemu/{}/config'.format(node, vmId), data=params)
@ensureConnected
@ensure_connected
def deleteVm(self, vmId: int, node: typing.Optional[str] = None, purge: bool = True) -> types.UPID:
node = node or self.getVmInfo(vmId).node
return types.UPID.from_dict(self._delete('nodes/{}/qemu/{}?purge=1'.format(node, vmId)))
@ensureConnected
@ensure_connected
def getTask(self, node: str, upid: str) -> types.TaskStatus:
return types.TaskStatus.fromJson(
self._get('nodes/{}/tasks/{}/status'.format(node, urllib.parse.quote(upid)))
)
@ensureConnected
@ensure_connected
@cached(
'vms',
CACHE_DURATION,
cachingArgs=1,
cachingKWArgs='node',
cachingKeyFnc=cachingKeyHelper,
args=1,
kwargs='node',
key_fnc=cachingKeyHelper,
)
def listVms(self, node: typing.Union[None, str, collections.abc.Iterable[str]] = None) -> list[types.VMInfo]:
nodeList: collections.abc.Iterable[str]
@ -492,13 +492,13 @@ class ProxmoxClient:
return sorted(result, key=lambda x: '{}{}'.format(x.node, x.name))
@ensureConnected
@ensure_connected
@cached(
'vmip',
CACHE_INFO_DURATION,
cachingArgs=[1, 2],
cachingKWArgs=['vmId', 'poolId'],
cachingKeyFnc=cachingKeyHelper,
args=[1, 2],
kwargs=['vmId', 'poolId'],
key_fnc=cachingKeyHelper,
)
def getVMPoolInfo(self, vmId: int, poolId: str, **kwargs) -> types.VMInfo:
# try to locate machine in pool
@ -517,13 +517,13 @@ class ProxmoxClient:
return self.getVmInfo(vmId, node, **kwargs)
@ensureConnected
@ensure_connected
@cached(
'vmin',
CACHE_INFO_DURATION,
cachingArgs=[1, 2],
cachingKWArgs=['vmId', 'node'],
cachingKeyFnc=cachingKeyHelper,
args=[1, 2],
kwargs=['vmId', 'node'],
key_fnc=cachingKeyHelper,
)
def getVmInfo(self, vmId: int, node: typing.Optional[str] = None, **kwargs) -> types.VMInfo:
nodes = [types.Node(node, False, False, 0, '', '', '')] if node else self.getClusterInfo().nodes
@ -545,13 +545,13 @@ class ProxmoxClient:
raise ProxmoxNotFound()
@ensureConnected
@ensure_connected
# @allowCache('vmc', CACHE_DURATION, cachingArgs=[1, 2], cachingKWArgs=['vmId', 'node'], cachingKeyFnc=cachingKeyHelper)
def getVmConfiguration(self, vmId: int, node: typing.Optional[str] = None):
node = node or self.getVmInfo(vmId).node
return types.VMConfiguration.from_dict(self._get('nodes/{}/qemu/{}/config'.format(node, vmId))['data'])
@ensureConnected
@ensure_connected
def setVmMac(
self,
vmId: int,
@ -580,35 +580,35 @@ class ProxmoxClient:
data=[(netid, netdata)],
)
@ensureConnected
@ensure_connected
def startVm(self, vmId: int, node: typing.Optional[str] = None) -> types.UPID:
# if exitstatus is "OK" or contains "already running", all is fine
node = node or self.getVmInfo(vmId).node
return types.UPID.from_dict(self._post('nodes/{}/qemu/{}/status/start'.format(node, vmId)))
@ensureConnected
@ensure_connected
def stopVm(self, vmId: int, node: typing.Optional[str] = None) -> types.UPID:
node = node or self.getVmInfo(vmId).node
return types.UPID.from_dict(self._post('nodes/{}/qemu/{}/status/stop'.format(node, vmId)))
@ensureConnected
@ensure_connected
def resetVm(self, vmId: int, node: typing.Optional[str] = None) -> types.UPID:
node = node or self.getVmInfo(vmId).node
return types.UPID.from_dict(self._post('nodes/{}/qemu/{}/status/reset'.format(node, vmId)))
@ensureConnected
@ensure_connected
def suspendVm(self, vmId: int, node: typing.Optional[str] = None) -> types.UPID:
# if exitstatus is "OK" or contains "already running", all is fine
node = node or self.getVmInfo(vmId).node
return types.UPID.from_dict(self._post('nodes/{}/qemu/{}/status/suspend'.format(node, vmId)))
@ensureConnected
@ensure_connected
def shutdownVm(self, vmId: int, node: typing.Optional[str] = None) -> types.UPID:
# if exitstatus is "OK" or contains "already running", all is fine
node = node or self.getVmInfo(vmId).node
return types.UPID.from_dict(self._post('nodes/{}/qemu/{}/status/shutdown'.format(node, vmId)))
@ensureConnected
@ensure_connected
def convertToTemplate(self, vmId: int, node: typing.Optional[str] = None) -> None:
node = node or self.getVmInfo(vmId).node
self._post('nodes/{}/qemu/{}/template'.format(node, vmId))
@ -618,26 +618,26 @@ class ProxmoxClient:
# proxmox has a "resume", but start works for suspended vm so we use it
resumeVm = startVm
@ensureConnected
@ensure_connected
@cached(
'storage',
CACHE_DURATION,
cachingArgs=[1, 2],
cachingKWArgs=['storage', 'node'],
cachingKeyFnc=cachingKeyHelper,
args=[1, 2],
kwargs=['storage', 'node'],
key_fnc=cachingKeyHelper,
)
def getStorage(self, storage: str, node: str, **kwargs) -> types.StorageInfo:
return types.StorageInfo.from_dict(
self._get('nodes/{}/storage/{}/status'.format(node, urllib.parse.quote(storage)))['data']
)
@ensureConnected
@ensure_connected
@cached(
'storages',
CACHE_DURATION,
cachingArgs=[1, 2],
cachingKWArgs=['node', 'content'],
cachingKeyFnc=cachingKeyHelper,
args=[1, 2],
kwargs=['node', 'content'],
key_fnc=cachingKeyHelper,
)
def listStorages(
self,
@ -664,19 +664,19 @@ class ProxmoxClient:
return result
@ensureConnected
@cached('nodeStats', CACHE_INFO_DURATION, cachingKeyFnc=cachingKeyHelper)
@ensure_connected
@cached('nodeStats', CACHE_INFO_DURATION, key_fnc=cachingKeyHelper)
def getNodesStats(self, **kwargs) -> list[types.NodeStats]:
return [
types.NodeStats.from_dict(nodeStat) for nodeStat in self._get('cluster/resources?type=node')['data']
]
@ensureConnected
@cached('pools', CACHE_DURATION // 6, cachingKeyFnc=cachingKeyHelper)
@ensure_connected
@cached('pools', CACHE_DURATION // 6, key_fnc=cachingKeyHelper)
def listPools(self) -> list[types.PoolInfo]:
return [types.PoolInfo.from_dict(nodeStat) for nodeStat in self._get('pools')['data']]
@ensureConnected
@ensure_connected
def getConsoleConnection(
self, vmId: int, node: typing.Optional[str] = None
) -> typing.Optional[collections.abc.MutableMapping[str, typing.Any]]:

View File

@ -155,14 +155,14 @@ class TaskStatus(typing.NamedTuple):
def isRunning(self) -> bool:
return self.status == 'running'
def isFinished(self) -> bool:
def is_finished(self) -> bool:
return self.status == 'stopped'
def isCompleted(self) -> bool:
return self.isFinished() and self.exitstatus == 'OK'
return self.is_finished() and self.exitstatus == 'OK'
def isErrored(self) -> bool:
return self.isFinished() and not self.isCompleted()
def is_errored(self) -> bool:
return self.is_finished() and not self.isCompleted()
class NetworkConfiguration(typing.NamedTuple):

View File

@ -477,7 +477,7 @@ if sys.platform == 'win32':
except client.ProxmoxConnectionError:
return State.RUNNING # Try again later
if task.isErrored():
if task.is_errored():
return self.__error(task.exitstatus)
if task.isCompleted():

View File

@ -33,7 +33,7 @@ import collections.abc
from django.utils.translation import gettext_noop as _
from uds.core import services, types
from uds.core import services, types, consts
from uds.core.ui import gui
from uds.core.util import validators
from uds.core.util.cache import Cache
@ -310,7 +310,7 @@ class ProxmoxProvider(
return vmId
# All assigned VMId will be left as unusable on UDS until released by time (3 months)
@cached('reachable', Cache.SHORT_VALIDITY)
@cached('reachable', consts.system.SHORT_CACHE_TIMEOUT)
def isAvailable(self) -> bool:
return self._getApi().test()

View File

@ -150,7 +150,7 @@ class ProxmoxPublication(services.Publication):
self._reason = str(e)
return self._state
if task.isErrored():
if task.is_errored():
self._reason = task.exitstatus
self._state = State.ERROR
else: # Finished

View File

@ -136,7 +136,7 @@ class ProxmoxLinkedService(services.Service): # pylint: disable=too-many-public
label=_("Base Machine"),
order=110,
fills={
'callbackName': 'pmFillResourcesFromMachine',
'callback_name': 'pmFillResourcesFromMachine',
'function': helpers.getStorage,
'parameters': ['machine', 'ov', 'ev'],
},
@ -341,6 +341,6 @@ class ProxmoxLinkedService(services.Service): # pylint: disable=too-many-public
) -> typing.Optional[collections.abc.MutableMapping[str, typing.Any]]:
return self.parent().getConsoleConnection(machineId)
@cached('reachable', Cache.SHORT_VALIDITY)
@cached('reachable', consts.system.SHORT_CACHE_TIMEOUT)
def is_avaliable(self) -> bool:
return self.parent().isAvailable()

Some files were not shown because too many files have changed in this diff Show More