Refactored log types and fixes
This commit is contained in:
parent: 6694b9d5bc
commit: 53e0cefc21
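Note: the diff below applies two mechanical renames across the codebase: the log enums move from the uds.core.util.log helper into the shared types namespace (log.LogLevel / log.LogSource become types.log.LogLevel / types.log.LogSource), and the sql_datetime() helper is renamed to sql_now(). A minimal standalone sketch of the namespaced-enum pattern (member names are taken from the diff; numeric values other than OTHER = 10000 are assumptions, and the log() stand-in just prints):

import enum

class LogLevel(enum.IntEnum):
    OTHER = 10000      # stated in the actor Log handler below
    INFO = 30000       # assumed spacing
    WARNING = 40000    # assumed
    ERROR = 50000      # assumed

class LogSource(str, enum.Enum):
    ACTOR = 'actor'
    ADMIN = 'admin'
    WEB = 'web'
    SERVER = 'server'

def log(owner: object, level: LogLevel, message: str, source: LogSource) -> None:
    # Stand-in for uds.core.util.log.log(); only prints here.
    print(f'[{source.value}] {level.name} {owner!r}: {message}')

log('service-pool-1', LogLevel.INFO, 'Added transport RDP by admin', LogSource.ADMIN)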
@@ -41,7 +41,7 @@ from django.utils.decorators import method_decorator
 from django.views.decorators.csrf import csrf_exempt
 from django.views.generic.base import View

-from uds.core import consts, exceptions
+from uds.core import consts, exceptions, types
 from uds.core.util import modfinder

 from . import processors, log
@@ -164,20 +164,20 @@ class Dispatcher(View):
 logger.debug('Path: %s', full_path)
 logger.debug('Error: %s', e)

-log.log_operation(handler, 400, log.LogLevel.ERROR)
+log.log_operation(handler, 400, types.log.LogLevel.ERROR)
 return http.HttpResponseBadRequest(
 f'Invalid parameters invoking {full_path}: {e}',
 content_type="text/plain",
 )
 except AttributeError:
 allowed_methods: list[str] = [n for n in ['get', 'post', 'put', 'delete'] if hasattr(handler, n)]
-log.log_operation(handler, 405, log.LogLevel.ERROR)
+log.log_operation(handler, 405, types.log.LogLevel.ERROR)
 return http.HttpResponseNotAllowed(allowed_methods, content_type="text/plain")
 except exceptions.rest.AccessDenied:
-log.log_operation(handler, 403, log.LogLevel.ERROR)
+log.log_operation(handler, 403, types.log.LogLevel.ERROR)
 return http.HttpResponseForbidden('access denied', content_type="text/plain")
 except Exception:
-log.log_operation(handler, 500, log.LogLevel.ERROR)
+log.log_operation(handler, 500, types.log.LogLevel.ERROR)
 logger.exception('error accessing attribute')
 logger.debug('Getting attribute %s for %s', http_method, full_path)
 return http.HttpResponseServerError('Unexcepected error', content_type="text/plain")
@@ -206,28 +206,28 @@ class Dispatcher(View):

 # Log de operation on the audit log for admin
 # Exceptiol will also be logged, but with ERROR level
-log.log_operation(handler, response.status_code, log.LogLevel.INFO)
+log.log_operation(handler, response.status_code, types.log.LogLevel.INFO)
 return response
 except exceptions.rest.RequestError as e:
-log.log_operation(handler, 400, log.LogLevel.ERROR)
+log.log_operation(handler, 400, types.log.LogLevel.ERROR)
 return http.HttpResponseBadRequest(str(e), content_type="text/plain")
 except exceptions.rest.ResponseError as e:
-log.log_operation(handler, 500, log.LogLevel.ERROR)
+log.log_operation(handler, 500, types.log.LogLevel.ERROR)
 return http.HttpResponseServerError(str(e), content_type="text/plain")
 except exceptions.rest.NotSupportedError as e:
-log.log_operation(handler, 501, log.LogLevel.ERROR)
+log.log_operation(handler, 501, types.log.LogLevel.ERROR)
 return http.HttpResponseBadRequest(str(e), content_type="text/plain")
 except exceptions.rest.AccessDenied as e:
-log.log_operation(handler, 403, log.LogLevel.ERROR)
+log.log_operation(handler, 403, types.log.LogLevel.ERROR)
 return http.HttpResponseForbidden(str(e), content_type="text/plain")
 except exceptions.rest.NotFound as e:
-log.log_operation(handler, 404, log.LogLevel.ERROR)
+log.log_operation(handler, 404, types.log.LogLevel.ERROR)
 return http.HttpResponseNotFound(str(e), content_type="text/plain")
 except exceptions.rest.HandlerError as e:
-log.log_operation(handler, 500, log.LogLevel.ERROR)
+log.log_operation(handler, 500, types.log.LogLevel.ERROR)
 return http.HttpResponseBadRequest(str(e), content_type="text/plain")
 except Exception as e:
-log.log_operation(handler, 500, log.LogLevel.ERROR)
+log.log_operation(handler, 500, types.log.LogLevel.ERROR)
 # Get ecxeption backtrace
 trace_back = traceback.format_exc()
 logger.error('Exception processing request: %s', full_path)
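Note: the dispatcher hunks above follow one pattern: map each REST exception type to an HTTP status code, record the outcome through log.log_operation(), and return a plain-text response. A condensed, self-contained sketch of that mapping (exception names mirror the diff; the rest, including the status/body tuple return, is illustrative):

class RequestError(Exception): ...
class ResponseError(Exception): ...
class NotSupportedError(Exception): ...
class AccessDenied(Exception): ...
class NotFound(Exception): ...
class HandlerError(Exception): ...

# Status codes as used by the dispatcher above.
STATUS_FOR_EXCEPTION = {
    RequestError: 400,
    ResponseError: 500,
    NotSupportedError: 501,
    AccessDenied: 403,
    NotFound: 404,
    HandlerError: 500,
}

def log_operation(handler: object, status: int, level: str) -> None:
    print(f'{handler}: {status} ({level})')  # stand-in for log.log_operation()

def dispatch(handler: object, operation) -> tuple[int, str]:
    try:
        result = operation()
        log_operation(handler, 200, 'INFO')       # audit successful calls at INFO
        return 200, result
    except tuple(STATUS_FOR_EXCEPTION) as e:      # any known REST exception
        status = STATUS_FOR_EXCEPTION[type(e)]
        log_operation(handler, status, 'ERROR')
        return status, str(e)
    except Exception as e:                        # anything else is a 500
        log_operation(handler, 500, 'ERROR')
        return 500, f'Unexpected error: {e}'

print(dispatch('users', lambda: 'ok'))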
@@ -44,7 +44,7 @@ from uds.core.managers.userservice import UserServiceManager
 from uds.core.util import log, security
 from uds.core.util.cache import Cache
 from uds.core.util.config import GlobalConfig
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.types.states import State
 from uds.models import Server, Service, TicketStore, UserService
 from uds.models.service import ServiceTokenAlias
@@ -194,23 +194,23 @@ class ActorV3Action(Handler):
 # ensure idsLists has upper and lower versions for case sensitive databases
 idsList = fix_list_of_ids(idsList)

-validId: typing.Optional[str] = service.get_valid_id(idsList)
+service_id: typing.Optional[str] = service.get_valid_id(idsList)

 is_remote = self._params.get('session_type', '')[:4] in ('xrdp', 'RDP-')

 # Must be valid
 if action in (NotifyActionType.LOGIN, NotifyActionType.LOGOUT):
-if not validId: # For login/logout, we need a valid id
+if not service_id: # For login/logout, we need a valid id
 raise Exception()
 # Notify Service that someone logged in/out

 if action == NotifyActionType.LOGIN:
 # Try to guess if this is a remote session
-service.process_login(validId, remote_login=is_remote)
+service.process_login(service_id, remote_login=is_remote)
 elif action == NotifyActionType.LOGOUT:
-service.process_logout(validId, remote_login=is_remote)
+service.process_logout(service_id, remote_login=is_remote)
 elif action == NotifyActionType.DATA:
-service.notify_data(validId, self._params['data'])
+service.notify_data(service_id, self._params['data'])
 else:
 raise Exception('Invalid action')

@@ -294,7 +294,7 @@ class Register(ActorV3Action):
 'run_once_command': self._params['run_once_command'],
 'custom': self._params.get('custom', ''),
 })
-actor_token.stamp = sql_datetime()
+actor_token.stamp = sql_now()
 actor_token.save()
 logger.info('Registered actor %s', self._params)
 found = True
@@ -318,7 +318,7 @@ class Register(ActorV3Action):
 'version': '',
 'os_type': self._params.get('os', types.os.KnownOS.UNKNOWN.os_name()),
 'mac': self._params['mac'],
-'stamp': sql_datetime(),
+'stamp': sql_now(),
 }

 actor_token = Server.objects.create(**kwargs)
@@ -704,14 +704,14 @@ class Log(ActorV3Action):
 userservice = self.get_userservice()
 if userservice.actor_version < '4.0.0':
 # Adjust loglevel to own, we start on 10000 for OTHER, and received is 0 for OTHER
-level = log.LogLevel.from_int(int(self._params['level']) + 10000)
+level = types.log.LogLevel.from_int(int(self._params['level']) + 10000)
 else:
-level = log.LogLevel.from_int(int(self._params['level']))
+level = types.log.LogLevel.from_int(int(self._params['level']))
 log.log(
 userservice,
 level,
 self._params['message'],
-log.LogSource.ACTOR,
+types.log.LogSource.ACTOR,
 )

 return ActorV3Action.actor_result('ok')
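Note: the Log action keeps a compatibility shim: actors older than 4.0.0 report 0-based numeric levels, so 10000 is added to land on the enum range that starts at OTHER = 10000. A standalone sketch of that adjustment (only OTHER = 10000 is stated in the diff; the remaining values and version strings are assumptions):

import enum

class LogLevel(enum.IntEnum):
    OTHER = 10000   # per the comment in the hunk above
    DEBUG = 20000   # assumed
    INFO = 30000    # assumed
    WARNING = 40000 # assumed
    ERROR = 50000   # assumed

    @staticmethod
    def from_int(value: int) -> 'LogLevel':
        # Unknown numeric values fall back to OTHER.
        try:
            return LogLevel(value)
        except ValueError:
            return LogLevel.OTHER

def level_from_actor(raw_level: int, actor_version: str) -> LogLevel:
    # Pre-4.0.0 actors send levels shifted down by 10000 (0 means OTHER).
    if actor_version < '4.0.0':
        return LogLevel.from_int(raw_level + 10000)
    return LogLevel.from_int(raw_level)

assert level_from_actor(20000, '3.6.0') is LogLevel.INFO
assert level_from_actor(30000, '4.1.0') is LogLevel.INFO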
@@ -39,7 +39,7 @@ from django.utils.translation import gettext as _

 from uds.core import exceptions
 from uds.core.util import ensure, permissions
-from uds.core.util.model import process_uuid, sql_datetime
+from uds.core.util.model import process_uuid, sql_now
 from uds.models.calendar import Calendar
 from uds.models.calendar_rule import CalendarRule, FrequencyInfo
 from uds.REST.model import DetailHandler
@@ -158,7 +158,7 @@ class CalendarRules(DetailHandler): # pylint: disable=too-many-public-methods
 logger.debug('Deleting rule %s from %s', item, parent)
 try:
 calRule = parent.rules.get(uuid=process_uuid(item))
-calRule.calendar.modified = sql_datetime()
+calRule.calendar.modified = sql_now()
 calRule.calendar.save()
 calRule.delete()
 except Exception as e:
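Note: sql_datetime() is renamed to sql_now() throughout the commit; the helper is used wherever a record such as Calendar.modified is stamped, so all timestamps come from a single clock. A rough stand-in (the real helper derives the time from the database server; this sketch just falls back to the application clock via Django's timezone.now()):

import datetime

def sql_now() -> datetime.datetime:
    # Approximation only: the real uds.core.util.model.sql_now() asks the database
    # for its current time; here we settle for the application server clock.
    try:
        from django.utils import timezone  # real Django API, if Django is installed
        return timezone.now()
    except ImportError:
        return datetime.datetime.now(datetime.timezone.utc)

calendar_modified = sql_now()  # e.g. stamping Calendar.modified as in the hunk above
print(calendar_modified)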
@@ -114,10 +114,10 @@ class MetaServicesPool(DetailHandler):

 log.log(
 parent,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 ("Added" if uuid is None else "Modified")
 + " meta pool member {}/{}/{} by {}".format(pool.name, priority, enabled, self._user.pretty_name),
-log.LogSource.ADMIN,
+types.log.LogSource.ADMIN,
 )

 def delete_item(self, parent: 'Model', item: str) -> None:
@@ -127,7 +127,7 @@ class MetaServicesPool(DetailHandler):

 member.delete()

-log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
+log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)


 class MetaAssignedService(DetailHandler):
@@ -264,7 +264,7 @@ class MetaAssignedService(DetailHandler):
 else:
 raise self.invalid_item_response(_('Item is not removable'))

-log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
+log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)

 # Only owner is allowed to change right now
 def save_item(self, parent: 'Model', item: typing.Optional[str]) -> None:
@@ -296,4 +296,4 @@ class MetaAssignedService(DetailHandler):
 userservice.save()

 # Log change
-log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
+log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)
@@ -110,9 +110,9 @@ class AccessCalendars(DetailHandler):

 log.log(
 parent,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f'{"Added" if uuid is None else "Updated"} access calendar {calendar.name}/{access} by {self._user.pretty_name}',
-log.LogSource.ADMIN,
+types.log.LogSource.ADMIN,
 )

 def delete_item(self, parent: 'Model', item: str) -> None:
@@ -121,7 +121,7 @@ class AccessCalendars(DetailHandler):
 logStr = f'Removed access calendar {calendarAccess.calendar.name} by {self._user.pretty_name}'
 calendarAccess.delete()

-log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
+log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)


 class ActionsCalendars(DetailHandler):
@@ -222,7 +222,7 @@ class ActionsCalendars(DetailHandler):
 params=params,
 )

-log.log(parent, log.LogLevel.INFO, log_string, log.LogSource.ADMIN)
+log.log(parent, types.log.LogLevel.INFO, log_string, types.log.LogSource.ADMIN)

 def delete_item(self, parent: 'Model', item: str) -> None:
 parent = ensure.is_instance(parent, models.ServicePool)
@@ -236,7 +236,7 @@ class ActionsCalendars(DetailHandler):

 calendarAction.delete()

-log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
+log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)

 def execute(self, parent: 'Model', item: str) -> typing.Any:
 parent = ensure.is_instance(parent, models.ServicePool)
@@ -252,7 +252,7 @@ class ActionsCalendars(DetailHandler):
 f'{calendarAction.params}" by {self._user.pretty_name}'
 )

-log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
+log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)
 calendarAction.execute()

 return self.success()
@@ -38,7 +38,7 @@ from django.utils.translation import gettext_lazy as _
 from uds import models
 from uds.core import consts, types
 from uds.core.exceptions import rest as rest_exceptions
-from uds.core.util import decorators, validators, log, model
+from uds.core.util import decorators, validators, model
 from uds.REST import Handler
 from uds.REST.utils import rest_result

@@ -50,7 +50,7 @@ logger = logging.getLogger(__name__)
 class ServerRegisterBase(Handler):
 def post(self) -> collections.abc.MutableMapping[str, typing.Any]:
 serverToken: models.Server
-now = model.sql_datetime()
+now = model.sql_now()
 ip = self._params.get('ip', self.request.ip)
 if ':' in ip:
 # If zone is present, remove it
@@ -122,7 +122,7 @@ class ServerRegisterBase(Handler):
 listen_port=port,
 hostname=self._params['hostname'],
 certificate=certificate,
-log_level=self._params.get('log_level', log.LogLevel.INFO.value),
+log_level=self._params.get('log_level', types.log.LogLevel.INFO.value),
 stamp=now,
 type=self._params['type'],
 subtype=self._params.get('subtype', ''), # Optional
@@ -38,7 +38,7 @@ from django.utils.translation import gettext_lazy as _
 from uds import models
 from uds.core import consts, types, ui
 from uds.core.util import net, permissions, ensure
-from uds.core.util.model import sql_datetime, process_uuid
+from uds.core.util.model import sql_now, process_uuid
 from uds.core.exceptions.rest import NotFound, RequestError
 from uds.REST.model import DetailHandler, ModelHandler

@@ -247,12 +247,12 @@ class ServersServers(DetailHandler):
 parent = ensure.is_instance(parent, models.ServerGroup)
 # Item is the uuid of the server to add
 server: typing.Optional['models.Server'] = None # Avoid warning on reference before assignment
-
+mac: str = ''
 if item is None:
 # Create new, depending on server type
 if parent.type == types.servers.ServerType.UNMANAGED:
 # Ensure mac is emty or valid
-mac: str = self._params['mac'].strip().upper()
+mac = self._params['mac'].strip().upper()
 if mac and not net.is_valid_mac(mac):
 raise self.invalid_request_response('Invalid MAC address')
 # Create a new one, and add it to group
@@ -265,7 +265,7 @@ class ServersServers(DetailHandler):
 mac=mac,
 type=parent.type,
 subtype=parent.subtype,
-stamp=sql_datetime(),
+stamp=sql_now(),
 )
 # Add to group
 parent.servers.add(server)
@@ -284,7 +284,7 @@ class ServersServers(DetailHandler):
 pass
 else:
 if parent.type == types.servers.ServerType.UNMANAGED:
-mac: str = self._params['mac'].strip().upper()
+mac = self._params['mac'].strip().upper()
 if mac and not net.is_valid_mac(mac):
 raise self.invalid_request_response('Invalid MAC address')
 try:
@@ -292,9 +292,10 @@ class ServersServers(DetailHandler):
 # Update register info also on update
 register_username=self._user.name,
 register_ip=self._request.ip,
-ip=self._params['ip'],
 hostname=self._params['hostname'],
+ip=self._params['ip'],
 mac=mac,
+stamp=sql_now(), # Modified now
 )
 except Exception:
 raise self.invalid_item_response() from None
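Note: for UNMANAGED server groups the handler accepts an optional MAC address, normalizes it to upper case and rejects anything that is neither empty nor valid (net.is_valid_mac). A self-contained approximation of that check — the regex below is an assumption, not the UDS implementation:

import re

_MAC_RE = re.compile(r'^([0-9A-F]{2}:){5}[0-9A-F]{2}$')  # six colon-separated hex pairs

def is_valid_mac(mac: str) -> bool:
    # Approximation of uds.core.util.net.is_valid_mac()
    return bool(_MAC_RE.match(mac))

mac = ' 00:1a:2b:3c:4d:5e '.strip().upper()  # normalization as in the handler above
if mac and not is_valid_mac(mac):
    raise ValueError('Invalid MAC address')
print(mac)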
@@ -44,7 +44,7 @@ from uds.core.ui import gui
 from uds.core.consts.images import DEFAULT_THUMB_BASE64
 from uds.core.util import log, permissions, ensure
 from uds.core.util.config import GlobalConfig
-from uds.core.util.model import sql_datetime, process_uuid
+from uds.core.util.model import sql_now, process_uuid
 from uds.core.types.states import State
 from uds.models import Account, Image, OSManager, Service, ServicePool, ServicePoolGroup, User
 from uds.REST.model import ModelHandler
@@ -130,7 +130,7 @@ class ServicesPools(ModelHandler):
 self, *args: typing.Any, **kwargs: typing.Any
 ) -> typing.Generator[types.rest.ItemDictType, None, None]:
 # Optimized query, due that there is a lot of info needed for theee
-d = sql_datetime() - datetime.timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
+d = sql_now() - datetime.timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
 return super().get_items(
 overview=kwargs.get('overview', True),
 query=(
@@ -41,7 +41,7 @@ from uds import models
 from uds.core import exceptions, types
 from uds.core.util import permissions
 from uds.core.util.cache import Cache
-from uds.core.util.model import process_uuid, sql_datetime
+from uds.core.util.model import process_uuid, sql_now
 from uds.core.types.states import State
 from uds.core.util.stats import counters
 from uds.REST import Handler
@@ -69,7 +69,7 @@ def get_servicepools_counters(
 cacheKey = (
 (servicePool and str(servicePool.id) or 'all') + str(counter_type) + str(POINTS) + str(since_days)
 )
-to = sql_datetime()
+to = sql_now()
 since: datetime.datetime = to - datetime.timedelta(days=since_days)

 cachedValue: typing.Optional[bytes] = cache.get(cacheKey)
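Note: get_servicepools_counters() builds its cache key from the pool id (or 'all'), the counter type, the number of points and the day range, then computes the query window ending at sql_now(). The keying and windowing, isolated as plain Python (helper names are made up for the sketch):

import datetime
import typing

POINTS = 70  # illustrative; the real constant lives in the module above

def counters_cache_key(pool_id: typing.Optional[str], counter_type: int, since_days: int) -> str:
    # Mirrors: (servicePool and str(servicePool.id) or 'all') + str(counter_type) + str(POINTS) + str(since_days)
    return (pool_id or 'all') + str(counter_type) + str(POINTS) + str(since_days)

def counters_window(since_days: int, now: datetime.datetime) -> tuple[datetime.datetime, datetime.datetime]:
    to = now
    since = to - datetime.timedelta(days=since_days)
    return since, to

print(counters_cache_key(None, 1, 30))
print(counters_window(30, datetime.datetime.now()))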
@@ -104,8 +104,8 @@ class TunnelTicket(Handler):
 now = sql_stamp_seconds()
 totalTime = now - extra.get('b', now - 1)
 msg = f'User {user.name} stopped tunnel {extra.get("t", "")[:8]}... to {host}:{port}: u:{sent}/d:{recv}/t:{totalTime}.'
-log.log(user.manager, log.LogLevel.INFO, msg)
-log.log(user_service, log.LogLevel.INFO, msg)
+log.log(user.manager, types.log.LogLevel.INFO, msg)
+log.log(user_service, types.log.LogLevel.INFO, msg)

 # Try to log Close event
 try:
@@ -133,8 +133,8 @@ class TunnelTicket(Handler):
 tunnel=self._args[0],
 )
 msg = f'User {user.name} started tunnel {self._args[0][:8]}... to {host}:{port} from {self._args[1]}.'
-log.log(user.manager, log.LogLevel.INFO, msg)
-log.log(user_service, log.LogLevel.INFO, msg)
+log.log(user.manager, types.log.LogLevel.INFO, msg)
+log.log(user_service, types.log.LogLevel.INFO, msg)
 # Generate new, notify only, ticket
 notifyTicket = models.TicketStore.create_for_tunnel(
 userService=user_service,
@@ -224,7 +224,7 @@ class AssignedService(DetailHandler):
 else:
 raise self.invalid_item_response(_('Item is not removable'))

-log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
+log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)

 # Only owner is allowed to change right now
 def save_item(self, parent: 'Model', item: typing.Optional[str]) -> None:
@@ -253,7 +253,7 @@ class AssignedService(DetailHandler):
 userService.save()

 # Log change
-log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
+log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)

 def reset(self, parent: 'models.ServicePool', item: str) -> typing.Any:
 userService = parent.userServices.get(uuid=process_uuid(item))
@@ -376,9 +376,9 @@ class Groups(DetailHandler):
 parent.assignedGroups.add(group)
 log.log(
 parent,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f'Added group {group.pretty_name} by {self._user.pretty_name}',
-log.LogSource.ADMIN,
+types.log.LogSource.ADMIN,
 )

 def delete_item(self, parent: 'Model', item: str) -> None:
@@ -387,9 +387,9 @@ class Groups(DetailHandler):
 parent.assignedGroups.remove(group)
 log.log(
 parent,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f'Removed group {group.pretty_name} by {self._user.pretty_name}',
-log.LogSource.ADMIN,
+types.log.LogSource.ADMIN,
 )


@@ -438,9 +438,9 @@ class Transports(DetailHandler):
 parent.transports.add(transport)
 log.log(
 parent,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f'Added transport {transport.name} by {self._user.pretty_name}',
-log.LogSource.ADMIN,
+types.log.LogSource.ADMIN,
 )

 def delete_item(self, parent: 'Model', item: str) -> None:
@@ -449,9 +449,9 @@ class Transports(DetailHandler):
 parent.transports.remove(transport)
 log.log(
 parent,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f'Removed transport {transport.name} by {self._user.pretty_name}',
-log.LogSource.ADMIN,
+types.log.LogSource.ADMIN,
 )


@@ -482,9 +482,9 @@ class Publications(DetailHandler):

 log.log(
 parent,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f'Initiated publication v{parent.current_pub_revision} by {self._user.pretty_name}',
-log.LogSource.ADMIN,
+types.log.LogSource.ADMIN,
 )

 return self.success()
@@ -512,9 +512,9 @@ class Publications(DetailHandler):

 log.log(
 parent,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f'Canceled publication v{parent.current_pub_revision} by {self._user.pretty_name}',
-log.LogSource.ADMIN,
+types.log.LogSource.ADMIN,
 )

 return self.success()
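Note: every detail handler above repeats the same audit call after an add/remove — log.log(parent, types.log.LogLevel.INFO, message, types.log.LogSource.ADMIN). A hypothetical wrapper that makes the repeated pattern explicit (this helper does not exist in the codebase; it is shown only for illustration, and the INFO value is assumed):

import enum

class LogLevel(enum.IntEnum):
    INFO = 30000  # assumed value

class LogSource(str, enum.Enum):
    ADMIN = 'admin'

def log(owner: object, level: LogLevel, message: str, source: LogSource) -> None:
    print(f'[{source.value}] {level.name} {owner!r}: {message}')  # stand-in

def audit_admin(owner: object, message: str, user_pretty_name: str) -> None:
    # Hypothetical helper: admin-originated changes are always logged at INFO level.
    log(owner, LogLevel.INFO, f'{message} by {user_pretty_name}', LogSource.ADMIN)

audit_admin('service-pool-1', 'Added transport RDP', 'admin')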
@@ -77,7 +77,7 @@ class TokenInfo:
 return TokenInfo(
 access_token=dct['access_token'],
 token_type=dct['token_type'],
-expires=model.sql_datetime() + datetime.timedelta(seconds=dct['expires_in'] - 10),
+expires=model.sql_now() + datetime.timedelta(seconds=dct['expires_in'] - 10),
 refresh_token=dct['refresh_token'],
 scope=dct['scope'],
 info=dct.get('info', {}),
@@ -560,7 +560,7 @@ class OAuth2Authenticator(auths.Authenticator):
 token = TokenInfo(
 access_token=parameters.get_params.get('access_token', ''),
 token_type=parameters.get_params.get('token_type', ''),
-expires=model.sql_datetime()
+expires=model.sql_now()
 + datetime.timedelta(seconds=int(parameters.get_params.get('expires_in', 0))),
 refresh_token=parameters.get_params.get('refresh_token', ''),
 scope=parameters.get_params.get('scope', ''),
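Note: TokenInfo stores an absolute expiry computed from the provider's relative expires_in, keeping a 10-second safety margin in the first hunk above. A standalone sketch of that conversion (field names follow the diff; the from_dict/is_expired helpers are illustrative):

import dataclasses
import datetime

@dataclasses.dataclass
class TokenInfo:
    access_token: str
    token_type: str
    expires: datetime.datetime
    refresh_token: str = ''
    scope: str = ''

    @staticmethod
    def from_dict(dct: dict, now: datetime.datetime) -> 'TokenInfo':
        # expires_in is relative (seconds); keep a 10 s safety margin as in the diff.
        return TokenInfo(
            access_token=dct['access_token'],
            token_type=dct['token_type'],
            expires=now + datetime.timedelta(seconds=dct['expires_in'] - 10),
            refresh_token=dct.get('refresh_token', ''),
            scope=dct.get('scope', ''),
        )

    def is_expired(self, now: datetime.datetime) -> bool:
        return now >= self.expires

token = TokenInfo.from_dict(
    {'access_token': 'abc', 'token_type': 'Bearer', 'expires_in': 3600},
    now=datetime.datetime.now(),
)
print(token.is_expired(datetime.datetime.now()))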
@@ -49,7 +49,7 @@ from uds.core.managers.crypto import CryptoManager
 from uds.core.types.requests import ExtendedHttpRequest
 from uds.core.ui import gui
 from uds.core.util import security, decorators, auth as auth_utils
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now

 # Not imported at runtime, just for type checking
 if typing.TYPE_CHECKING:
@@ -551,9 +551,9 @@ class SAMLAuthenticator(auths.Authenticator):
 ),
 # This is a date of end of validity
 'metadataValidUntil': (
-sql_datetime() + datetime.timedelta(days=self.metadata_validity_duration.as_int())
+sql_now() + datetime.timedelta(days=self.metadata_validity_duration.as_int())
 if self.metadata_cache_duration.value > 0
-else sql_datetime() + datetime.timedelta(days=365 * 10)
+else sql_now() + datetime.timedelta(days=365 * 10)
 ),
 'nameIdEncrypted': self.use_name_id_encrypted.as_bool(),
 'authnRequestsSigned': self.use_authn_requests_signed.as_bool(),
@@ -33,35 +33,26 @@ Provides useful functions for authenticating, used by web interface.
 Author: Adolfo Gómez, dkmaster at dkmon dot com
 '''
 import base64
+import codecs
+import collections.abc
 import logging
 import typing
-import collections.abc
-import codecs

 from functools import wraps
-from django.http import (
-HttpResponseRedirect,
-HttpResponseForbidden,
-HttpResponse,
-HttpRequest,
-)
-from django.utils.translation import get_language
-from django.urls import reverse

+from django.http import HttpRequest, HttpResponse, HttpResponseForbidden, HttpResponseRedirect
+from django.urls import reverse
+from django.utils.translation import get_language
 from django.utils.translation import gettext as _

-from uds.core import auths, types, exceptions, consts
+from uds import models
+from uds.core import auths, consts, exceptions, types
+from uds.core.auths import Authenticator as AuthenticatorInstance
+from uds.core.managers.crypto import CryptoManager
 from uds.core.types.requests import ExtendedHttpRequest
-from uds.core.util import log
-from uds.core.util import net
-from uds.core.util import config
+from uds.core.types.states import State
+from uds.core.util import config, log, net
 from uds.core.util.config import GlobalConfig
 from uds.core.util.stats import events
-from uds.core.types.states import State
-from uds.core.managers.crypto import CryptoManager
-from uds.core.auths import Authenticator as AuthenticatorInstance

-from uds import models

 # Not imported at runtime, just for type checking
 if typing.TYPE_CHECKING:
@@ -396,9 +387,7 @@ def web_login(
 Helper function to, once the user is authenticated, store the information at the user session.
 @return: Always returns True
 """
-from uds import ( # pylint: disable=import-outside-toplevel # to avoid circular imports
-REST,
-)
+from uds import REST # pylint: disable=import-outside-toplevel # to avoid circular imports

 if user.id != consts.auth.ROOT_ID: # If not ROOT user (this user is not inside any authenticator)
 manager_id = user.manager.id
@@ -515,12 +504,12 @@ def log_login(
 ]
 )
 )
-level = log.LogLevel.INFO if log_string == 'Logged in' else log.LogLevel.ERROR
+level = types.log.LogLevel.INFO if log_string == 'Logged in' else types.log.LogLevel.ERROR
 log.log(
 authenticator,
 level,
 f'user {userName} has {log_string} from {request.ip} where os is {request.os.os.name}',
-log.LogSource.WEB,
+types.log.LogSource.WEB,
 )

 try:
@@ -530,7 +519,7 @@ def log_login(
 user,
 level,
 f'{log_string} from {request.ip} where OS is {request.os.os.name}',
-log.LogSource.WEB,
+types.log.LogSource.WEB,
 )
 except Exception: # nosec: root user is not on any authenticator, will fail with an exception we can ingore
 logger.info('Root %s from %s where OS is %s', log_string, request.ip, request.os.os.name)
@@ -541,10 +530,10 @@ def log_logout(request: 'ExtendedHttpRequest') -> None:
 if request.user.manager.id:
 log.log(
 request.user.manager,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f'user {request.user.name} has logged out from {request.ip}',
-log.LogSource.WEB,
+types.log.LogSource.WEB,
 )
-log.log(request.user, log.LogLevel.INFO, f'has logged out from {request.ip}', log.LogSource.WEB)
+log.log(request.user, types.log.LogLevel.INFO, f'has logged out from {request.ip}', types.log.LogSource.WEB)
 else:
 logger.info('Root has logged out from %s', request.ip)
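Note: the large hunk at the top of the web-authentication helper module only reorders imports into the conventional stdlib / Django / first-party groups and collapses the multi-line django.http import; behavior is unchanged. If a tool such as isort produced the ordering (an assumption — the commit does not say), the same result can be reproduced programmatically:

# Assumption: an isort-like formatter produced the new import ordering above.
# isort.code() is part of isort's public Python API (isort >= 5).
import isort

messy = (
    "import typing\n"
    "import collections.abc\n"
    "import codecs\n"
    "from uds.core import auths, types, exceptions, consts\n"
    "from uds import models\n"
)
print(isort.code(messy))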
@@ -42,7 +42,7 @@ from django.db import transaction, OperationalError
 from django.db.models import Q

 from uds.models import DelayedTask as DBDelayedTask
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.environment import Environment
 from uds.core.util import singleton

@@ -107,7 +107,7 @@ class DelayedTaskRunner(metaclass=singleton.Singleton):
 return DelayedTaskRunner()

 def execute_delayed_task(self) -> None:
-now = sql_datetime()
+now = sql_now()
 filt = Q(execution_time__lt=now) | Q(insert_date__gt=now + timedelta(seconds=30))
 # If next execution is before now or last execution is in the future (clock changed on this server, we take that task as executable)
 try:
@@ -141,7 +141,7 @@ class DelayedTaskRunner(metaclass=singleton.Singleton):
 DelayedTaskThread(task_instance).start()

 def _insert(self, instance: DelayedTask, delay: int, tag: str) -> None:
-now = sql_datetime()
+now = sql_now()
 exec_time = now + timedelta(seconds=delay)
 cls = instance.__class__

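Note: execute_delayed_task() picks a task either when its execution_time has already passed or when its insert_date lies more than 30 seconds in the future — the second clause catches tasks inserted by a node whose clock has since moved backwards. The predicate behind the Q filter, isolated as plain Python:

import datetime

def is_runnable(execution_time: datetime.datetime,
                insert_date: datetime.datetime,
                now: datetime.datetime) -> bool:
    # Mirrors Q(execution_time__lt=now) | Q(insert_date__gt=now + timedelta(seconds=30))
    return execution_time < now or insert_date > now + datetime.timedelta(seconds=30)

now = datetime.datetime(2024, 1, 1, 12, 0, 0)
# Task scheduled in the past -> runnable
assert is_runnable(now - datetime.timedelta(seconds=5), now - datetime.timedelta(minutes=1), now)
# Task inserted "in the future" (clock skew) -> also runnable
assert is_runnable(now + datetime.timedelta(hours=1), now + datetime.timedelta(minutes=5), now)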
@@ -48,7 +48,7 @@ class JobsFactory(factory.Factory['Job']):
 Ensures that uds core workers are correctly registered in database and in factory
 """
 from uds.models import Scheduler # pylint: disable=import-outside-toplevel
-from uds.core.util.model import sql_datetime # pylint: disable=import-outside-toplevel
+from uds.core.util.model import sql_now # pylint: disable=import-outside-toplevel
 from uds.core.types.states import State # pylint: disable=import-outside-toplevel
 from uds.core import workers # pylint: disable=import-outside-toplevel

@@ -61,7 +61,7 @@ class JobsFactory(factory.Factory['Job']):
 try:
 type_.setup()
 # We use database server datetime
-now = sql_datetime()
+now = sql_now()
 next_ = now
 job = Scheduler.objects.create(
 name=name,
@@ -41,7 +41,7 @@ from django.db import transaction, DatabaseError, connections
 from django.db.models import Q

 from uds.models import Scheduler as DBScheduler
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.types.states import State
 from .jobs_factory import JobsFactory

@@ -107,7 +107,7 @@ class JobThread(threading.Thread):
 DBScheduler.objects.select_for_update().filter(id=self._db_job_id).update(
 state=State.FOR_EXECUTE,
 owner_server='',
-next_execution=sql_datetime() + timedelta(seconds=self._freq),
+next_execution=sql_now() + timedelta(seconds=self._freq),
 )


@@ -150,7 +150,7 @@ class Scheduler:
 """
 jobInstance = None
 try:
-now = sql_datetime() # Datetimes are based on database server times
+now = sql_now() # Datetimes are based on database server times
 fltr = Q(state=State.FOR_EXECUTE) & (
 Q(last_execution__gt=now) | Q(next_execution__lt=now)
 )
@@ -206,7 +206,7 @@ class Scheduler:
 owner_server=''
 ) # @UndefinedVariable
 DBScheduler.objects.select_for_update().filter(
-last_execution__lt=sql_datetime() - timedelta(minutes=15),
+last_execution__lt=sql_now() - timedelta(minutes=15),
 state=State.RUNNING,
 ).update(
 owner_server='', state=State.FOR_EXECUTE
@@ -34,7 +34,7 @@ import typing
 import logging

 from uds.core.util import singleton
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.models.log import Log
 # from uds.core.workers.log

@@ -74,7 +74,7 @@ class LogManager(metaclass=singleton.Singleton):
 Log.objects.create(
 owner_type=owner_type.value,
 owner_id=owner_id,
-created=sql_datetime(),
+created=sql_now(),
 source=source,
 level=level,
 data=message,
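Note: LogManager persists every entry with the owner's type and id plus the creation timestamp (now taken from sql_now()), the source, the level and the message. A stripped-down sketch of that record shape without the Django model (field names follow the hunk above; the dataclass and helper are illustrative):

import dataclasses
import datetime

@dataclasses.dataclass
class LogRecord:
    owner_type: int
    owner_id: int
    created: datetime.datetime
    source: str
    level: int
    data: str  # the message; the real column is also called "data"

def create_log(owner_type: int, owner_id: int, level: int, message: str, source: str,
               now: datetime.datetime) -> LogRecord:
    # Stand-in for Log.objects.create(...) in the hunk above.
    return LogRecord(owner_type=owner_type, owner_id=owner_id, created=now,
                     source=source, level=level, data=message)

record = create_log(owner_type=1, owner_id=42, level=30000, message='service started',
                    source='admin', now=datetime.datetime.now())
print(record)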
@@ -35,8 +35,8 @@ import typing
 from django.apps import apps
 from django.db import connections

+from uds.core import types
 from uds.core.util import singleton
-from uds.core.util.log import LogLevel

 if typing.TYPE_CHECKING:
 pass
@@ -73,7 +73,7 @@ class NotificationsManager(metaclass=singleton.Singleton):
 def manager() -> 'NotificationsManager':
 return NotificationsManager() # Singleton pattern will return always the same instance

-def notify(self, group: str, identificator: str, level: LogLevel, message: str, *args: typing.Any) -> None:
+def notify(self, group: str, identificator: str, level: types.log.LogLevel, message: str, *args: typing.Any) -> None:
 from uds.models.notifications import Notification # pylint: disable=import-outside-toplevel

 # Due to use of local db, we must ensure that it exists (and cannot do it on ready)
@@ -46,7 +46,7 @@ from uds.core.types.states import State
 from uds.core.util import log

 from uds.models import ServicePoolPublication, ServicePool
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now

 from uds.core.util import singleton

@@ -73,7 +73,7 @@ class PublicationOldMachinesCleaner(DelayedTask):
 if servicePoolPub.state != State.REMOVABLE:
 logger.info('Already removed')

-now = sql_datetime()
+now = sql_now()
 current_publication: typing.Optional[ServicePoolPublication] = (
 servicePoolPub.deployed_service.active_publication()
 )
@@ -100,7 +100,7 @@ class PublicationLauncher(DelayedTask):
 logger.debug('Publishing')
 servicePoolPub: typing.Optional[ServicePoolPublication] = None
 try:
-now = sql_datetime()
+now = sql_now()
 with transaction.atomic():
 servicePoolPub = ServicePoolPublication.objects.select_for_update().get(pk=self._publicationId)
 if not servicePoolPub:
@@ -267,7 +267,7 @@ class PublicationManager(metaclass=singleton.Singleton):

 publication: typing.Optional[ServicePoolPublication] = None
 try:
-now = sql_datetime()
+now = sql_now()
 publication = servicepool.publications.create(
 state=State.LAUNCHING,
 state_date=now,
@@ -303,9 +303,9 @@ class PublicationManager(metaclass=singleton.Singleton):
 logger.info('Double cancel invoked for a publication')
 log.log(
 publication.deployed_service,
-log.LogLevel.WARNING,
+types.log.LogLevel.WARNING,
 'Forced cancel on publication, you must check uncleaned resources manually',
-log.LogSource.ADMIN,
+types.log.LogSource.ADMIN,
 )
 publication.set_state(State.CANCELED)
 publication.save()
@@ -215,7 +215,7 @@ class ServerManager(metaclass=singleton.Singleton):

 # Look for existing user asignation through properties
 prop_name = self.property_name(userservice.user)
-now = model_utils.sql_datetime()
+now = model_utils.sql_now()

 excluded_servers_uuids = excluded_servers_uuids or set()

@@ -457,7 +457,7 @@ class ServerManager(metaclass=singleton.Singleton):
 List of servers sorted by usage
 """
 with transaction.atomic():
-now = model_utils.sql_datetime()
+now = model_utils.sql_now()
 fltrs = server_group.servers.filter(maintenance_mode=False)
 fltrs = fltrs.filter(Q(locked_until=None) | Q(locked_until__lte=now)) # Only unlocked servers
 if excluded_servers_uuids:
@@ -38,25 +38,25 @@ from django.conf import settings
 from uds import models
 from uds.core import consts, osmanagers, types
 from uds.core.util import log
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.REST.utils import rest_result

 logger = logging.getLogger(__name__)


 def process_log(server: 'models.Server', data: dict[str, typing.Any]) -> typing.Any:
-# Log level is an string, as in log.LogLevel
+# Log level is an string, as in types.log.LogLevel
 if data.get('userservice_uuid', None): # Log for an user service
 try:
 userService = models.UserService.objects.get(uuid=data['userservice_uuid'])
 log.log(
-userService, log.LogLevel.from_str(data['level']), data['message'], source=log.LogSource.SERVER
+userService, types.log.LogLevel.from_str(data['level']), data['message'], source=types.log.LogSource.SERVER
 )
 return rest_result(consts.OK)
 except models.UserService.DoesNotExist:
 pass # If not found, log on server

-log.log(server, log.LogLevel.from_str(data['level']), data['message'], source=log.LogSource.SERVER)
+log.log(server, types.log.LogLevel.from_str(data['level']), data['message'], source=types.log.LogSource.SERVER)

 return rest_result(consts.OK)

@@ -150,7 +150,7 @@ def process_ping(server: 'models.Server', data: dict[str, typing.Any]) -> typing
 if 'stats' in data:
 server.stats = types.servers.ServerStats.from_dict(data['stats'])
 # Set stats on server
-server.last_ping = sql_datetime()
+server.last_ping = sql_now()

 return rest_result(consts.OK)

@@ -39,7 +39,7 @@ import collections.abc
 from uds.core import types, consts

 from uds.core.util import security, cache
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now


 if typing.TYPE_CHECKING:
@@ -63,7 +63,7 @@ def restrain_server(func: collections.abc.Callable[..., typing.Any]) -> collecti
 try:
 return func(self, *args, **kwargs)
 except Exception as e:
-restrained_until = sql_datetime() + datetime.timedelta(seconds=consts.system.FAILURE_TIMEOUT)
+restrained_until = sql_now() + datetime.timedelta(seconds=consts.system.FAILURE_TIMEOUT)
 logger.exception('Error executing %s: %s. Server restrained until %s', func.__name__, e, restrained_until)
 self.server.set_restrained_until(
 restrained_until
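Note: restrain_server() wraps calls towards a managed server; if the call raises, the server is marked as restrained until now + FAILURE_TIMEOUT so it is skipped for a while. A self-contained sketch of that decorator pattern (simplified: the timeout value and the re-raise are assumptions, not the UDS implementation):

import datetime
import functools
import typing

FAILURE_TIMEOUT = 300  # seconds; illustrative value

class ApiClient:
    def __init__(self) -> None:
        self.restrained_until: typing.Optional[datetime.datetime] = None

    def set_restrained_until(self, when: datetime.datetime) -> None:
        self.restrained_until = when

def restrain_on_failure(func: typing.Callable[..., typing.Any]) -> typing.Callable[..., typing.Any]:
    @functools.wraps(func)
    def wrapper(self: ApiClient, *args: typing.Any, **kwargs: typing.Any) -> typing.Any:
        try:
            return func(self, *args, **kwargs)
        except Exception:
            # On failure, keep the server out of rotation for a while.
            self.set_restrained_until(
                datetime.datetime.now() + datetime.timedelta(seconds=FAILURE_TIMEOUT)
            )
            raise
    return wrapper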
@@ -39,7 +39,7 @@ import typing
 from uds.core import types
 from uds.core.util import singleton
 from uds.core.util.config import GlobalConfig
-from uds.core.util.model import sql_datetime, sql_stamp_seconds
+from uds.core.util.model import sql_now, sql_stamp_seconds
 from uds.models import StatsCounters, StatsCountersAccum, StatsEvents

 if typing.TYPE_CHECKING:

@@ -85,7 +85,7 @@ class StatsManager(metaclass=singleton.Singleton):
 model: type[typing.Union['StatsCounters', 'StatsEvents', 'StatsCountersAccum']],
 ) -> None:
 minTime = time.mktime(
-(sql_datetime() - datetime.timedelta(days=GlobalConfig.STATS_DURATION.as_int())).timetuple()
+(sql_now() - datetime.timedelta(days=GlobalConfig.STATS_DURATION.as_int())).timetuple()
 )
 model.objects.filter(stamp__lt=minTime).delete()

@@ -115,7 +115,7 @@ class StatsManager(metaclass=singleton.Singleton):
 Nothing
 """
 if stamp is None:
-stamp = sql_datetime()
+stamp = sql_now()

 # To Unix epoch
 stampInt = int(time.mktime(stamp.timetuple())) # pylint: disable=maybe-no-member

@@ -188,7 +188,7 @@ class StatsManager(metaclass=singleton.Singleton):
 if since is None:
 if points is None:
 points = 100 # If since is not specified, we need at least points, get a default
-since = sql_datetime() - datetime.timedelta(seconds=intervalType.seconds() * points)
+since = sql_now() - datetime.timedelta(seconds=intervalType.seconds() * points)

 if isinstance(since, datetime.datetime):
 since = int(since.timestamp())
@@ -50,7 +50,7 @@ from uds.core.services.exceptions import (
 )
 from uds.core.util import log, singleton
 from uds.core.util.decorators import cached
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.types.states import State
 from uds.core.util.stats import events
 from uds.models import MetaPool, ServicePool, ServicePoolPublication, Transport, User, UserService

@@ -124,7 +124,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
 """
 # Checks if userservices_limit has been reached and if so, raises an exception
 self._check_user_services_limit_reached(publication.deployed_service)
-now = sql_datetime()
+now = sql_now()
 return publication.userServices.create(
 cache_level=cacheLevel,
 state=State.PREPARING,

@@ -144,7 +144,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
 Private method to instatiate an assigned element at database with default state
 """
 self._check_user_services_limit_reached(publication.deployed_service)
-now = sql_datetime()
+now = sql_now()
 return publication.userServices.create(
 cache_level=0,
 state=State.PREPARING,

@@ -166,7 +166,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
 an UserService with no publications, and create them from an ServicePool
 """
 self._check_user_services_limit_reached(service_pool)
-now = sql_datetime()
+now = sql_now()
 return service_pool.userServices.create(
 cache_level=0,
 state=State.PREPARING,

@@ -509,9 +509,9 @@ class UserServiceManager(metaclass=singleton.Singleton):
 ): # cacheUpdater will drop unnecesary L1 machines, so it's not neccesary to check against inCacheL1
 log.log(
 service_pool,
-log.LogLevel.WARNING,
+types.log.LogLevel.WARNING,
 f'Max number of services reached: {service_pool.max_srvs}',
-log.LogSource.INTERNAL,
+types.log.LogSource.INTERNAL,
 )
 raise MaxServicesReachedError()

@@ -805,9 +805,9 @@ class UserServiceManager(metaclass=singleton.Singleton):
 service_status = types.services.ReadyStatus.USERSERVICE_NO_IP
 log.log(
 user_service,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f"User {user.pretty_name} from {src_ip} has initiated access",
-log.LogSource.WEB,
+types.log.LogSource.WEB,
 )
 # If ready, show transport for this service, if also ready ofc
 userServiceInstance = user_service.get_instance()

@@ -819,9 +819,9 @@ class UserServiceManager(metaclass=singleton.Singleton):
 service_status = types.services.ReadyStatus.USERSERVICE_INVALID_UUID
 log.log(
 user_service,
-log.LogLevel.WARNING,
+types.log.LogLevel.WARNING,
 f'User service is not accessible due to invalid UUID (user: {user.pretty_name}, ip: {ip})',
-log.LogSource.TRANSPORT,
+types.log.LogSource.TRANSPORT,
 )
 logger.debug('UUID check failed for user service %s', user_service)
 else:

@@ -837,7 +837,9 @@ class UserServiceManager(metaclass=singleton.Singleton):
 service_status = types.services.ReadyStatus.TRANSPORT_NOT_READY
 transportInstance = transport.get_instance()
 if transportInstance.is_ip_allowed(user_service, ip):
-log.log(user_service, log.LogLevel.INFO, "User service ready", log.LogSource.WEB)
+log.log(
+user_service, types.log.LogLevel.INFO, "User service ready", types.log.LogSource.WEB
+)
 self.notify_preconnect(
 user_service,
 transportInstance.get_connection_info(user_service, user, ''),

@@ -858,7 +860,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
 )

 message = transportInstance.get_available_error_msg(user_service, ip)
-log.log(user_service, log.LogLevel.WARNING, message, log.LogSource.TRANSPORT)
+log.log(user_service, types.log.LogLevel.WARNING, message, types.log.LogSource.TRANSPORT)
 logger.debug(
 'Transport is not ready for user service %s: %s',
 user_service,

@@ -869,9 +871,9 @@ class UserServiceManager(metaclass=singleton.Singleton):
 else:
 log.log(
 user_service,
-log.LogLevel.WARNING,
+types.log.LogLevel.WARNING,
 f'User {user.pretty_name} from {src_ip} tried to access, but service was not ready',
-log.LogSource.WEB,
+types.log.LogSource.WEB,
 )

 trace_logger.error(

@@ -1059,8 +1061,8 @@ class UserServiceManager(metaclass=singleton.Singleton):

 log.log(
 meta,
-log.LogLevel.WARNING,
+types.log.LogLevel.WARNING,
 f'No user service accessible from device (ip {srcIp}, os: {os.os.name})',
-log.LogSource.SERVICE,
+types.log.LogSource.SERVICE,
 )
 raise InvalidServiceException(_('The service is not accessible from this device'))
@@ -65,7 +65,7 @@ class StateUpdater(abc.ABC):
 logger.error('Got error on processor: %s', msg)
 self.save(types.states.State.ERROR)
 if msg is not None:
-log.log(self.user_service, log.LogLevel.ERROR, msg, log.LogSource.INTERNAL)
+log.log(self.user_service, types.log.LogLevel.ERROR, msg, types.log.LogSource.INTERNAL)

 def save(self, newState: typing.Optional[str] = None) -> None:
 if newState:

@@ -263,7 +263,7 @@ class UserServiceOpChecker(DelayedTask):

 except Exception as e:
 logger.exception('Checking service state')
-log.log(userservice, log.LogLevel.ERROR, f'Exception: {e}', log.LogSource.INTERNAL)
+log.log(userservice, types.log.LogLevel.ERROR, f'Exception: {e}', types.log.LogSource.INTERNAL)
 userservice.set_state(types.states.State.ERROR)
 userservice.save(update_fields=['data'])

@@ -304,7 +304,7 @@ class UserServiceOpChecker(DelayedTask):
 # Exception caught, mark service as errored
 logger.exception("Error %s, %s :", e.__class__, e)
 if userservice:
-log.log(userservice, log.LogLevel.ERROR, f'Exception: {e}', log.LogSource.INTERNAL)
+log.log(userservice, types.log.LogLevel.ERROR, f'Exception: {e}', types.log.LogSource.INTERNAL)
 try:
 userservice.set_state(types.states.State.ERROR)
 userservice.save(update_fields=['data'])
@@ -36,9 +36,9 @@ import typing
 from uds.core.managers.task import BaseThread

 from uds.models import Notifier, Notification
-from uds.core import consts
+from uds.core import consts, types
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
-from .provider import Notifier as NotificationProviderModule, LogLevel
+from .provider import Notifier as NotificationProviderModule
 from .config import DO_NOT_REPEAT

 logger = logging.getLogger(__name__)

@@ -76,7 +76,7 @@ class MessageProcessorThread(BaseThread):
 while self._keep_running:
 # Locate all notifications from "persistent" and try to process them
 # If no notification can be fully resolved, it will be kept in the database
-not_before = sql_datetime() - datetime.timedelta(
+not_before = sql_now() - datetime.timedelta(
 seconds=DO_NOT_REPEAT.as_int()
 )
 for n in Notification.get_persistent_queryset().all():

@@ -130,7 +130,7 @@ class MessageProcessorThread(BaseThread):
 p.notify(
 n.group,
 n.identificator,
-LogLevel.from_int(n.level),
+types.log.LogLevel.from_int(n.level),
 n.message,
 )
 except Exception:
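The notifier change above converts the integer level stored on each Notification row back into the enum with types.log.LogLevel.from_int(). A short round-trip example (values only; the Notification model itself is not touched here):

from uds.core.types.log import LogLevel

stored = int(LogLevel.WARNING)                       # persisted as 40000
assert LogLevel.from_int(stored) is LogLevel.WARNING
assert LogLevel.from_int(12345) is LogLevel.OTHER    # unknown values degrade to OTHER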
@@ -44,7 +44,7 @@ from django.utils.translation import gettext_noop as _
 from uds.core import exceptions, types
 from uds.core.ui import gui
 from uds.core.module import Module
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.models.network import Network

 if typing.TYPE_CHECKING:

@@ -230,7 +230,7 @@ class MFA(Module):
 Internal method to put the data into storage
 """
 storageKey = request.ip + userId
-self.storage.save_pickled(storageKey, (sql_datetime(), code))
+self.storage.save_pickled(storageKey, (sql_now(), code))

 def process(
 self,

@@ -267,7 +267,7 @@ class MFA(Module):
 try:
 if data and validity:
 # if we have a stored code, check if it's still valid
-if data[0] + datetime.timedelta(seconds=validity) > sql_datetime():
+if data[0] + datetime.timedelta(seconds=validity) > sql_now():
 # if it's still valid, just return without sending a new one
 return MFA.RESULT.OK
 except Exception:

@@ -320,7 +320,7 @@ class MFA(Module):
 data = self._get_data(request, userId)
 if data and len(data) == 2:
 validity = validity if validity is not None else 0
-if validity > 0 and data[0] + datetime.timedelta(seconds=validity) < sql_datetime():
+if validity > 0 and data[0] + datetime.timedelta(seconds=validity) < sql_now():
 # if it is no more valid, raise an error
 # Remove stored code and raise error
 self._remove_data(request, userId)
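The two MFA hunks above keep a (timestamp, code) tuple in storage and compare it against sql_now(). The validity rule they apply can be summarized in this small, self-contained sketch (names are illustrative, not taken from the commit):

import datetime

def code_still_valid(stored_at: datetime.datetime, validity_seconds: int, now: datetime.datetime) -> bool:
    # Same comparison as the MFA.process() branch: a stored code is reusable
    # while its creation time plus the validity window is still in the future.
    return stored_at + datetime.timedelta(seconds=validity_seconds) > now

assert code_still_valid(datetime.datetime(2024, 1, 1, 12, 0), 300, datetime.datetime(2024, 1, 1, 12, 4))
assert not code_still_valid(datetime.datetime(2024, 1, 1, 12, 0), 300, datetime.datetime(2024, 1, 1, 12, 6))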
@@ -262,9 +262,9 @@ class OSManager(Module):

 log.log(
 userservice,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f'User {username} has logged in',
-log.LogSource.OSMANAGER,
+types.log.LogSource.OSMANAGER,
 )

 log.log_use(

@@ -326,9 +326,9 @@ class OSManager(Module):

 log.log(
 userservice,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f'User {username} has logged out',
-log.LogSource.OSMANAGER,
+types.log.LogSource.OSMANAGER,
 )

 log.log_use(
@@ -37,7 +37,7 @@ import typing
 import collections.abc

 from uds.core import services, types, consts
-from uds.core.util import log, autoserializable
+from uds.core.util import autoserializable
 from uds.core.util.model import sql_stamp_seconds

 from .. import exceptions

@@ -214,7 +214,7 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
 self._error_debug_info = self._debug(repr(reason))
 reason = str(reason)
 logger.debug('Setting error state, reason: %s (%s)', reason, self._queue, stack_info=True, stacklevel=3)
-self.do_log(log.LogLevel.ERROR, reason)
+self.do_log(types.log.LogLevel.ERROR, reason)

 if self._vmid:
 if self.service().should_maintain_on_error() is False:

@@ -224,7 +224,7 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
 except Exception as e:
 logger.exception('Exception removing machine %s: %s', self._vmid, e)
 self._vmid = ''
-self.do_log(log.LogLevel.ERROR, f'Error removing machine: {e}')
+self.do_log(types.log.LogLevel.ERROR, f'Error removing machine: {e}')
 else:
 logger.debug('Keep on error is enabled, not removing machine')
 self._set_queue([types.services.Operation.FINISH] if self.keep_state_sets_error else [types.services.Operation.ERROR])

@@ -676,7 +676,7 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
 if sql_stamp_seconds() - shutdown_start > consts.os.MAX_GUEST_SHUTDOWN_WAIT:
 logger.debug('Time is consumed, falling back to stop on vmid %s', self._vmid)
 self.do_log(
-log.LogLevel.ERROR,
+types.log.LogLevel.ERROR,
 f'Could not shutdown machine using soft power off in time ({consts.os.MAX_GUEST_SHUTDOWN_WAIT} seconds). Powering off.',
 )
 # Not stopped by guest in time, but must be stopped normally
@@ -37,7 +37,7 @@ import collections.abc

 from uds.core import consts, services, types
 from uds.core.types.services import Operation
-from uds.core.util import log, autoserializable
+from uds.core.util import autoserializable

 from .. import exceptions


@@ -145,7 +145,7 @@ class FixedUserService(services.UserService, autoserializable.AutoSerializable,
 """
 reason = str(reason)
 logger.debug('Setting error state, reason: %s (%s)', reason, self._queue, stack_info=True, stacklevel=3)
-self.do_log(log.LogLevel.ERROR, reason)
+self.do_log(types.log.LogLevel.ERROR, reason)

 if self._vmid:
 if self.service().should_maintain_on_error() is False:

@@ -229,7 +229,7 @@ class FixedUserService(services.UserService, autoserializable.AutoSerializable,
 if self._vmid:
 return self.service().get_ip(self._vmid)
 except exceptions.NotFoundError:
-self.do_log(log.LogLevel.ERROR, f'Machine not found: {self._vmid}::{self._name}')
+self.do_log(types.log.LogLevel.ERROR, f'Machine not found: {self._vmid}::{self._name}')

 except Exception:
 pass
@@ -224,14 +224,14 @@ class ServiceProvider(module.Module):

 return ret_val

-def do_log(self, level: log.LogLevel, message: str) -> None:
+def do_log(self, level: 'types.log.LogLevel', message: str) -> None:
 """
 Logs a message with requested level associated with this service
 """
 from uds.models import Provider as DBProvider # pylint: disable=import-outside-toplevel

 if self.get_uuid():
-log.log(DBProvider.objects.get(uuid=self.get_uuid()), level, message, log.LogSource.SERVICE)
+log.log(DBProvider.objects.get(uuid=self.get_uuid()), level, message, types.log.LogSource.SERVICE)

 def __str__(self) -> str:
 """

@@ -482,14 +482,14 @@ class Service(Module):
 """
 return False

-def do_log(self, level: log.LogLevel, message: str) -> None:
+def do_log(self, level: types.log.LogLevel, message: str) -> None:
 """
 Logs a message with requested level associated with this service
 """
 from uds.models import Service as DBService # pylint: disable=import-outside-toplevel

 if self.get_uuid():
-log.log(DBService.objects.get(uuid=self.get_uuid()), level, message, log.LogSource.SERVICE)
+log.log(DBService.objects.get(uuid=self.get_uuid()), level, message, types.log.LogSource.SERVICE)

 @classmethod
 def can_assign(cls) -> bool:

@@ -227,12 +227,12 @@ class UserService(Environmentable, Serializable, abc.ABC):
 def get_uuid(self) -> str:
 return self._uuid

-def do_log(self, level: log.LogLevel, message: str) -> None:
+def do_log(self, level: types.log.LogLevel, message: str) -> None:
 """
 Logs a message with requested level associated with this user deployment
 """
 if self._db_obj:
-log.log(self._db_obj, level, message, log.LogSource.SERVICE)
+log.log(self._db_obj, level, message, types.log.LogSource.SERVICE)

 def mac_generator(self) -> 'UniqueMacGenerator':
 """
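After the three signature changes above, do_log() on providers, services and user deployments takes a types.log.LogLevel. A minimal usage sketch (the warn helper and its service argument are illustrative, not part of the commit):

from uds.core import types

def warn(service, message: str) -> None:
    # 'service' stands for any ServiceProvider, Service or UserService
    # instance exposing the do_log() shown above.
    service.do_log(types.log.LogLevel.WARNING, message)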
@@ -47,6 +47,7 @@ from . import (
 transports,
 ui,
 core,
+log,
 )

 # Log is not imported here, as it is a special case with lots of dependencies
@@ -3,12 +3,90 @@ import collections.abc
 import functools
 import enum

-from uds import models

 # Not imported at runtime, just for type checking
 if typing.TYPE_CHECKING:
 from django.db.models import Model


+class LogLevel(enum.IntEnum):
+OTHER = 10000
+DEBUG = 20000
+INFO = 30000
+WARNING = 40000
+ERROR = 50000
+CRITICAL = 60000
+
+def __str__(self) -> str:
+return self.name
+
+def __repr__(self) -> str:
+return self.name
+
+@staticmethod
+def from_str(level: str) -> 'LogLevel':
+try:
+return LogLevel[level.upper()]
+except Exception:
+# logger.error('Error getting log level from string: %s', e)
+return LogLevel.OTHER
+
+@staticmethod
+def from_int(level: int) -> 'LogLevel':
+try:
+return LogLevel(level)
+except ValueError:
+return LogLevel.OTHER
+
+@staticmethod
+def from_actor_level(level: int) -> 'LogLevel':
+"""
+Returns the log level for actor log level
+"""
+return [LogLevel.DEBUG, LogLevel.INFO, LogLevel.ERROR, LogLevel.CRITICAL][level % 4]
+
+@staticmethod
+def from_logging_level(level: int) -> 'LogLevel':
+"""
+Returns the log level for logging log level
+"""
+return [
+LogLevel.OTHER,
+LogLevel.DEBUG,
+LogLevel.INFO,
+LogLevel.WARNING,
+LogLevel.ERROR,
+LogLevel.CRITICAL,
+][level // 10]
+
+# Return all Log levels as tuples of (level value, level name)
+@staticmethod
+def all() -> list[tuple[int, str]]:
+return [(level.value, level.name) for level in LogLevel]
+
+# Rteturns "interesting" log levels
+@staticmethod
+def interesting() -> list[tuple[int, str]]:
+"""Returns "interesting" log levels
+
+Interesting log levels are those that are ABOBE INFO level (that is, errors, etc..)
+"""
+return [(level.value, level.name) for level in LogLevel if level.value > LogLevel.INFO.value]
+
+
+class LogSource(enum.StrEnum):
+INTERNAL = 'internal'
+ACTOR = 'actor'
+TRANSPORT = 'transport'
+OSMANAGER = 'osmanager'
+UNKNOWN = 'unknown'
+WEB = 'web'
+ADMIN = 'admin'
+SERVICE = 'service'
+SERVER = 'server'
+REST = 'rest'
+LOGS = 'logs'
+
+
 # Note: Once assigned a value, do not change it, as it will break the log
 class LogObjectType(enum.IntEnum):
 USERSERVICE = 0
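The LogLevel enum added above ships several converters whose behavior follows directly from the code in this hunk; a short worked example, assuming the module lands at uds.core.types.log as the import added later in this commit suggests:

import logging
from uds.core.types.log import LogLevel, LogSource

assert LogLevel.from_str('warning') is LogLevel.WARNING       # case-insensitive name lookup
assert LogLevel.from_str('no-such-level') is LogLevel.OTHER   # unknown names degrade to OTHER
assert LogLevel.from_int(50000) is LogLevel.ERROR
assert LogLevel.from_logging_level(logging.WARNING) is LogLevel.WARNING   # 30 // 10 -> index 3
assert LogLevel.from_actor_level(2) is LogLevel.ERROR         # [DEBUG, INFO, ERROR, CRITICAL][2 % 4]
assert str(LogSource.WEB) == 'web'                            # StrEnum members render as their value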
@@ -39,18 +117,20 @@ class LogObjectType(enum.IntEnum):
 """
 Returns the type of log object from the model
 """
-return _MODEL_TO_TYPE.get(type(model), None)
+from uds import models

 # Dict for translations
-_MODEL_TO_TYPE: typing.Final[collections.abc.Mapping[type['Model'], LogObjectType]] = {
+_MODEL_TO_TYPE: typing.Final[collections.abc.Mapping[type['Model'], 'LogObjectType']] = {
 models.UserService: LogObjectType.USERSERVICE,
 models.ServicePoolPublication: LogObjectType.PUBLICATION,
 models.ServicePool: LogObjectType.SERVICEPOOL,
 models.Service: LogObjectType.SERVICE,
 models.Server: LogObjectType.SERVER,
 models.Provider: LogObjectType.PROVIDER,
 models.User: LogObjectType.USER,
 models.Group: LogObjectType.GROUP,
 models.Authenticator: LogObjectType.AUTHENTICATOR,
 models.MetaPool: LogObjectType.METAPOOL,
 }

+return _MODEL_TO_TYPE.get(type(model), None)
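The hunk above moves both the uds.models import and the _MODEL_TO_TYPE table inside the lookup method, presumably so uds.core.types.log no longer needs uds.models at import time. A self-contained illustration of the lookup behavior itself, with stand-in classes rather than the real models:

import enum

class _Kind(enum.IntEnum):   # reduced stand-in for LogObjectType, values illustrative
    USERSERVICE = 0
    SERVICEPOOL = 1

class _FakeUserService:      # stand-ins for the Django model classes
    pass

class _FakeServicePool:
    pass

_model_to_kind = {
    _FakeUserService: _Kind.USERSERVICE,
    _FakeServicePool: _Kind.SERVICEPOOL,
}

assert _model_to_kind.get(type(_FakeUserService()), None) is _Kind.USERSERVICE
assert _model_to_kind.get(type(object()), None) is None   # unmapped models fall back to None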
@@ -38,7 +38,7 @@ import logging

 from django.db import transaction
 from uds.models.cache import Cache as DBCache
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.util import serializer
 from uds.core import consts


@@ -74,7 +74,7 @@ class Cache:
 return hash_key(self._owner.encode() + key)

 def get(self, skey: typing.Union[str, bytes], default: typing.Any = None) -> typing.Any:
-now = sql_datetime()
+now = sql_now()
 # logger.debug('Requesting key "%s" for cache "%s"', skey, self._owner)
 try:
 key = self._get_key(skey)

@@ -159,7 +159,7 @@ class Cache:
 validity = consts.cache.DEFAULT_CACHE_TIMEOUT
 key = self._get_key(skey)
 strValue = Cache._serializer(value)
-now = sql_datetime()
+now = sql_now()
 # Remove existing if any and create a new one
 with transaction.atomic():
 try:

@@ -200,7 +200,7 @@ class Cache:
 try:
 key = self._get_key(skey)
 c = DBCache.objects.get(pk=key)
-c.created = sql_datetime()
+c.created = sql_now()
 c.save()
 except DBCache.DoesNotExist:
 logger.debug('Can\'t refresh cache key %s because it doesn\'t exists', skey)
@@ -40,7 +40,7 @@ import bitarray

 from django.core.cache import caches

-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now

 from uds.models.calendar import Calendar


@@ -140,7 +140,7 @@ class CalendarChecker:
 @param dtime: Datetime object to check
 """
 if dtime is None:
-dtime = sql_datetime()
+dtime = sql_now()

 # memcached access
 memcache_storage = caches['memory']

@@ -182,7 +182,7 @@ class CalendarChecker:
 """
 logger.debug('Obtaining nextEvent')
 if not check_from:
-check_from = sql_datetime()
+check_from = sql_now()

 if not offset:
 offset = datetime.timedelta(minutes=0)
@@ -319,7 +319,7 @@ def cached(

 # Execute the function outside the DB transaction
 t = time.thread_time_ns()
-data = fnc(*args, **kwargs)
+data = fnc(*args, **kwargs) # pyright: ignore # For some reason, pyright does not like this line
 exec_time += time.thread_time_ns() - t

 try:
@@ -34,11 +34,12 @@ import os
 import logging
 import logging.handlers
 import typing
-import enum
 import re

 from django.apps import apps

+from uds.core.types.log import LogLevel, LogSource

 try:
 from systemd import journal
 except ImportError:

@@ -59,85 +60,6 @@ LOGLEVEL_PATTERN: typing.Final[typing.Pattern[str]] = re.compile(r'^(DEBUG|INFO|


-class LogLevel(enum.IntEnum):
-OTHER = 10000
-DEBUG = 20000
-INFO = 30000
-WARNING = 40000
-ERROR = 50000
-CRITICAL = 60000
-
-def __str__(self) -> str:
-return self.name
-
-def __repr__(self) -> str:
-return self.name
-
-@staticmethod
-def from_str(level: str) -> 'LogLevel':
-try:
-return LogLevel[level.upper()]
-except Exception:
-# logger.error('Error getting log level from string: %s', e)
-return LogLevel.OTHER
-
-@staticmethod
-def from_int(level: int) -> 'LogLevel':
-try:
-return LogLevel(level)
-except ValueError:
-return LogLevel.OTHER
-
-@staticmethod
-def from_actor_level(level: int) -> 'LogLevel':
-"""
-Returns the log level for actor log level
-"""
-return [LogLevel.DEBUG, LogLevel.INFO, LogLevel.ERROR, LogLevel.CRITICAL][level % 4]
-
-@staticmethod
-def from_logging_level(level: int) -> 'LogLevel':
-"""
-Returns the log level for logging log level
-"""
-return [
-LogLevel.OTHER,
-LogLevel.DEBUG,
-LogLevel.INFO,
-LogLevel.WARNING,
-LogLevel.ERROR,
-LogLevel.CRITICAL,
-][level // 10]
-
-# Return all Log levels as tuples of (level value, level name)
-@staticmethod
-def all() -> list[tuple[int, str]]:
-return [(level.value, level.name) for level in LogLevel]
-
-# Rteturns "interesting" log levels
-@staticmethod
-def interesting() -> list[tuple[int, str]]:
-"""Returns "interesting" log levels
-
-Interesting log levels are those that are ABOBE INFO level (that is, errors, etc..)
-"""
-return [(level.value, level.name) for level in LogLevel if level.value > LogLevel.INFO.value]
-
-
-class LogSource(enum.StrEnum):
-INTERNAL = 'internal'
-ACTOR = 'actor'
-TRANSPORT = 'transport'
-OSMANAGER = 'osmanager'
-UNKNOWN = 'unknown'
-WEB = 'web'
-ADMIN = 'admin'
-SERVICE = 'service'
-SERVER = 'server'
-REST = 'rest'
-LOGS = 'logs'


 def log_use(
 type_: str,
 serviceUniqueId: str,
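Although the enums are deleted from uds.core.util.log above, the new import near the top of this hunk re-exposes them, so both import paths end up pointing at the same objects. A quick sanity check, not taken from the commit:

from uds.core.util import log
from uds.core.types.log import LogLevel, LogSource

assert log.LogLevel is LogLevel
assert log.LogSource is LogSource
assert log.LogLevel.from_str('error') is LogLevel.ERROR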
@@ -87,7 +87,7 @@ class TimeTrack:
 return date

 @staticmethod
-def sql_datetime() -> datetime.datetime:
+def sql_now() -> datetime.datetime:
 now = datetime.datetime.now()
 with TimeTrack.lock:
 diff = now - TimeTrack.last_check

@@ -102,11 +102,11 @@ class TimeTrack:
 return TimeTrack.cached_time + (now - TimeTrack.last_check)


-def sql_datetime() -> datetime.datetime:
+def sql_now() -> datetime.datetime:
 """Returns the current date/time of the database server.
 Has been updated to use TimeTrack, which reduces the queries to database to get the current time
 """
-return TimeTrack.sql_datetime()
+return TimeTrack.sql_now()


 def sql_stamp_seconds() -> int:

@@ -115,7 +115,7 @@ def sql_stamp_seconds() -> int:
 Returns:
 int: Unix timestamp
 """
-return int(mktime(sql_datetime().timetuple()))
+return int(mktime(sql_now().timetuple()))


 def sql_stamp() -> float:

@@ -124,7 +124,7 @@ def sql_stamp() -> float:
 Returns:
 float: Unix timestamp
 """
-return float(mktime(sql_datetime().timetuple())) + sql_datetime().microsecond / 1000000.0
+return float(mktime(sql_now().timetuple())) + sql_now().microsecond / 1000000.0


 def generate_uuid(obj: typing.Any = None) -> str:
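The rename above turns sql_datetime() into sql_now() on both TimeTrack and the module-level helpers. The caller pattern used throughout the rest of this commit looks roughly like this (the 600-second window is an arbitrary example value, not a UDS setting):

import datetime
from uds.core.util.model import sql_now, sql_stamp_seconds

cutoff = sql_now() - datetime.timedelta(seconds=600)   # "anything older than ten minutes"
epoch_seconds = sql_stamp_seconds()                    # integer Unix timestamp from the same clock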
@@ -34,12 +34,13 @@ from datetime import timedelta

 from django.db.models import Q, Count

+from uds.core import types
 from uds.core.jobs import Job
 from uds.core.util import log
 from uds.core.util.config import GlobalConfig
 from uds.core.types.states import State
 from uds.models import ServicePool
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now

 logger = logging.getLogger(__name__)


@@ -50,7 +51,7 @@ class AssignedAndUnused(Job):
 friendly_name = 'Unused services checker'

 def run(self) -> None:
-since_state = sql_datetime() - timedelta(
+since_state = sql_now() - timedelta(
 seconds=GlobalConfig.CHECK_UNUSED_TIME.as_int()
 )
 # Locate service pools with pending assigned service in use

@@ -93,8 +94,8 @@ class AssignedAndUnused(Job):
 )
 log.log(
 us,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
-source=log.LogSource.SERVER,
+source=types.log.LogSource.SERVER,
 message='Removing unused assigned service',
 )
 us.release()
@@ -33,10 +33,11 @@ from datetime import timedelta
 import logging

 from django.db.models import Q, Count

+from uds.core import types
 from uds.core.util.config import GlobalConfig
 from uds.models import ServicePool, UserService
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
-from uds.core.types.states import State
 from uds.core.jobs import Job
 from uds.core.util import log


@@ -49,15 +50,15 @@ class HangedCleaner(Job):
 friendly_name = 'Hanged services checker'

 def run(self) -> None:
-now = sql_datetime()
+now = sql_now()
 since_state = now - timedelta(
 seconds=GlobalConfig.MAX_INITIALIZING_TIME.as_int()
 )
 since_removing = now - timedelta(seconds=GlobalConfig.MAX_REMOVAL_TIME.as_int())
 # Filter for locating machine not ready
-flt = Q(state_date__lt=since_state, state=State.PREPARING) | Q(
+flt = Q(state_date__lt=since_state, state=types.states.State.PREPARING) | Q(
-state_date__lt=since_state, state=State.USABLE, os_state=State.PREPARING
+state_date__lt=since_state, state=types.states.State.USABLE, os_state=types.states.State.PREPARING
-) | Q(state_date__lt=since_removing, state__in=[State.REMOVING, State.CANCELING])
+) | Q(state_date__lt=since_removing, state__in=[types.states.State.REMOVING, types.states.State.CANCELING])

 servicepools_with_hanged = (
 ServicePool.objects.annotate(

@@ -66,22 +67,22 @@ class HangedCleaner(Job):
 # Rewrited Filter for servicePool
 filter=Q(
 userServices__state_date__lt=since_state,
-userServices__state=State.PREPARING,
+userServices__state=types.states.State.PREPARING,
 )
 | Q(
 userServices__state_date__lt=since_state,
-userServices__state=State.USABLE,
+userServices__state=types.states.State.USABLE,
-userServices__os_state=State.PREPARING,
+userServices__os_state=types.states.State.PREPARING,
 )
 | Q(
 userServices__state_date__lt=since_removing,
-userServices__state__in=[State.REMOVING, State.CANCELING],
+userServices__state__in=[types.states.State.REMOVING, types.states.State.CANCELING],
 ),
 )
 )
 .exclude(hanged=0)
 .exclude(service__provider__maintenance_mode=True)
-.filter(state=State.ACTIVE)
+.filter(state=types.states.State.ACTIVE)
 )

 # Type

@@ -95,30 +96,30 @@ class HangedCleaner(Job):
 continue
 logger.debug('Found hanged service %s', us)
 if (
-us.state in [State.REMOVING, State.CANCELING]
+us.state in [types.states.State.REMOVING, types.states.State.CANCELING]
 ): # Removing too long, remark it as removable
 log.log(
 us,
-log.LogLevel.ERROR,
+types.log.LogLevel.ERROR,
 'User Service hanged on removal process. Restarting removal.',
-log.LogSource.INTERNAL,
+types.log.LogSource.INTERNAL,
 )
 log.log(
 servicePool,
-log.LogLevel.ERROR,
+types.log.LogLevel.ERROR,
 f'User service {us.friendly_name} hanged on removal. Restarting removal.',
 )
 us.release() # Mark it again as removable, and let's see
 else:
 log.log(
 us,
-log.LogLevel.ERROR,
+types.log.LogLevel.ERROR,
 'User Service seems to be hanged. Removing it.',
-log.LogSource.INTERNAL,
+types.log.LogSource.INTERNAL,
 )
 log.log(
 servicePool,
-log.LogLevel.ERROR,
+types.log.LogLevel.ERROR,
 f'Removing user service {us.friendly_name} because it seems to be hanged'
 )
 us.release_or_cancel()
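The HangedCleaner query above relies on a filtered Count() annotation to tally hanged user services per pool and then drops pools whose count is zero. A generic sketch of that Django ORM pattern, using hypothetical models (a Pool with related name 'items' and a precomputed cutoff datetime), not the UDS query itself:

from django.db.models import Count, Q

stale_pools = (
    Pool.objects.annotate(
        # Count only the related rows matching the filter, per pool
        stale=Count('items', filter=Q(items__updated__lt=cutoff, items__state='preparing'))
    )
    .exclude(stale=0)      # keep only pools that actually have stale items
    .filter(active=True)
)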
@@ -36,7 +36,7 @@ import collections.abc
 from uds.core.managers import publication_manager
 from uds.core.util.config import GlobalConfig
 from uds.models import ServicePoolPublication
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.services.exceptions import PublishException
 from uds.core.types.states import State
 from uds.core.jobs import Job

@@ -52,7 +52,7 @@ class PublicationInfoItemsCleaner(Job):
 friendly_name = 'Publications Info Cleaner'

 def run(self) -> None:
-removeFrom = sql_datetime() - timedelta(
+removeFrom = sql_now() - timedelta(
 seconds=GlobalConfig.KEEP_INFO_TIME.as_int(True)
 )
 ServicePoolPublication.objects.filter(

@@ -34,7 +34,7 @@ import logging
 from uds.core import types

 from uds.models import CalendarAction
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.jobs import Job

 logger = logging.getLogger(__name__)

@@ -49,7 +49,7 @@ class ScheduledAction(Job):
 for configuredAction in CalendarAction.objects.filter(
 service_pool__service__provider__maintenance_mode=False, # Avoid maintenance
 service_pool__state=types.states.State.ACTIVE, # Avoid Non active pools
-next_execution__lt=sql_datetime(),
+next_execution__lt=sql_now(),
 ).order_by('next_execution'):
 logger.info(
 'Executing calendar action %s.%s (%s)',
@@ -36,7 +36,7 @@ import logging
 from django.db import transaction

 from uds.models import Scheduler
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.types.states import State
 from uds.core.jobs import Job


@@ -57,7 +57,7 @@ class SchedulerHousekeeping(Job):
 """
 Look for "hanged" scheduler tasks and reschedule them
 """
-since = sql_datetime() - timedelta(minutes=MAX_EXECUTION_MINUTES)
+since = sql_now() - timedelta(minutes=MAX_EXECUTION_MINUTES)
 for _ in range(3): # Retry three times in case of lockout error
 try:
 with transaction.atomic():

@@ -36,7 +36,7 @@ import collections.abc
 from django.db import transaction
 from uds.core.util.config import GlobalConfig
 from uds.models import ServicePool, UserService
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.types.states import State
 from uds.core.jobs import Job


@@ -53,7 +53,7 @@ class DeployedServiceInfoItemsCleaner(Job):
 friendly_name = 'Deployed Service Info Cleaner'

 def run(self) -> None:
-removeFrom = sql_datetime() - timedelta(
+removeFrom = sql_now() - timedelta(
 seconds=GlobalConfig.KEEP_INFO_TIME.as_int()
 )
 ServicePool.objects.filter(
@@ -91,13 +91,13 @@ class DeployedServiceRemover(Job):
 userService.cancel()
 # Nice start of removal, maybe we need to do some limitation later, but there should not be too much services nor publications cancelable at once
 service_pool.state = State.REMOVING
-service_pool.state_date = sql_datetime() # Now
+service_pool.state_date = sql_now() # Now
 service_pool.name += ' (removed)'
 service_pool.save(update_fields=['state', 'state_date', 'name'])

 def continue_removal_of(self, servicePool: ServicePool) -> None:
 # get current time
-now = sql_datetime()
+now = sql_now()

 # Recheck that there is no publication created just after "startRemovalOf"
 try:

@@ -194,9 +194,9 @@ class DeployedServiceRemover(Job):
 for servicepool in already_removing_servicepools:
 try:
 if servicepool.state_date.year == 1972:
-servicepool.state_date = sql_datetime()
+servicepool.state_date = sql_now()
 servicepool.save(update_fields=['state_date'])
-if servicepool.state_date < sql_datetime() - timedelta(
+if servicepool.state_date < sql_now() - timedelta(
 seconds=MAX_REMOVING_TIME
 ):
 self.force_removal_of(servicepool) # Force removal

@@ -121,9 +121,9 @@ class ServiceCacheUpdater(Job):
 remaining_restraing_time = servicepool.remaining_restraint_time()
 log.log(
 servicepool,
-log.LogLevel.WARNING,
+types.log.LogLevel.WARNING,
 f'Service Pool is restrained due to excesive errors (will be available in {remaining_restraing_time} seconds)',
-log.LogSource.INTERNAL,
+types.log.LogSource.INTERNAL,
 )
 logger.info(
 '%s will be restrained during %s seconds. Will check this later',

@@ -308,9 +308,9 @@ class ServiceCacheUpdater(Job):
 except MaxServicesReachedError:
 log.log(
 servicepool_stats.servicepool,
-log.LogLevel.ERROR,
+types.log.LogLevel.ERROR,
 'Max number of services reached for this service',
-log.LogSource.INTERNAL,
+types.log.LogSource.INTERNAL,
 )
 logger.warning(
 'Max user services reached for %s: %s. Cache not created',

@@ -58,7 +58,7 @@ class DeployedServiceStatsCollector(Job):
 service_pool_to_check: collections.abc.Iterable[
 models.ServicePool
 ] = models.ServicePool.objects.filter(state=State.ACTIVE).iterator()
-stamp = model.sql_datetime()
+stamp = model.sql_now()
 # Global counters
 totalAssigned, totalInUse, totalCached = 0, 0, 0
 for servicePool in service_pool_to_check:
@@ -35,9 +35,9 @@ import collections.abc

 from django.db.models import Q, Count

+from uds.core import types
 from uds.models import ServicePool, UserService
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
-from uds.core.types.states import State
 from uds.core.jobs import Job
 from uds.core.util import log

@@ -56,7 +56,7 @@ class StuckCleaner(Job):
 friendly_name = 'Stuck States cleaner'

 def run(self) -> None:
-since_state: datetime = sql_datetime() - timedelta(seconds=MAX_STUCK_TIME)
+since_state: datetime = sql_now() - timedelta(seconds=MAX_STUCK_TIME)
 # Filter for locating machine stuck on removing, cancelling, etc..
 # Locate service pools with pending assigned service in use
 servicePoolswithStucks = (
@@ -66,13 +66,13 @@ class StuckCleaner(Job):
 filter=Q(userServices__state_date__lt=since_state)
 & (
 Q(
-userServices__state=State.PREPARING,
+userServices__state=types.states.State.PREPARING,
 )
-| ~Q(userServices__state__in=State.INFO_STATES + State.VALID_STATES)
+| ~Q(userServices__state__in=types.states.State.INFO_STATES + types.states.State.VALID_STATES)
 ),
 )
 )
-.filter(service__provider__maintenance_mode=False, state=State.ACTIVE)
+.filter(service__provider__maintenance_mode=False, state=types.states.State.ACTIVE)
 .exclude(stuckCount=0)
 )

@@ -81,8 +81,8 @@ class StuckCleaner(Job):
 q = servicePool.userServices.filter(state_date__lt=since_state)
 # Get all that are not in valid or info states, AND the ones that are "PREPARING" with
 # "destroy_after" property set (exists) (that means that are waiting to be destroyed after initializations)
-yield from q.exclude(state__in=State.INFO_STATES + State.VALID_STATES)
+yield from q.exclude(state__in=types.states.State.INFO_STATES + types.states.State.VALID_STATES)
-yield from q.filter(state=State.PREPARING)
+yield from q.filter(state=types.states.State.PREPARING)

 for servicepool in servicePoolswithStucks:
 if servicepool.service.get_instance().allows_errored_userservice_cleanup() is False:
@@ -92,7 +92,7 @@ class StuckCleaner(Job):
 logger.debug('Found stuck user service %s', stuck)
 log.log(
 servicepool,
-log.LogLevel.ERROR,
+types.log.LogLevel.ERROR,
 f'User service {stuck.name} has been hard removed because it\'s stuck',
 )
 # stuck.set_state(State.ERROR)
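For the stuck-state cleaner, the state constants now come from types.states.State. A reduced sketch of the per-pool selection this job performs, using only field and enum names visible in the hunks above ('servicepool' is a placeholder ServicePool instance, and the timeout value is assumed):

    from datetime import datetime, timedelta

    from uds.core import types
    from uds.core.util.model import sql_now

    MAX_STUCK_TIME = 3600 * 24  # assumed value; the real constant lives in the job module

    def stuck_candidates(servicepool):
        since_state: datetime = sql_now() - timedelta(seconds=MAX_STUCK_TIME)
        q = servicepool.userServices.filter(state_date__lt=since_state)
        # anything outside the INFO/VALID state sets, plus services still PREPARING, counts as stuck
        yield from q.exclude(state__in=types.states.State.INFO_STATES + types.states.State.VALID_STATES)
        yield from q.filter(state=types.states.State.PREPARING)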
@@ -34,7 +34,7 @@ import logging
 from django.db import transaction

 from uds.models import AccountUsage
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.jobs import Job

 logger = logging.getLogger(__name__)
@@ -48,7 +48,7 @@ class UsageAccounting(Job):
 with transaction.atomic():
 AccountUsage.objects.select_for_update().filter(
 user_service__in_use=True
-).update(end=sql_datetime())
+).update(end=sql_now())
 AccountUsage.objects.select_for_update().filter(
 user_service__in_use=False
 ).update(
@@ -38,7 +38,7 @@ from django.db import transaction
 from uds.core.managers.userservice import UserServiceManager
 from uds.core.util.config import GlobalConfig
 from uds.models import UserService
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.types.states import State
 from uds.core.jobs import Job

@@ -58,7 +58,7 @@ class UserServiceInfoItemsCleaner(Job):
 friendly_name = 'User Service Info Cleaner'

 def run(self) -> None:
-remove_since = sql_datetime() - timedelta(seconds=GlobalConfig.KEEP_INFO_TIME.as_int(True))
+remove_since = sql_now() - timedelta(seconds=GlobalConfig.KEEP_INFO_TIME.as_int(True))
 logger.debug('Removing information user services from %s', remove_since)
 with transaction.atomic():
 UserService.objects.select_for_update().filter(
@@ -80,7 +80,7 @@ class UserServiceRemover(Job):
 manager = UserServiceManager()

 with transaction.atomic():
-removeFrom = sql_datetime() - timedelta(
+removeFrom = sql_now() - timedelta(
 seconds=10
 ) # We keep the machine at least 10 seconds before removing it, so we avoid connection errors
 candidates: collections.abc.Iterable[UserService] = UserService.objects.filter(
@@ -90,8 +90,8 @@ def guacamole(request: ExtendedHttpRequestWithUser, token: str, tunnelId: str) -
 protocol = 'RDS' if 'remote-app' in val else val['protocol'].upper()
 host = val.get('hostname', '0.0.0.0') # nosec: Not a bind, just a placeholder for "no host"
 msg = f'User {user.name} started HTML5 {protocol} tunnel to {host}.'
-log.log(user.manager, log.LogLevel.INFO, msg)
+log.log(user.manager, types.log.LogLevel.INFO, msg)
-log.log(userService, log.LogLevel.INFO, msg)
+log.log(userService, types.log.LogLevel.INFO, msg)

 events.add_event(
 userService.deployed_service,
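The second mechanical change in this commit is the rename of the database-clock helper: every sql_datetime() import and call becomes sql_now(). A short before/after sketch, assuming only what the import lines in these hunks show (the keep interval is a placeholder for the configured value):

    from datetime import timedelta

    # old spelling, removed by this commit:
    #     from uds.core.util.model import sql_datetime
    #     remove_since = sql_datetime() - timedelta(seconds=keep_seconds)

    from uds.core.util.model import sql_now

    keep_seconds = 3600  # placeholder; the cleaner reads GlobalConfig.KEEP_INFO_TIME.as_int(True)
    remove_since = sql_now() - timedelta(seconds=keep_seconds)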
@@ -42,7 +42,7 @@ from django.core.management.base import BaseCommand

 from uds.core.util import log, model, config
 from uds import models
-from uds.core.types.states import State
+from uds.core import types


 logger = logging.getLogger(__name__)
@@ -136,7 +136,7 @@ class Command(BaseCommand):
 return f'{cntr:02d}.-{s}'

 max_items = int(options['maxitems'])
-now = model.sql_datetime()
+now = model.sql_now()

 tree: dict[str, typing.Any] = {}
 try:
@@ -155,10 +155,10 @@ class Command(BaseCommand):
 userservices: dict[str, typing.Any] = {}
 fltr = service_pool.userServices.all()
 if not options['alluserservices']:
-fltr = fltr.filter(state=State.ERROR)
+fltr = fltr.filter(state=types.states.State.ERROR)
 for item in fltr[:max_items]: # at most max_items items
 logs = [
-f'{l["date"]}: {log.LogLevel.from_int(l["level"])} [{l["source"]}] - {l["message"]}'
+f'{l["date"]}: {types.log.LogLevel.from_int(l["level"])} [{l["source"]}] - {l["message"]}'
 for l in log.get_logs(item)
 ]
 userservices[item.friendly_name] = {
@@ -166,8 +166,8 @@ class Command(BaseCommand):
 'id': item.uuid,
 'unique_id': item.unique_id,
 'friendly_name': item.friendly_name,
-'state': State.from_str(item.state).localized,
+'state': types.states.State.from_str(item.state).localized,
-'os_state': State.from_str(item.os_state).localized,
+'os_state': types.states.State.from_str(item.os_state).localized,
 'state_date': item.state_date,
 'creation_date': item.creation_date,
 'revision': item.publication and item.publication.revision or '',
@@ -39,7 +39,7 @@ import qrcode
 from django.utils.translation import gettext_noop as _, gettext

 from uds import models
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core import mfas, exceptions, types
 from uds.core.ui import gui

@@ -200,7 +200,7 @@ class TOTP_MFA(mfas.MFA):

 # Validate code
 if not self.get_totp(userId, username).verify(
-code, valid_window=self.valid_window.as_int(), for_time=sql_datetime()
+code, valid_window=self.valid_window.as_int(), for_time=sql_now()
 ):
 raise exceptions.auth.MFAError(gettext('Invalid code'))

@@ -576,12 +576,12 @@ class Migration(migrations.Migration):
 migrations.AddField(
 model_name='group',
 name='created',
-field=models.DateTimeField(blank=True, default=uds.core.util.model.sql_datetime),
+field=models.DateTimeField(blank=True, default=uds.core.util.model.sql_now),
 ),
 migrations.AddField(
 model_name='user',
 name='created',
-field=models.DateTimeField(blank=True, default=uds.core.util.model.sql_datetime),
+field=models.DateTimeField(blank=True, default=uds.core.util.model.sql_now),
 ),
 migrations.AlterField(
 model_name='deployedservice',
@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
 max_length=128,
 ),
 ),
-("start", models.DateTimeField(default=uds.core.util.model.sql_datetime)),
+("start", models.DateTimeField(default=uds.core.util.model.sql_now)),
 ("end", models.DateTimeField(blank=True, null=True)),
 ],
 options={
@@ -119,7 +119,7 @@ class IPMachinesService(services.Service):
 if values[0] in (b'v6', b'v7'):
 self.lockByExternalAccess.value = gui.as_bool(values[5].decode())
 if values[0] in (b'v7',):
-self.useRandomIp = gui.as_bool(values[6].decode())
+self.useRandomIp.value = gui.as_bool(values[6].decode())

 # Note that will be marshalled as new format, so we don't need to care about old format in code anymore :)
 def post_migrate(self, apps: typing.Any, record: typing.Any) -> None:
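The IPMachinesService hunk is a genuine fix rather than a rename: for v7 data the old code rebound the gui field attribute to a plain bool instead of assigning to its .value, as the neighbouring lockByExternalAccess line already did. A runnable stand-in (dummy classes, not the UDS gui API) showing why the distinction matters:

    class BoolField:
        """Stand-in for a gui boolean field: the field object stays put, its state lives in .value."""
        def __init__(self) -> None:
            self.value = False

    class FakeService:
        def __init__(self) -> None:
            self.useRandomIp = BoolField()

        def unmarshal_v7(self, raw: bytes) -> None:
            # Corrected form (what this commit introduces): update the stored value.
            self.useRandomIp.value = raw == b'TRUE'
            # The replaced line did 'self.useRandomIp = ...', overwriting the field
            # object with a plain bool and losing the field itself.

    svc = FakeService()
    svc.unmarshal_v7(b'TRUE')
    assert svc.useRandomIp.value is True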
@@ -35,7 +35,7 @@ from django.db import models

 from .uuid_model import UUIDModel
 from .tag import TaggingMixin
-from ..core.util.model import sql_datetime
+from ..core.util.model import sql_now
 from ..core.consts import NEVER

 logger = logging.getLogger(__name__)
@@ -62,7 +62,7 @@ class Account(UUIDModel, TaggingMixin):
 if hasattr(userService, 'accounting'): # Already has an account
 return None

-start = sql_datetime()
+start = sql_now()

 if userService.user:
 userName = userService.user.pretty_name
@@ -88,7 +88,7 @@ class Account(UUIDModel, TaggingMixin):

 tmp = userService.accounting
 tmp.user_service = None
-tmp.end = sql_datetime()
+tmp.end = sql_now()
 tmp.save()
 return tmp

@@ -35,7 +35,7 @@ import logging

 from django.db import models, transaction

-from ..core.util.model import sql_datetime
+from ..core.util.model import sql_now


 logger = logging.getLogger(__name__)
@@ -69,14 +69,14 @@ class Cache(models.Model):
 """
 Purges the cache items that are no longer valid.
 """
-now = sql_datetime()
+now = sql_now()
 with transaction.atomic():
 for v in Cache.objects.all():
 if now > v.created + timedelta(seconds=v.validity):
 v.delete()

 def __str__(self) -> str:
-if sql_datetime() > (self.created + timedelta(seconds=self.validity)):
+if sql_now() > (self.created + timedelta(seconds=self.validity)):
 expired = "Expired"
 else:
 expired = "Active"
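The Cache hunks only swap the clock helper, but they make the expiry rule explicit: an entry is stale once created + validity seconds is behind the database clock. A small self-contained sketch of that comparison with plain datetimes (no ORM), mirroring the purge loop and __str__ above:

    from datetime import datetime, timedelta

    def is_expired(created: datetime, validity_seconds: int, now: datetime) -> bool:
        # same test as the purge loop and __str__ in the hunk above
        return now > created + timedelta(seconds=validity_seconds)

    now = datetime(2024, 1, 1, 12, 0, 0)
    assert is_expired(datetime(2024, 1, 1, 11, 0, 0), 1800, now) is True    # 30 min validity, 1 h old
    assert is_expired(datetime(2024, 1, 1, 11, 50, 0), 3600, now) is False  # still within validity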
@@ -42,13 +42,12 @@ from django.utils.translation import gettext_lazy as _
 from django.db import models

 from uds.core.util import calendar
-from uds.core.util import log
 from uds.core.managers.userservice import UserServiceManager
 from uds.core import types, consts

 from .calendar import Calendar
 from .uuid_model import UUIDModel
-from ..core.util.model import sql_datetime
+from ..core.util.model import sql_now
 from .service_pool import ServicePool
 from .transport import Transport
 from .authenticator import Authenticator
@@ -136,7 +135,7 @@ class CalendarAction(UUIDModel):
 )
 return

-self.last_execution = sql_datetime()
+self.last_execution = sql_now()
 params = json.loads(self.params)

 should_save_servicepool = save
@@ -175,7 +174,7 @@ class CalendarAction(UUIDModel):

 def _remove_stuck_userservice() -> None:
 # 1.- Remove stuck assigned services (Ignore "creating ones", just for created)
-since = sql_datetime() - datetime.timedelta(hours=_numeric_value('hours'))
+since = sql_now() - datetime.timedelta(hours=_numeric_value('hours'))
 for userService in self.service_pool.assigned_user_services().filter(
 state_date__lt=since, state=types.states.State.USABLE
 ):
@@ -273,7 +272,7 @@ class CalendarAction(UUIDModel):

 self.service_pool.log(
 f'Executed action {description} [{self.pretty_params}]',
-level=log.LogLevel.INFO,
+level=types.log.LogLevel.INFO,
 )
 except Exception:
 self.service_pool.log(f'Error executing scheduled action {description} [{self.pretty_params}]')
@@ -286,7 +285,7 @@ class CalendarAction(UUIDModel):
 self.save()

 def save(self, *args: typing.Any, **kwargs: typing.Any) -> None:
-last_execution = self.last_execution or sql_datetime()
+last_execution = self.last_execution or sql_now()
 possibleNext = calendar.CalendarChecker(self.calendar).next_event(
 check_from=last_execution - self.offset, start_event=self.at_start
 )
@@ -43,7 +43,7 @@ from dateutil import rrule as rules

 from .uuid_model import UUIDModel
 from .calendar import Calendar
-from ..core.util.model import sql_datetime
+from ..core.util.model import sql_now


 logger = logging.getLogger(__name__)
@@ -188,7 +188,7 @@ class CalendarRule(UUIDModel):

 def save(self, *args: typing.Any, **kwargs: typing.Any) -> None:
 logger.debug('Saving...')
-self.calendar.modified = sql_datetime()
+self.calendar.modified = sql_now()

 super().save(*args, **kwargs)
 # Ensure saves associated calendar, so next execution of actions is updated with rule values
@@ -41,7 +41,7 @@ from uds.core.util import log
 from .uuid_model import UUIDModel
 from .authenticator import Authenticator
 from .user import User
-from ..core.util.model import sql_datetime
+from ..core.util.model import sql_now

 # Not imported at runtime, just for type checking
 if typing.TYPE_CHECKING:
@@ -69,7 +69,7 @@ class Group(UUIDModel):
 # if it is false, the user must belong to ALL of the groups to be considered as belonging to this group
 meta_if_any = models.BooleanField(default=False)
 groups: 'models.ManyToManyField[Group, Group]' = models.ManyToManyField('self', symmetrical=False)
-created = models.DateTimeField(default=sql_datetime, blank=True)
+created = models.DateTimeField(default=sql_now, blank=True)
 skip_mfa = models.CharField(max_length=1, default=State.INACTIVE, db_index=True)

 # "fake" declarations for type checking
@@ -29,6 +29,7 @@
 Author: Adolfo Gómez, dkmaster at dkmon dot com
 """
 # pyright: reportUnknownMemberType=false, reportAttributeAccessIssue=false,reportUnknownArgumentType=false
+# mypy: disable-error-code="attr-defined, no-untyped-call"
 import io
 import base64
 import logging
@@ -42,7 +43,7 @@ from django.http import HttpResponse


 from .uuid_model import UUIDModel
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core import consts

 logger = logging.getLogger(__name__)
@@ -194,7 +195,7 @@ class Image(UUIDModel):
 return HttpResponse(self.thumb, content_type='image/png')

 def save(self, *args: typing.Any, **kwargs: typing.Any) -> None:
-self.stamp = sql_datetime()
+self.stamp = sql_now()
 return super().save(*args, **kwargs)

 def __str__(self) -> str:
@@ -43,7 +43,7 @@ from uds.core import consts, types
 from uds.core.util import log
 from uds.core.util.calendar import CalendarChecker

-from ..core.util.model import sql_datetime
+from ..core.util.model import sql_now
 from .group import Group
 from .image import Image
 from .service_pool import ServicePool
@@ -147,7 +147,7 @@ class MetaPool(UUIDModel, TaggingMixin):
 Checks if the access for a service pool is allowed or not (based exclusively on associated calendars)
 """
 if chkDateTime is None:
-chkDateTime = sql_datetime()
+chkDateTime = sql_now()

 access = self.fallbackAccess
 # Let's see if we can access by current datetime
@@ -43,7 +43,7 @@ from uds.core.types.permissions import PermissionType
 from .uuid_model import UUIDModel
 from .user import User
 from .group import Group
-from ..core.util.model import sql_datetime
+from ..core.util.model import sql_now

 logger = logging.getLogger(__name__)

@@ -121,7 +121,7 @@ class Permissions(UUIDModel):
 return existing
 except Exception: # Does not exist
 return Permissions.objects.create(
-created=sql_datetime(),
+created=sql_now(),
 ends=None,
 user=user,
 group=group,
@@ -39,8 +39,8 @@ from django.db.models import Q
 from uds.core import consts, types
 from uds.core.consts import MAC_UNKNOWN
 from uds.core.types.requests import ExtendedHttpRequest
-from uds.core.util import log, net, properties, resolver
+from uds.core.util import net, properties, resolver
-from uds.core.util.model import sql_stamp, sql_datetime
+from uds.core.util.model import sql_stamp, sql_now

 from .tag import TaggingMixin
 from .uuid_model import UUIDModel
@@ -141,9 +141,9 @@ class ServerGroup(UUIDModel, TaggingMixin, properties.PropertiesMixin):
 # If not found, try to resolve ip_or_host and search again
 try:
 ip = resolver.resolve(ip_or_host_or_mac)[0]
-found = Server.objects.filter(Q(ip=ip) | Q(hostname=ip))
+found_2 = Server.objects.filter(Q(ip=ip) | Q(hostname=ip))
-if found:
+if found_2:
-return found[0]
+return found_2[0]
 except Exception:
 pass
 return None
@@ -219,7 +219,7 @@ class Server(UUIDModel, TaggingMixin, properties.PropertiesMixin):
 certificate = models.TextField(default='', blank=True)

 # Log level, so we can filter messages for this server
-log_level = models.IntegerField(default=log.LogLevel.ERROR.value)
+log_level = models.IntegerField(default=types.log.LogLevel.ERROR.value)

 # Extra data, for server type custom data use (i.e. actor keeps command related data here)
 data: typing.Any = models.JSONField(null=True, blank=True, default=None)
@@ -295,7 +295,7 @@ class Server(UUIDModel, TaggingMixin, properties.PropertiesMixin):
 if duration is None:
 self.locked_until = None
 else:
-self.locked_until = sql_datetime() + duration
+self.locked_until = sql_now() + duration
 self.save(update_fields=['locked_until'])

 def interpolate_new_assignation(self) -> None:
@@ -319,7 +319,7 @@ class Server(UUIDModel, TaggingMixin, properties.PropertiesMixin):
 If it is not available, we return False, otherwise True
 """
 restrainedUntil = datetime.datetime.fromtimestamp(self.properties.get('available', consts.NEVER_UNIX))
-return restrainedUntil > sql_datetime()
+return restrainedUntil > sql_now()

 def set_restrained_until(self, value: typing.Optional[datetime.datetime] = None) -> None:
 """Sets the availability of this server
@@ -42,7 +42,7 @@ from uds.core import consts, exceptions, types
 from uds.core.environment import Environment
 from uds.core.services.exceptions import InvalidServiceException
 from uds.core.util import calendar, log, serializer
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now

 from .account import Account
 from .group import Group
@@ -223,7 +223,7 @@ class ServicePool(UUIDModel, TaggingMixin):
 ServicePool.objects.none()
 ) # Do not perform any restraint check if we set the globalconfig to 0 (or less)

-date = sql_datetime() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
+date = sql_now() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
 min_ = GlobalConfig.RESTRAINT_COUNT.as_int()

 res: list[dict[str, typing.Any]] = []
@@ -272,7 +272,7 @@ class ServicePool(UUIDModel, TaggingMixin):
 if GlobalConfig.RESTRAINT_TIME.as_int() <= 0:
 return False # Do not perform any restraint check if we set the globalconfig to 0 (or less)

-date = sql_datetime() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
+date = sql_now() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
 if (
 self.userServices.filter(state=types.states.State.ERROR, state_date__gt=date).count()
 >= GlobalConfig.RESTRAINT_COUNT.as_int()
@@ -287,14 +287,14 @@ class ServicePool(UUIDModel, TaggingMixin):
 if GlobalConfig.RESTRAINT_TIME.as_int() <= 0:
 return 0

-date = sql_datetime() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
+date = sql_now() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
 count = self.userServices.filter(state=types.states.State.ERROR, state_date__gt=date).count()
 if count < GlobalConfig.RESTRAINT_COUNT.as_int():
 return 0

 return GlobalConfig.RESTRAINT_TIME.as_int() - int(
 (
-sql_datetime()
+sql_now()
 - self.userServices.filter(state=types.states.State.ERROR, state_date__gt=date)
 .latest('state_date')
 .state_date
@@ -347,7 +347,7 @@ class ServicePool(UUIDModel, TaggingMixin):
 Checks if the access for a service pool is allowed or not (based exclusively on associated calendars)
 """
 if check_datetime is None:
-check_datetime = sql_datetime()
+check_datetime = sql_now()

 access = self.fallbackAccess
 # Let's see if we can access by current datetime
@@ -368,7 +368,7 @@ class ServicePool(UUIDModel, TaggingMixin):
 typing.Optional[int] -- [Returns deadline in seconds. If no deadline (forever), will return None]
 """
 if check_datetime is None:
-check_datetime = sql_datetime()
+check_datetime = sql_now()

 if self.is_access_allowed(check_datetime) is False:
 return -1
@@ -425,7 +425,7 @@ class ServicePool(UUIDModel, TaggingMixin):

 """
 self.state = state
-self.state_date = sql_datetime()
+self.state_date = sql_now()
 if save:
 self.save()

@@ -467,7 +467,7 @@ class ServicePool(UUIDModel, TaggingMixin):
 Args:
 activePub: Active publication used as "current" publication to make checks
 """
-now = sql_datetime()
+now = sql_now()
 nonActivePub: 'ServicePoolPublication'
 userService: 'UserService'

@@ -684,8 +684,8 @@ class ServicePool(UUIDModel, TaggingMixin):
 return bool(self.service) and self.service.test_connectivity(host, port, timeout)

 # Utility for logging
-def log(self, message: str, level: log.LogLevel = log.LogLevel.INFO) -> None:
+def log(self, message: str, level: types.log.LogLevel = types.log.LogLevel.INFO) -> None:
-log.log(self, level, message, log.LogSource.INTERNAL)
+log.log(self, level, message, types.log.LogSource.INTERNAL)

 @staticmethod
 def pre_delete(sender: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=unused-argument
@@ -42,7 +42,7 @@ from uds.core.environment import Environment
 from uds.core.util import log

 from .service_pool import ServicePool
-from ..core.util.model import sql_datetime
+from ..core.util.model import sql_now
 from .uuid_model import UUIDModel


@@ -182,7 +182,7 @@ class ServicePoolPublication(UUIDModel):
 save: Defaults to true. If false, record will not be saved to db, just modified

 """
-self.state_date = sql_datetime()
+self.state_date = sql_now()
 self.state = state
 self.save(update_fields=['state_date', 'state'])

@@ -39,7 +39,7 @@ from django.db import models
 from uds.core.managers.crypto import CryptoManager

 from .uuid_model import UUIDModel
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core import consts

 from .user import User
@@ -108,7 +108,7 @@ class TicketStore(UUIDModel):

 return TicketStore.objects.create(
 uuid=TicketStore.generate_uuid(),
-stamp=sql_datetime(),
+stamp=sql_now(),
 data=data,
 validity=validity,
 owner=owner,
@@ -134,7 +134,7 @@ class TicketStore(UUIDModel):

 t = TicketStore.objects.get(uuid=uuid, owner=owner)
 validity = datetime.timedelta(seconds=t.validity)
-now = sql_datetime()
+now = sql_now()

 logger.debug('Ticket validity: %s %s', t.stamp + validity, now)
 if t.stamp + validity < now:
@@ -206,7 +206,7 @@ class TicketStore(UUIDModel):
 ) -> None:
 try:
 t = TicketStore.objects.get(uuid=uuid, owner=owner)
-t.stamp = sql_datetime()
+t.stamp = sql_now()
 if validity:
 t.validity = validity
 t.save(update_fields=['validity', 'stamp'])
@@ -283,7 +283,7 @@ class TicketStore(UUIDModel):

 @staticmethod
 def cleanup() -> None:
-now = sql_datetime()
+now = sql_now()
 for v in TicketStore.objects.all():
 if now > v.stamp + datetime.timedelta(
 seconds=v.validity + 600
@@ -40,7 +40,7 @@ from uds.core.util import log, storage, properties

 from .authenticator import Authenticator
 from ..core.consts import NEVER
-from ..core.util.model import sql_datetime
+from ..core.util.model import sql_now
 from .uuid_model import UUIDModel

 # Not imported at runtime, just for type checking
@@ -72,7 +72,7 @@ class User(UUIDModel, properties.PropertiesMixin):
 is_admin = models.BooleanField(default=False) # is true, this is a super-admin
 last_access = models.DateTimeField(default=NEVER)
 parent = models.CharField(max_length=50, default=None, null=True)
-created = models.DateTimeField(default=sql_datetime, blank=True)
+created = models.DateTimeField(default=sql_now, blank=True)

 # "fake" declarations for type checking
 # objects: 'models.manager.Manager["User"]'
@@ -128,7 +128,7 @@ class User(UUIDModel, properties.PropertiesMixin):
 """
 Updates the last access for this user with the current time of the sql server
 """
-self.last_access = sql_datetime()
+self.last_access = sql_now()
 self.save(update_fields=['last_access'])

 def logout(self, request: 'ExtendedHttpRequest') -> types.auth.AuthenticationResult:
@@ -39,7 +39,7 @@ from django.db.models import signals
 from uds.core import types, consts
 from uds.core.environment import Environment
 from uds.core.util import log, properties
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.types.states import State
 from uds.models.service_pool import ServicePool
 from uds.models.service_pool_publication import ServicePoolPublication
@@ -408,7 +408,7 @@ class UserService(UUIDModel, properties.PropertiesMixin):

 """
 if state != self.state:
-self.state_date = sql_datetime()
+self.state_date = sql_now()
 self.state = state
 self.save(update_fields=['state', 'state_date'])

@@ -423,7 +423,7 @@ class UserService(UUIDModel, properties.PropertiesMixin):

 """
 if state != self.os_state:
-self.state_date = sql_datetime()
+self.state_date = sql_now()
 self.os_state = state
 self.save(update_fields=['os_state', 'state_date'])

@@ -435,7 +435,7 @@ class UserService(UUIDModel, properties.PropertiesMixin):
 user: User to assign to (db record)
 """
 self.cache_level = 0
-self.state_date = sql_datetime()
+self.state_date = sql_now()
 self.user = user
 self.save(update_fields=['cache_level', 'state_date', 'user'])

@@ -452,7 +452,7 @@ class UserService(UUIDModel, properties.PropertiesMixin):
 from uds.core.managers.userservice import UserServiceManager

 self.in_use = inUse
-self.in_use_date = sql_datetime()
+self.in_use_date = sql_now()
 self.save(update_fields=['in_use', 'in_use_date'])

 # Start/stop accounting
@@ -618,8 +618,8 @@ class UserService(UUIDModel, properties.PropertiesMixin):
 )

 # Utility for logging
-def log(self, message: str, level: log.LogLevel = log.LogLevel.INFO) -> None:
+def log(self, message: str, level: types.log.LogLevel = types.log.LogLevel.INFO) -> None:
-log.log(self, level, message, log.LogSource.INTERNAL)
+log.log(self, level, message, types.log.LogSource.INTERNAL)

 def test_connectivity(self, host: str, port: 'str|int', timeout:int=4) -> bool:
 return self.deployed_service.test_connectivity(host, port, timeout)
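The UserService hunks all share one state-transition shape: stamp state_date with the database clock and persist only the affected columns via save(update_fields=...). A runnable stand-in (plain object instead of the Django model, sql_now simulated locally) showing that shape under those assumptions:

    from datetime import datetime, timezone

    def sql_now() -> datetime:
        # stand-in for uds.core.util.model.sql_now (database server clock in the real code)
        return datetime.now(timezone.utc)

    class FakeUserService:
        def __init__(self) -> None:
            self.state = 'P'              # PREPARING-like placeholder
            self.state_date = sql_now()
            self.saved_fields: list[str] = []

        def save(self, update_fields: list[str]) -> None:
            self.saved_fields = update_fields   # the real model writes just these columns

        def set_state(self, state: str) -> None:
            if state != self.state:             # same guard as the hunks above
                self.state_date = sql_now()
                self.state = state
                self.save(update_fields=['state', 'state_date'])

    svc = FakeUserService()
    svc.set_state('U')                          # USABLE-like placeholder
    assert svc.saved_fields == ['state', 'state_date']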
@ -36,7 +36,7 @@ from django.db import models
|
|||||||
|
|
||||||
from uds.core.managers.crypto import CryptoManager
|
from uds.core.managers.crypto import CryptoManager
|
||||||
from .user_service import UserService
|
from .user_service import UserService
|
||||||
from ..core.util.model import sql_datetime
|
from ..core.util.model import sql_now
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@ -58,7 +58,7 @@ class UserServiceSession(models.Model): # pylint: disable=too-many-public-metho
|
|||||||
session_id = models.CharField(
|
session_id = models.CharField(
|
||||||
max_length=128, db_index=True, default=_session_id_generator, blank=True
|
max_length=128, db_index=True, default=_session_id_generator, blank=True
|
||||||
)
|
)
|
||||||
start = models.DateTimeField(default=sql_datetime)
|
start = models.DateTimeField(default=sql_now)
|
||||||
end = models.DateTimeField(null=True, blank=True)
|
end = models.DateTimeField(null=True, blank=True)
|
||||||
|
|
||||||
user_service = models.ForeignKey(
|
user_service = models.ForeignKey(
|
||||||
@ -88,5 +88,5 @@ class UserServiceSession(models.Model): # pylint: disable=too-many-public-metho
|
|||||||
"""
|
"""
|
||||||
Ends the session
|
Ends the session
|
||||||
"""
|
"""
|
||||||
self.end = sql_datetime()
|
self.end = sql_now()
|
||||||
self.save(update_fields=['end'])
|
self.save(update_fields=['end'])
|
||||||
|
@ -40,7 +40,7 @@ from django.utils.translation import gettext_noop as _
|
|||||||
|
|
||||||
from uds.core import messaging, exceptions, types
|
from uds.core import messaging, exceptions, types
|
||||||
from uds.core.ui import gui
|
from uds.core.ui import gui
|
||||||
from uds.core.util.model import sql_datetime
|
from uds.core.util.model import sql_now
|
||||||
from uds.core.util.utils import ignore_exceptions
|
from uds.core.util.utils import ignore_exceptions
|
||||||
|
|
||||||
from . import telegram
|
from . import telegram
|
||||||
@ -162,7 +162,7 @@ class TelegramNotifier(messaging.Notifier):
|
|||||||
return # no access token, no messages
|
return # no access token, no messages
|
||||||
# Time of last retrieve
|
# Time of last retrieve
|
||||||
last_check: typing.Optional[datetime.datetime] = self.storage.read_pickled('last_check')
|
last_check: typing.Optional[datetime.datetime] = self.storage.read_pickled('last_check')
|
||||||
now = sql_datetime()
|
now = sql_now()
|
||||||
|
|
||||||
# If last check is not set, we will set it to now
|
# If last check is not set, we will set it to now
|
||||||
if last_check is None:
|
if last_check is None:
|
||||||
|
@ -119,9 +119,9 @@ class LinuxOsManager(osmanagers.OSManager):
|
|||||||
if self.is_removable_on_logout(userservice):
|
if self.is_removable_on_logout(userservice):
|
||||||
log.log(
|
log.log(
|
||||||
userservice,
|
userservice,
|
||||||
log.LogLevel.INFO,
|
types.log.LogLevel.INFO,
|
||||||
'Unused user service for too long. Removing due to OS Manager parameters.',
|
'Unused user service for too long. Removing due to OS Manager parameters.',
|
||||||
log.LogSource.OSMANAGER,
|
types.log.LogSource.OSMANAGER,
|
||||||
)
|
)
|
||||||
userservice.remove()
|
userservice.remove()
|
||||||
|
|
||||||
|
@ -39,7 +39,7 @@ import collections.abc
|
|||||||
|
|
||||||
from django.utils.translation import gettext_noop as _
|
from django.utils.translation import gettext_noop as _
|
||||||
from uds.core.ui import gui
|
from uds.core.ui import gui
|
||||||
from uds.core import exceptions
|
from uds.core import exceptions, types
|
||||||
from uds.core.util import log
|
from uds.core.util import log
|
||||||
|
|
||||||
from .linux_osmanager import LinuxOsManager
|
from .linux_osmanager import LinuxOsManager
|
||||||
@ -91,9 +91,9 @@ class LinuxRandomPassManager(LinuxOsManager):
|
|||||||
service.store_value('linOsRandomPass', randomPass)
|
service.store_value('linOsRandomPass', randomPass)
|
||||||
log.log(
|
log.log(
|
||||||
service,
|
service,
|
||||||
log.LogLevel.INFO,
|
types.log.LogLevel.INFO,
|
||||||
f'Password set to "{randomPass}"',
|
f'Password set to "{randomPass}"',
|
||||||
log.LogSource.OSMANAGER,
|
types.log.LogSource.OSMANAGER,
|
||||||
)
|
)
|
||||||
|
|
||||||
return randomPass
|
return randomPass
|
||||||
|
@ -119,9 +119,9 @@ class TestOSManager(osmanagers.OSManager):
|
|||||||
if self.is_removable_on_logout(userservice):
|
if self.is_removable_on_logout(userservice):
|
||||||
log.log(
|
log.log(
|
||||||
userservice,
|
userservice,
|
||||||
log.LogLevel.INFO,
|
types.log.LogLevel.INFO,
|
||||||
'Unused user service for too long. Removing due to OS Manager parameters.',
|
'Unused user service for too long. Removing due to OS Manager parameters.',
|
||||||
log.LogSource.OSMANAGER,
|
types.log.LogSource.OSMANAGER,
|
||||||
)
|
)
|
||||||
userservice.remove()
|
userservice.remove()
|
||||||
|
|
||||||
|
@ -110,9 +110,9 @@ class WindowsOsManager(osmanagers.OSManager):
|
|||||||
if self.is_removable_on_logout(userservice):
|
if self.is_removable_on_logout(userservice):
|
||||||
log.log(
|
log.log(
|
||||||
userservice,
|
userservice,
|
||||||
log.LogLevel.INFO,
|
types.log.LogLevel.INFO,
|
||||||
'Unused user service for too long. Removing due to OS Manager parameters.',
|
'Unused user service for too long. Removing due to OS Manager parameters.',
|
||||||
log.LogSource.OSMANAGER,
|
types.log.LogSource.OSMANAGER,
|
||||||
)
|
)
|
||||||
userservice.remove()
|
userservice.remove()
|
||||||
|
|
||||||
|
@ -275,9 +275,9 @@ class WinDomainOsManager(WindowsOsManager):
|
|||||||
logger.warning('Could not find _ldap._tcp.%s', self.domain.as_str())
|
logger.warning('Could not find _ldap._tcp.%s', self.domain.as_str())
|
||||||
log.log(
|
log.log(
|
||||||
userservice,
|
userservice,
|
||||||
log.LogLevel.WARNING,
|
types.log.LogLevel.WARNING,
|
||||||
f'Could not remove machine from domain (_ldap._tcp.{self.domain.as_str()} not found)',
|
f'Could not remove machine from domain (_ldap._tcp.{self.domain.as_str()} not found)',
|
||||||
log.LogSource.OSMANAGER,
|
types.log.LogSource.OSMANAGER,
|
||||||
)
|
)
|
||||||
except ldaputil.ALREADY_EXISTS: # pyright: ignore
|
except ldaputil.ALREADY_EXISTS: # pyright: ignore
|
||||||
# Already added this machine to this group, pass
|
# Already added this machine to this group, pass
|
||||||
@ -291,7 +291,7 @@ class WinDomainOsManager(WindowsOsManager):
|
|||||||
# logger.exception('Ldap Exception caught')
|
# logger.exception('Ldap Exception caught')
|
||||||
|
|
||||||
if error:
|
if error:
|
||||||
log.log(userservice, log.LogLevel.WARNING, error, log.LogSource.OSMANAGER)
|
log.log(userservice, types.log.LogLevel.WARNING, error, types.log.LogSource.OSMANAGER)
|
||||||
logger.error(error)
|
logger.error(error)
|
||||||
|
|
||||||
def release(self, userservice: 'UserService') -> None:
|
def release(self, userservice: 'UserService') -> None:
|
||||||
@ -305,9 +305,9 @@ class WinDomainOsManager(WindowsOsManager):
|
|||||||
# logger.info('Releasing from a not FQDN domain is not supported')
|
# logger.info('Releasing from a not FQDN domain is not supported')
|
||||||
log.log(
|
log.log(
|
||||||
userservice,
|
userservice,
|
||||||
log.LogLevel.INFO,
|
types.log.LogLevel.INFO,
|
||||||
"Removing a domain machine form a non FQDN domain is not supported.",
|
"Removing a domain machine form a non FQDN domain is not supported.",
|
||||||
log.LogSource.OSMANAGER,
|
types.log.LogSource.OSMANAGER,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
@ -317,27 +317,27 @@ class WinDomainOsManager(WindowsOsManager):
|
|||||||
logger.warning('Could not find _ldap._tcp.%s', self.domain.as_str())
|
logger.warning('Could not find _ldap._tcp.%s', self.domain.as_str())
|
||||||
log.log(
|
log.log(
|
||||||
userservice,
|
userservice,
|
||||||
log.LogLevel.WARNING,
|
types.log.LogLevel.WARNING,
|
||||||
f'Could not remove machine from domain (_ldap._tcp.{self.domain.as_str()} not found)',
|
f'Could not remove machine from domain (_ldap._tcp.{self.domain.as_str()} not found)',
|
||||||
log.LogSource.OSMANAGER,
|
types.log.LogSource.OSMANAGER,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
except ldaputil.LDAPError as e:
|
except ldaputil.LDAPError as e:
|
||||||
# logger.exception('Ldap Exception caught')
|
# logger.exception('Ldap Exception caught')
|
||||||
log.log(
|
log.log(
|
||||||
userservice,
|
userservice,
|
||||||
log.LogLevel.WARNING,
|
types.log.LogLevel.WARNING,
|
||||||
f'Could not remove machine from domain ({e})',
|
f'Could not remove machine from domain ({e})',
|
||||||
log.LogSource.OSMANAGER,
|
types.log.LogSource.OSMANAGER,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
# logger.exception('Exception caught')
|
# logger.exception('Exception caught')
|
||||||
log.log(
|
log.log(
|
||||||
userservice,
|
userservice,
|
||||||
log.LogLevel.WARNING,
|
types.log.LogLevel.WARNING,
|
||||||
f'Could not remove machine from domain ({e})',
|
f'Could not remove machine from domain ({e})',
|
||||||
log.LogSource.OSMANAGER,
|
types.log.LogSource.OSMANAGER,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@ -111,9 +111,9 @@ class WinRandomPassManager(WindowsOsManager):
|
|||||||
userservice.store_value('winOsRandomPass', rnd_password)
|
userservice.store_value('winOsRandomPass', rnd_password)
|
||||||
log.log(
|
log.log(
|
||||||
userservice,
|
userservice,
|
||||||
log.LogLevel.INFO,
|
types.log.LogLevel.INFO,
|
||||||
f'Password set to "{rnd_password}"',
|
f'Password set to "{rnd_password}"',
|
||||||
log.LogSource.OSMANAGER,
|
types.log.LogSource.OSMANAGER,
|
||||||
)
|
)
|
||||||
return rnd_password
|
return rnd_password
|
||||||
|
|
||||||
|
@ -39,9 +39,9 @@ import typing
|
|||||||
from django.utils.translation import gettext
|
from django.utils.translation import gettext
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
from uds.core.types.log import LogObjectType
|
from uds.core import types
|
||||||
from uds.core.ui import gui
|
from uds.core.ui import gui
|
||||||
from uds.core.util import dateutils, log
|
from uds.core.util import dateutils
|
||||||
from uds.models import Log
|
from uds.models import Log
|
||||||
|
|
||||||
from .base import ListReport
|
from .base import ListReport
|
||||||
@ -88,8 +88,8 @@ class ListReportAuditCSV(ListReport):
|
|||||||
for i in Log.objects.filter(
|
for i in Log.objects.filter(
|
||||||
created__gte=start,
|
created__gte=start,
|
||||||
created__lte=end,
|
created__lte=end,
|
||||||
source=log.LogSource.REST,
|
source=types.log.LogSource.REST,
|
||||||
owner_type=LogObjectType.SYSLOG,
|
owner_type=types.log.LogObjectType.SYSLOG,
|
||||||
).order_by('-created'):
|
).order_by('-created'):
|
||||||
# extract user, method, response_code and request from data field
|
# extract user, method, response_code and request from data field
|
||||||
m = rx.match(i.data)
|
m = rx.match(i.data)
|
||||||
|
@ -38,7 +38,7 @@ import typing
|
|||||||
|
|
||||||
from uds.core import consts, services, types
|
from uds.core import consts, services, types
|
||||||
from uds.core.managers.userservice import UserServiceManager
|
from uds.core.managers.userservice import UserServiceManager
|
||||||
from uds.core.util import autoserializable, log
|
from uds.core.util import autoserializable
|
||||||
from uds.core.util.model import sql_stamp_seconds
|
from uds.core.util.model import sql_stamp_seconds
|
||||||
|
|
||||||
from .jobs import OVirtDeferredRemoval
|
from .jobs import OVirtDeferredRemoval
|
||||||
@ -248,7 +248,7 @@ class OVirtLinkedUserService(services.UserService, autoserializable.AutoSerializ
|
|||||||
|
|
||||||
self.cache.put('ready', '1')
|
self.cache.put('ready', '1')
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.do_log(log.LogLevel.ERROR, f'Error on setReady: {e}')
|
self.do_log(types.log.LogLevel.ERROR, f'Error on setReady: {e}')
|
||||||
# Treat as operation done, maybe the machine is ready and we can continue
|
# Treat as operation done, maybe the machine is ready and we can continue
|
||||||
|
|
||||||
return types.states.TaskState.FINISHED
|
return types.states.TaskState.FINISHED
|
||||||
@ -367,7 +367,7 @@ if sys.platform == 'win32':
|
|||||||
"""
|
"""
|
||||||
reason = str(reason)
|
reason = str(reason)
|
||||||
logger.debug('Setting error state, reason: %s', reason)
|
logger.debug('Setting error state, reason: %s', reason)
|
||||||
self.do_log(log.LogLevel.ERROR, reason)
|
self.do_log(types.log.LogLevel.ERROR, reason)
|
||||||
|
|
||||||
if self._vmid != '': # Powers off
|
if self._vmid != '': # Powers off
|
||||||
OVirtDeferredRemoval.remove(self.service().provider(), self._vmid)
|
OVirtDeferredRemoval.remove(self.service().provider(), self._vmid)
|
||||||
@ -606,7 +606,7 @@ if sys.platform == 'win32':
|
|||||||
if sql_stamp_seconds() - shutdown_start > consts.os.MAX_GUEST_SHUTDOWN_WAIT:
|
if sql_stamp_seconds() - shutdown_start > consts.os.MAX_GUEST_SHUTDOWN_WAIT:
|
||||||
logger.debug('Time is consumed, falling back to stop')
|
logger.debug('Time is consumed, falling back to stop')
|
||||||
self.do_log(
|
self.do_log(
|
||||||
log.LogLevel.ERROR,
|
types.log.LogLevel.ERROR,
|
||||||
f'Could not shutdown machine using soft power off in time ({consts.os.MAX_GUEST_SHUTDOWN_WAIT} seconds). Powering off.',
|
f'Could not shutdown machine using soft power off in time ({consts.os.MAX_GUEST_SHUTDOWN_WAIT} seconds). Powering off.',
|
||||||
)
|
)
|
||||||
# Not stopped by guest in time, but must be stopped normally
|
# Not stopped by guest in time, but must be stopped normally
|
||||||
|
@@ -37,7 +37,7 @@ import collections.abc

 from uds.core import services, types
 from uds.core.managers.crypto import CryptoManager
-from uds.core.util import log, autoserializable
+from uds.core.util import autoserializable
 from uds.core.util.model import sql_stamp_seconds

 # Not imported at runtime, just for type checking
@@ -234,7 +234,7 @@ class OpenGnsysUserService(services.UserService, autoserializable.AutoSerializab
 types.states.DeployState.ERROR, so we can do "return self.__error(reason)"
 """
 logger.debug('Setting error state, reason: %s', reason)
-self.do_log(log.LogLevel.ERROR, reason)
+self.do_log(types.log.LogLevel.ERROR, reason)

 if self._machine_id:
 try:
@@ -315,7 +315,7 @@ class OpenGnsysUserService(services.UserService, autoserializable.AutoSerializab
 self._stamp = sql_stamp_seconds()

 self.do_log(
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 f'Reserved machine {self._name}: id: {self._machine_id}, mac: {self._mac}, ip: {self._ip}',
 )

@@ -36,7 +36,7 @@ import typing

 from uds.core import jobs
 from uds import models
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now

 from .provider import OGProvider
 from .service import OGService
@@ -66,7 +66,7 @@ class OpenGnsysMaintainer(jobs.Job):
 service: models.Service
 for service in provider.services.all():
 instance: OGService = typing.cast(OGService, service.get_instance())
-since = sql_datetime() - datetime.timedelta(
+since = sql_now() - datetime.timedelta(
 hours=instance.max_reserve_hours.as_int() - 8
 ) # If less than 8 hours of reservation...
 # Now mark for removal every CACHED service that is about to expire its reservation on OpenGnsys
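
This hunk also picks up the second rename in the commit: the database-clock helper `sql_datetime()` becomes `sql_now()`. The same one-for-one substitution repeats below in the IP-machines service, the HTML5 RDP transport and the web services helper. A rough sketch of the call site above follows, with a plain `datetime.now()` stand-in for the real database-backed helper; the `max_reserve_hours` value is made up for the example.

    # Illustrative stand-in: the real sql_now() returns the database server's
    # current time; a local clock is enough to show how the call sites read.
    import datetime


    def sql_now() -> datetime.datetime:
        return datetime.datetime.now()


    max_reserve_hours = 24  # hypothetical value; UDS reads it from the service settings
    # Same arithmetic as the maintainer job above, only the helper name changed.
    since = sql_now() - datetime.timedelta(hours=max_reserve_hours - 8)
    print('Reservations started before', since, 'have less than 8 hours left')
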
@@ -37,7 +37,7 @@ import typing
 import collections.abc

 from uds.core import services, consts, types
-from uds.core.util import log, autoserializable
+from uds.core.util import autoserializable

 from . import on

@@ -147,7 +147,7 @@ class OpenNebulaLiveDeployment(services.UserService, autoserializable.AutoSerial

 self.cache.put('ready', '1')
 except Exception as e:
-self.do_log(log.LogLevel.ERROR, 'Error on setReady: {}'.format(e))
+self.do_log(types.log.LogLevel.ERROR, 'Error on setReady: {}'.format(e))
 # Treat as operation done, maybe the machine is ready and we can continue

 return types.states.TaskState.FINISHED
@@ -252,7 +252,7 @@ class OpenNebulaLiveDeployment(services.UserService, autoserializable.AutoSerial
 """
 reason = str(reason)
 logger.debug('Setting error state, reason: %s', reason)
-self.do_log(log.LogLevel.ERROR, reason)
+self.do_log(types.log.LogLevel.ERROR, reason)

 if self._vmid: # Powers off & delete it
 try:
@@ -37,7 +37,7 @@ import pickle  # nosec: not insecure, we are loading our own data
 import typing

 from uds.core import consts, services, types
-from uds.core.util import autoserializable, log
+from uds.core.util import autoserializable

 from .openstack import types as openstack_types

@@ -175,7 +175,7 @@ class OpenStackLiveUserService(

 self.cache.put('ready', '1')
 except Exception as e:
-self.do_log(log.LogLevel.ERROR, 'Error on setReady: {}'.format(e))
+self.do_log(types.log.LogLevel.ERROR, 'Error on setReady: {}'.format(e))
 # Treat as operation done, maybe the machine is ready and we can continue

 return types.states.TaskState.FINISHED
@@ -267,7 +267,7 @@ class OpenStackLiveUserService(
 self._queue = [Operation.ERROR]
 self._reason = str(reason)

-self.do_log(log.LogLevel.ERROR, self._reason)
+self.do_log(types.log.LogLevel.ERROR, self._reason)

 if self._vmid:
 # Creating machines should be deleted on error
@@ -278,7 +278,7 @@ class OpenStackLiveUserService(
 logger.warning('Can\t set machine %s state to stopped', self._vmid)
 else:
 self.do_log(
-log.LogLevel.INFO, 'Keep on error is enabled, machine will not be marked for deletion'
+types.log.LogLevel.INFO, 'Keep on error is enabled, machine will not be marked for deletion'
 )
 # Fix queue to FINISH and return it
 self._queue = [Operation.FINISH]
@@ -37,7 +37,6 @@ from django.utils.translation import gettext_noop as _
 from uds.core import types
 from uds.core.services.generics.fixed.service import FixedService
 from uds.core.ui import gui
-from uds.core.util import log

 from . import helpers
 from .deployment_fixed import OpenStackUserServiceFixed
@@ -170,7 +169,7 @@ class OpenStackServiceFixed(FixedService): # pylint: disable=too-many-public-me
 break
 except Exception: # Notifies on log, but skipt it
 self.provider().do_log(
-log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
+types.log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
 )
 logger.warning(
 'The service has machines that cannot be checked on openstack (connection error or machine has been deleted): %s',
@@ -120,7 +120,7 @@ class IPMachinesUserService(services.UserService, autoserializable.AutoSerializa

 def _error(self, reason: str) -> types.states.TaskState:
 if self._vmid:
-self.service().unassign(self._vmid)
+self.service().unlock_server(self._vmid)
 self._vmid = ''
 self._ip = ''
 self._mac = ''
@@ -141,7 +141,7 @@ class IPMachinesUserService(services.UserService, autoserializable.AutoSerializa

 def destroy(self) -> types.states.TaskState:
 if self._vmid:
-self.service().unassign(self._vmid)
+self.service().unlock_server(self._vmid)
 self._vmid = ''
 self._ip = ''
 self._mac = ''
@@ -37,7 +37,7 @@ from django.utils.translation import gettext_noop as _

 from uds.core import exceptions, services, types
 from uds.core.ui.user_interface import gui
-from uds.core.util import log, net, resolver
+from uds.core.util import net, resolver

 logger = logging.getLogger(__name__)

@@ -126,7 +126,7 @@ class PhysicalMachinesProvider(services.ServiceProvider):
 try:
 host = resolver.resolve(host)[0]
 except Exception as e:
-self.do_log(log.LogLevel.WARNING, f'Name {host} could not be resolved')
+self.do_log(types.log.LogLevel.WARNING, f'Name {host} could not be resolved')
 logger.warning('Name %s could not be resolved (%s)', host, e)
 return ''

@@ -42,9 +42,8 @@ from uds import models
 from uds.core import exceptions, types, services
 from uds.core.ui import gui
 from uds.core.util import fields
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.core.util import security
-from uds.core import services

 from .deployment_multi import IPMachinesUserService

@@ -148,7 +147,7 @@ class IPMachinesService(services.Service):
 return datetime.timedelta(hours=self.max_session_hours.value)

 def enumerate_assignables(self) -> collections.abc.Iterable[types.ui.ChoiceItem]:
-now = sql_datetime()
+now = sql_now()
 return [
 gui.choice_item(f'{server.host}|{server.mac}', server.uuid)
 for server in fields.get_server_group_from_field(self.server_group).servers.all()
@@ -163,9 +162,9 @@ class IPMachinesService(services.Service):
 ) -> types.states.TaskState:
 server: 'models.Server' = models.Server.objects.get(uuid=assignable_id)
 ipmachine_instance: IPMachinesUserService = typing.cast(IPMachinesUserService, userservice_instance)
-if server.locked_until is None or server.locked_until < sql_datetime():
+if server.locked_until is None or server.locked_until < sql_now():
 # Lock the server for 10 year right now...
-server.locked_until = sql_datetime() + datetime.timedelta(days=365)
+server.locked_until = sql_now() + datetime.timedelta(days=365)

 return ipmachine_instance.assign(server.host)

@@ -179,7 +178,7 @@ class IPMachinesService(services.Service):
 if self.randomize_host.as_bool() is True:
 random.shuffle(list_of_servers) # Reorder the list randomly if required
 for server in list_of_servers:
-if server.locked_until is None or server.locked_until < sql_datetime():
+if server.locked_until is None or server.locked_until < sql_now():
 return server.uuid
 raise exceptions.services.InsufficientResourcesException()

@@ -187,14 +186,14 @@ class IPMachinesService(services.Service):
 server = models.Server.objects.get(uuid=server_uuid)
 return server.host, server.mac

-def assign(self, server_uuid: str) -> None:
+def lock_server(self, server_uuid: str) -> None:
 try:
 server = models.Server.objects.get(uuid=server_uuid)
 server.lock(self.get_max_lock_time())
 except models.Server.DoesNotExist:
 pass

-def unassign(self, server_uuid: str) -> None:
+def unlock_server(self, server_uuid: str) -> None:
 try:
 server = models.Server.objects.get(uuid=server_uuid)
 server.lock(None)
@@ -209,13 +208,14 @@ class IPMachinesService(services.Service):
 # Maybe, an user has logged in on an unassigned machine
 # if lockForExternalAccess is enabled, we must lock it
 if self.lock_on_external_access.as_bool() is True:
-self.assign(id)
+self.do_log(types.log.LogLevel.DEBUG, f'External login detected for {id}, locking machine for {self.get_max_lock_time()} or until logout')
+self.lock_server(id)

 def process_logout(self, id: str, remote_login: bool) -> None:
 '''
 Process logout for a machine and release it.
 '''
-self.unassign(id)
+self.unlock_server(id)

 def notify_initialization(self, id: str) -> None:
 '''
@@ -223,7 +223,7 @@ class IPMachinesService(services.Service):
 Normally, this means that it's free
 '''
 logger.debug('Notify initialization for %s: %s', self, id)
-self.unassign(id)
+self.unlock_server(id)

 # Used by actor API. look parent documentation
 def get_valid_id(self, ids: collections.abc.Iterable[str]) -> typing.Optional[str]:
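
Besides the `sql_now()` and `types.log` substitutions, this file renames the service's `assign`/`unassign` pair to `lock_server`/`unlock_server` (the user service hunks above follow suit), and external logins are now logged at DEBUG level before the lock is taken. The sketch below shows the renamed pair in use; the `Server` class and `ExampleIPMachinesService` are minimal in-memory stand-ins for `uds.models.Server` and the real service, and the lock duration is a made-up value.

    # Minimal in-memory sketch of the renamed lock_server()/unlock_server() pair.
    # The real methods look the server up by uuid in the database and call Server.lock();
    # a missing server (models.Server.DoesNotExist) is silently ignored.
    import datetime
    import typing


    class Server:
        def __init__(self, uuid: str) -> None:
            self.uuid = uuid
            self.locked_until: typing.Optional[datetime.datetime] = None

        def lock(self, duration: typing.Optional[datetime.timedelta]) -> None:
            # lock(None) releases the server, mirroring server.lock(None) above.
            self.locked_until = None if duration is None else datetime.datetime.now() + duration


    class ExampleIPMachinesService:
        def __init__(self, servers: typing.Dict[str, Server]) -> None:
            self._servers = servers

        def get_max_lock_time(self) -> datetime.timedelta:
            return datetime.timedelta(hours=8)  # hypothetical duration

        def lock_server(self, server_uuid: str) -> None:
            server = self._servers.get(server_uuid)
            if server is not None:  # stands in for: except models.Server.DoesNotExist: pass
                server.lock(self.get_max_lock_time())

        def unlock_server(self, server_uuid: str) -> None:
            server = self._servers.get(server_uuid)
            if server is not None:
                server.lock(None)


    if __name__ == '__main__':
        svc = ExampleIPMachinesService({'abc123': Server('abc123')})
        svc.lock_server('abc123')    # process_login path (lock_on_external_access)
        svc.unlock_server('abc123')  # process_logout / notify_initialization path
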
@@ -38,7 +38,6 @@ from uds.core import services, types
 from uds.core.services.generics.fixed.service import FixedService
 from uds.core.services.generics.fixed.userservice import FixedUserService
 from uds.core.ui import gui
-from uds.core.util import log

 from . import helpers
 from .deployment_fixed import ProxmoxUserServiceFixed
@@ -162,7 +161,7 @@ class ProxmoxServiceFixed(FixedService): # pylint: disable=too-many-public-meth
 name='UDS Snapshot',
 )
 except Exception as e:
-self.do_log(log.LogLevel.WARNING, 'Could not create SNAPSHOT for this VM. ({})'.format(e))
+self.do_log(types.log.LogLevel.WARNING, 'Could not create SNAPSHOT for this VM. ({})'.format(e))

 def snapshot_recovery(self, userservice_instance: FixedUserService) -> None:
 userservice_instance = typing.cast(ProxmoxUserServiceFixed, userservice_instance)
@@ -176,7 +175,7 @@ class ProxmoxServiceFixed(FixedService): # pylint: disable=too-many-public-meth
 self.provider().restore_snapshot(vmid, name=snapshot.name)
 )
 except Exception as e:
-self.do_log(log.LogLevel.WARNING, 'Could not restore SNAPSHOT for this VM. ({})'.format(e))
+self.do_log(types.log.LogLevel.WARNING, 'Could not restore SNAPSHOT for this VM. ({})'.format(e))

 def get_and_assign(self) -> str:
 found_vmid: typing.Optional[str] = None
@@ -191,7 +190,7 @@ class ProxmoxServiceFixed(FixedService): # pylint: disable=too-many-public-meth
 break
 except Exception: # Notifies on log, but skipt it
 self.provider().do_log(
-log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
+types.log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
 )
 logger.warning(
 'The service has machines that cannot be checked on proxmox (connection error or machine has been deleted): %s',
@@ -38,7 +38,7 @@ from uds.core.services.generics.dynamic.publication import DynamicPublication
 from uds.core.services.generics.dynamic.service import DynamicService
 from uds.core.services.generics.dynamic.userservice import DynamicUserService
 from uds.core.ui import gui
-from uds.core.util import validators, log, fields
+from uds.core.util import validators, fields

 from . import helpers, jobs
 from .deployment_linked import ProxmoxUserserviceLinked
@@ -245,7 +245,7 @@ class ProxmoxServiceLinked(DynamicService):
 self.disable_machine_ha(vmid)
 except Exception as e:
 logger.warning('Exception disabling HA for vm %s: %s', vmid, e)
-self.do_log(level=log.LogLevel.WARNING, message=f'Exception disabling HA for vm {vmid}: {e}')
+self.do_log(level=types.log.LogLevel.WARNING, message=f'Exception disabling HA for vm {vmid}: {e}')

 # And remove it
 return self.provider().remove_machine(vmid)
@@ -36,7 +36,7 @@ import typing
 import collections.abc

 from uds.core import services, consts, types
-from uds.core.util import autoserializable, log
+from uds.core.util import autoserializable

 from .xen_client import XenPowerState

@@ -154,7 +154,7 @@ class XenLinkedDeployment(services.UserService, autoserializable.AutoSerializabl
 self.cache.put('ready', '1', 30)
 except Exception as e:
 # On case of exception, log an an error and return as if the operation was executed
-self.do_log(log.LogLevel.ERROR, 'Error setting machine state: {}'.format(e))
+self.do_log(types.log.LogLevel.ERROR, 'Error setting machine state: {}'.format(e))
 # return self.__error('Machine is not available anymore')

 return types.states.TaskState.FINISHED
@@ -231,7 +231,7 @@ class XenLinkedDeployment(services.UserService, autoserializable.AutoSerializabl

 def _error(self, reason: typing.Any) -> types.states.TaskState:
 logger.debug('Setting error state, reason: %s', reason)
-self.do_log(log.LogLevel.ERROR, reason)
+self.do_log(types.log.LogLevel.ERROR, reason)

 if self._vmid != '': # Powers off and delete VM
 try:
@@ -35,7 +35,7 @@ import typing

 from uds.core import types
 from uds.core.services.generics.fixed.userservice import FixedUserService, Operation
-from uds.core.util import log, autoserializable
+from uds.core.util import autoserializable

 from . import xen_client

@@ -79,7 +79,7 @@ class XenFixedUserService(FixedUserService, autoserializable.AutoSerializable):
 self.cache.put('ready', '1', 30)
 except Exception as e:
 # On case of exception, log an an error and return as if the operation was executed
-self.do_log(log.LogLevel.ERROR, 'Error setting machine state: {}'.format(e))
+self.do_log(types.log.LogLevel.ERROR, 'Error setting machine state: {}'.format(e))
 # return self.__error('Machine is not available anymore')

 return types.states.TaskState.FINISHED
@@ -38,7 +38,6 @@ from uds.core import consts, services, types
 from uds.core.services.generics.fixed.service import FixedService
 from uds.core.services.generics.fixed.userservice import FixedUserService
 from uds.core.ui import gui
-from uds.core.util import log
 from uds.core.util.decorators import cached

 from . import helpers
@@ -214,7 +213,7 @@ class XenFixedService(FixedService): # pylint: disable=too-many-public-methods
 name='UDS Snapshot',
 )
 except Exception as e:
-self.do_log(log.LogLevel.WARNING, 'Could not create SNAPSHOT for this VM. ({})'.format(e))
+self.do_log(types.log.LogLevel.WARNING, 'Could not create SNAPSHOT for this VM. ({})'.format(e))

 def snapshot_recovery(self, userservice_instance: FixedUserService) -> None:
 userservice_instance = typing.cast(XenFixedUserService, userservice_instance)
@@ -228,7 +227,7 @@ class XenFixedService(FixedService): # pylint: disable=too-many-public-methods
 try:
 userservice_instance._task = self.provider().restore_snapshot(snapshot['id'])
 except Exception as e:
-self.do_log(log.LogLevel.WARNING, 'Could not restore SNAPSHOT for this VM. ({})'.format(e))
+self.do_log(types.log.LogLevel.WARNING, 'Could not restore SNAPSHOT for this VM. ({})'.format(e))

 def get_and_assign(self) -> str:
 found_vmid: typing.Optional[str] = None
@@ -243,7 +242,7 @@ class XenFixedService(FixedService): # pylint: disable=too-many-public-methods
 break
 except Exception: # Notifies on log, but skipt it
 self.provider().do_log(
-log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
+types.log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
 )
 logger.warning(
 'The service has machines that cannot be checked on xen (connection error or machine has been deleted): %s',
@@ -40,7 +40,7 @@ from uds import models
 from uds.core import transports, types, ui, consts
 from uds.core.managers.crypto import CryptoManager
 from uds.core.util import fields
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now

 # Not imported at runtime, just for type checking
 if typing.TYPE_CHECKING:
@@ -458,7 +458,7 @@ class HTML5RDPTransport(transports.Transport):
 + '_'
 + sanitize(user.name)
 + '/'
-+ sql_datetime().strftime('%Y%m%d-%H%M')
++ sql_now().strftime('%Y%m%d-%H%M')
 )
 params['create-recording-path'] = 'true'

@@ -48,7 +48,7 @@ from uds.core.services.exceptions import (
 )
 from uds.core.util import html
 from uds.core.util.config import GlobalConfig
-from uds.core.util.model import sql_datetime
+from uds.core.util.model import sql_now
 from uds.models import MetaPool, Network, ServicePool, ServicePoolGroup, TicketStore, Transport

 # Not imported at runtime, just for type checking
@@ -129,7 +129,7 @@ def get_services_info_dict(
 available_metapools = list(
 MetaPool.metapools_for_groups(groups, request.user)
 ) # Pass in user to get "number_assigned" to optimize
-now = sql_datetime()
+now = sql_now()

 # Information for administrators
 nets = ''
@@ -36,6 +36,7 @@ import collections.abc
 from django.http import HttpResponse
 from django.views.decorators.cache import cache_page, never_cache

+from uds.core import types
 from uds.core.auths.auth import web_login_required, web_password
 from uds.core.managers.userservice import UserServiceManager
 from uds.core.types.requests import ExtendedHttpRequest
@@ -199,11 +200,11 @@ def action(request: 'ExtendedHttpRequestWithUser', service_id: str, action_strin
 rebuild = True
 log.log(
 userService.deployed_service,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 "Removing User Service {} as requested by {} from {}".format(
 userService.friendly_name, request.user.pretty_name, request.ip
 ),
-log.LogSource.WEB,
+types.log.LogSource.WEB,
 )
 UserServiceManager().request_logoff(userService)
 userService.release()
@@ -215,11 +216,11 @@ def action(request: 'ExtendedHttpRequestWithUser', service_id: str, action_strin
 rebuild = True
 log.log(
 userService.deployed_service,
-log.LogLevel.INFO,
+types.log.LogLevel.INFO,
 "Reseting User Service {} as requested by {} from {}".format(
 userService.friendly_name, request.user.pretty_name, request.ip
 ),
-log.LogSource.WEB,
+types.log.LogSource.WEB,
 )
 # UserServiceManager().requestLogoff(userService)
 UserServiceManager().reset(userService)
@@ -32,7 +32,7 @@ import logging

 from unittest import mock

-from uds.core.util import log
+from uds.core import types

 from ...utils import rest
 from ...fixtures import servers as servers_fixtures
@@ -71,7 +71,7 @@ class ServerEventsLogTest(rest.test.RESTTestCase):
 )
 self.assertEqual(response.status_code, 200)
 # First call shout have
-the_log.assert_any_call(server, log.LogLevel.INFO, 'test message', log.LogSource.SERVER, None)
+the_log.assert_any_call(server, types.log.LogLevel.INFO, 'test message', types.log.LogSource.SERVER, None)

 # Now notify to an userService
 response = self.client.rest_post(
@@ -87,7 +87,7 @@ class ServerEventsLogTest(rest.test.RESTTestCase):

 self.assertEqual(response.status_code, 200)
 the_log.assert_any_call(
-userService, log.LogLevel.INFO, 'test message userservice', log.LogSource.SERVER, None
+userService, types.log.LogLevel.INFO, 'test message userservice', types.log.LogSource.SERVER, None
 )

 def test_event_log_fail(self) -> None:
@@ -62,7 +62,7 @@ class ServerRegisterTest(rest.test.RESTTestCase):
 'subtype': crypto.CryptoManager.manager().random_string(10),
 'os': '', # To be set on tests
 'hostname': 'test',
-'log_level': log.LogLevel.INFO.value,
+'log_level': types.log.LogLevel.INFO.value,
 'mac': random_mac(),
 }
 self.login(as_admin=False) # As staff
Some files were not shown because too many files have changed in this diff.