1
0
mirror of https://github.com/dkmstr/openuds.git synced 2024-12-22 13:34:04 +03:00

Refactorized log types and fixes

This commit is contained in:
Adolfo Gómez García 2024-04-28 02:21:47 +02:00
parent 6694b9d5bc
commit 53e0cefc21
No known key found for this signature in database
GPG Key ID: DD1ABF20724CDA23
107 changed files with 825 additions and 617 deletions

View File

@ -41,7 +41,7 @@ from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.views.generic.base import View
from uds.core import consts, exceptions
from uds.core import consts, exceptions, types
from uds.core.util import modfinder
from . import processors, log
@ -164,20 +164,20 @@ class Dispatcher(View):
logger.debug('Path: %s', full_path)
logger.debug('Error: %s', e)
log.log_operation(handler, 400, log.LogLevel.ERROR)
log.log_operation(handler, 400, types.log.LogLevel.ERROR)
return http.HttpResponseBadRequest(
f'Invalid parameters invoking {full_path}: {e}',
content_type="text/plain",
)
except AttributeError:
allowed_methods: list[str] = [n for n in ['get', 'post', 'put', 'delete'] if hasattr(handler, n)]
log.log_operation(handler, 405, log.LogLevel.ERROR)
log.log_operation(handler, 405, types.log.LogLevel.ERROR)
return http.HttpResponseNotAllowed(allowed_methods, content_type="text/plain")
except exceptions.rest.AccessDenied:
log.log_operation(handler, 403, log.LogLevel.ERROR)
log.log_operation(handler, 403, types.log.LogLevel.ERROR)
return http.HttpResponseForbidden('access denied', content_type="text/plain")
except Exception:
log.log_operation(handler, 500, log.LogLevel.ERROR)
log.log_operation(handler, 500, types.log.LogLevel.ERROR)
logger.exception('error accessing attribute')
logger.debug('Getting attribute %s for %s', http_method, full_path)
return http.HttpResponseServerError('Unexcepected error', content_type="text/plain")
@ -206,28 +206,28 @@ class Dispatcher(View):
# Log the operation on the audit log for admin
# Exceptions will also be logged, but with ERROR level
log.log_operation(handler, response.status_code, log.LogLevel.INFO)
log.log_operation(handler, response.status_code, types.log.LogLevel.INFO)
return response
except exceptions.rest.RequestError as e:
log.log_operation(handler, 400, log.LogLevel.ERROR)
log.log_operation(handler, 400, types.log.LogLevel.ERROR)
return http.HttpResponseBadRequest(str(e), content_type="text/plain")
except exceptions.rest.ResponseError as e:
log.log_operation(handler, 500, log.LogLevel.ERROR)
log.log_operation(handler, 500, types.log.LogLevel.ERROR)
return http.HttpResponseServerError(str(e), content_type="text/plain")
except exceptions.rest.NotSupportedError as e:
log.log_operation(handler, 501, log.LogLevel.ERROR)
log.log_operation(handler, 501, types.log.LogLevel.ERROR)
return http.HttpResponseBadRequest(str(e), content_type="text/plain")
except exceptions.rest.AccessDenied as e:
log.log_operation(handler, 403, log.LogLevel.ERROR)
log.log_operation(handler, 403, types.log.LogLevel.ERROR)
return http.HttpResponseForbidden(str(e), content_type="text/plain")
except exceptions.rest.NotFound as e:
log.log_operation(handler, 404, log.LogLevel.ERROR)
log.log_operation(handler, 404, types.log.LogLevel.ERROR)
return http.HttpResponseNotFound(str(e), content_type="text/plain")
except exceptions.rest.HandlerError as e:
log.log_operation(handler, 500, log.LogLevel.ERROR)
log.log_operation(handler, 500, types.log.LogLevel.ERROR)
return http.HttpResponseBadRequest(str(e), content_type="text/plain")
except Exception as e:
log.log_operation(handler, 500, log.LogLevel.ERROR)
log.log_operation(handler, 500, types.log.LogLevel.ERROR)
# Get exception backtrace
trace_back = traceback.format_exc()
logger.error('Exception processing request: %s', full_path)

View File

@ -44,7 +44,7 @@ from uds.core.managers.userservice import UserServiceManager
from uds.core.util import log, security
from uds.core.util.cache import Cache
from uds.core.util.config import GlobalConfig
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.types.states import State
from uds.models import Server, Service, TicketStore, UserService
from uds.models.service import ServiceTokenAlias
@ -194,23 +194,23 @@ class ActorV3Action(Handler):
# ensure idsLists has upper and lower versions for case sensitive databases
idsList = fix_list_of_ids(idsList)
validId: typing.Optional[str] = service.get_valid_id(idsList)
service_id: typing.Optional[str] = service.get_valid_id(idsList)
is_remote = self._params.get('session_type', '')[:4] in ('xrdp', 'RDP-')
# Must be valid
if action in (NotifyActionType.LOGIN, NotifyActionType.LOGOUT):
if not validId: # For login/logout, we need a valid id
if not service_id: # For login/logout, we need a valid id
raise Exception()
# Notify Service that someone logged in/out
if action == NotifyActionType.LOGIN:
# Try to guess if this is a remote session
service.process_login(validId, remote_login=is_remote)
service.process_login(service_id, remote_login=is_remote)
elif action == NotifyActionType.LOGOUT:
service.process_logout(validId, remote_login=is_remote)
service.process_logout(service_id, remote_login=is_remote)
elif action == NotifyActionType.DATA:
service.notify_data(validId, self._params['data'])
service.notify_data(service_id, self._params['data'])
else:
raise Exception('Invalid action')
@ -294,7 +294,7 @@ class Register(ActorV3Action):
'run_once_command': self._params['run_once_command'],
'custom': self._params.get('custom', ''),
})
actor_token.stamp = sql_datetime()
actor_token.stamp = sql_now()
actor_token.save()
logger.info('Registered actor %s', self._params)
found = True
@ -318,7 +318,7 @@ class Register(ActorV3Action):
'version': '',
'os_type': self._params.get('os', types.os.KnownOS.UNKNOWN.os_name()),
'mac': self._params['mac'],
'stamp': sql_datetime(),
'stamp': sql_now(),
}
actor_token = Server.objects.create(**kwargs)
@ -704,14 +704,14 @@ class Log(ActorV3Action):
userservice = self.get_userservice()
if userservice.actor_version < '4.0.0':
# Adjust loglevel to own, we start on 10000 for OTHER, and received is 0 for OTHER
level = log.LogLevel.from_int(int(self._params['level']) + 10000)
level = types.log.LogLevel.from_int(int(self._params['level']) + 10000)
else:
level = log.LogLevel.from_int(int(self._params['level']))
level = types.log.LogLevel.from_int(int(self._params['level']))
log.log(
userservice,
level,
self._params['message'],
log.LogSource.ACTOR,
types.log.LogSource.ACTOR,
)
return ActorV3Action.actor_result('ok')

View File

@ -39,7 +39,7 @@ from django.utils.translation import gettext as _
from uds.core import exceptions
from uds.core.util import ensure, permissions
from uds.core.util.model import process_uuid, sql_datetime
from uds.core.util.model import process_uuid, sql_now
from uds.models.calendar import Calendar
from uds.models.calendar_rule import CalendarRule, FrequencyInfo
from uds.REST.model import DetailHandler
@ -158,7 +158,7 @@ class CalendarRules(DetailHandler): # pylint: disable=too-many-public-methods
logger.debug('Deleting rule %s from %s', item, parent)
try:
calRule = parent.rules.get(uuid=process_uuid(item))
calRule.calendar.modified = sql_datetime()
calRule.calendar.modified = sql_now()
calRule.calendar.save()
calRule.delete()
except Exception as e:

View File

@ -114,10 +114,10 @@ class MetaServicesPool(DetailHandler):
log.log(
parent,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
("Added" if uuid is None else "Modified")
+ " meta pool member {}/{}/{} by {}".format(pool.name, priority, enabled, self._user.pretty_name),
log.LogSource.ADMIN,
types.log.LogSource.ADMIN,
)
def delete_item(self, parent: 'Model', item: str) -> None:
@ -127,7 +127,7 @@ class MetaServicesPool(DetailHandler):
member.delete()
log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)
class MetaAssignedService(DetailHandler):
@ -264,7 +264,7 @@ class MetaAssignedService(DetailHandler):
else:
raise self.invalid_item_response(_('Item is not removable'))
log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)
# Only owner is allowed to change right now
def save_item(self, parent: 'Model', item: typing.Optional[str]) -> None:
@ -296,4 +296,4 @@ class MetaAssignedService(DetailHandler):
userservice.save()
# Log change
log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)

View File

@ -110,9 +110,9 @@ class AccessCalendars(DetailHandler):
log.log(
parent,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'{"Added" if uuid is None else "Updated"} access calendar {calendar.name}/{access} by {self._user.pretty_name}',
log.LogSource.ADMIN,
types.log.LogSource.ADMIN,
)
def delete_item(self, parent: 'Model', item: str) -> None:
@ -121,7 +121,7 @@ class AccessCalendars(DetailHandler):
logStr = f'Removed access calendar {calendarAccess.calendar.name} by {self._user.pretty_name}'
calendarAccess.delete()
log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)
class ActionsCalendars(DetailHandler):
@ -222,7 +222,7 @@ class ActionsCalendars(DetailHandler):
params=params,
)
log.log(parent, log.LogLevel.INFO, log_string, log.LogSource.ADMIN)
log.log(parent, types.log.LogLevel.INFO, log_string, types.log.LogSource.ADMIN)
def delete_item(self, parent: 'Model', item: str) -> None:
parent = ensure.is_instance(parent, models.ServicePool)
@ -236,7 +236,7 @@ class ActionsCalendars(DetailHandler):
calendarAction.delete()
log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)
def execute(self, parent: 'Model', item: str) -> typing.Any:
parent = ensure.is_instance(parent, models.ServicePool)
@ -252,7 +252,7 @@ class ActionsCalendars(DetailHandler):
f'{calendarAction.params}" by {self._user.pretty_name}'
)
log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)
calendarAction.execute()
return self.success()

View File

@ -38,7 +38,7 @@ from django.utils.translation import gettext_lazy as _
from uds import models
from uds.core import consts, types
from uds.core.exceptions import rest as rest_exceptions
from uds.core.util import decorators, validators, log, model
from uds.core.util import decorators, validators, model
from uds.REST import Handler
from uds.REST.utils import rest_result
@ -50,7 +50,7 @@ logger = logging.getLogger(__name__)
class ServerRegisterBase(Handler):
def post(self) -> collections.abc.MutableMapping[str, typing.Any]:
serverToken: models.Server
now = model.sql_datetime()
now = model.sql_now()
ip = self._params.get('ip', self.request.ip)
if ':' in ip:
# If zone is present, remove it
@ -122,7 +122,7 @@ class ServerRegisterBase(Handler):
listen_port=port,
hostname=self._params['hostname'],
certificate=certificate,
log_level=self._params.get('log_level', log.LogLevel.INFO.value),
log_level=self._params.get('log_level', types.log.LogLevel.INFO.value),
stamp=now,
type=self._params['type'],
subtype=self._params.get('subtype', ''), # Optional

View File

@ -38,7 +38,7 @@ from django.utils.translation import gettext_lazy as _
from uds import models
from uds.core import consts, types, ui
from uds.core.util import net, permissions, ensure
from uds.core.util.model import sql_datetime, process_uuid
from uds.core.util.model import sql_now, process_uuid
from uds.core.exceptions.rest import NotFound, RequestError
from uds.REST.model import DetailHandler, ModelHandler
@ -247,12 +247,12 @@ class ServersServers(DetailHandler):
parent = ensure.is_instance(parent, models.ServerGroup)
# Item is the uuid of the server to add
server: typing.Optional['models.Server'] = None # Avoid warning on reference before assignment
mac: str = ''
if item is None:
# Create new, depending on server type
if parent.type == types.servers.ServerType.UNMANAGED:
# Ensure mac is empty or valid
mac: str = self._params['mac'].strip().upper()
mac = self._params['mac'].strip().upper()
if mac and not net.is_valid_mac(mac):
raise self.invalid_request_response('Invalid MAC address')
# Create a new one, and add it to group
@ -265,7 +265,7 @@ class ServersServers(DetailHandler):
mac=mac,
type=parent.type,
subtype=parent.subtype,
stamp=sql_datetime(),
stamp=sql_now(),
)
# Add to group
parent.servers.add(server)
@ -284,7 +284,7 @@ class ServersServers(DetailHandler):
pass
else:
if parent.type == types.servers.ServerType.UNMANAGED:
mac: str = self._params['mac'].strip().upper()
mac = self._params['mac'].strip().upper()
if mac and not net.is_valid_mac(mac):
raise self.invalid_request_response('Invalid MAC address')
try:
@ -292,9 +292,10 @@ class ServersServers(DetailHandler):
# Update register info also on update
register_username=self._user.name,
register_ip=self._request.ip,
ip=self._params['ip'],
hostname=self._params['hostname'],
ip=self._params['ip'],
mac=mac,
stamp=sql_now(), # Modified now
)
except Exception:
raise self.invalid_item_response() from None

View File

@ -44,7 +44,7 @@ from uds.core.ui import gui
from uds.core.consts.images import DEFAULT_THUMB_BASE64
from uds.core.util import log, permissions, ensure
from uds.core.util.config import GlobalConfig
from uds.core.util.model import sql_datetime, process_uuid
from uds.core.util.model import sql_now, process_uuid
from uds.core.types.states import State
from uds.models import Account, Image, OSManager, Service, ServicePool, ServicePoolGroup, User
from uds.REST.model import ModelHandler
@ -130,7 +130,7 @@ class ServicesPools(ModelHandler):
self, *args: typing.Any, **kwargs: typing.Any
) -> typing.Generator[types.rest.ItemDictType, None, None]:
# Optimized query, since there is a lot of info needed for these
d = sql_datetime() - datetime.timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
d = sql_now() - datetime.timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
return super().get_items(
overview=kwargs.get('overview', True),
query=(

View File

@ -41,7 +41,7 @@ from uds import models
from uds.core import exceptions, types
from uds.core.util import permissions
from uds.core.util.cache import Cache
from uds.core.util.model import process_uuid, sql_datetime
from uds.core.util.model import process_uuid, sql_now
from uds.core.types.states import State
from uds.core.util.stats import counters
from uds.REST import Handler
@ -69,7 +69,7 @@ def get_servicepools_counters(
cacheKey = (
(servicePool and str(servicePool.id) or 'all') + str(counter_type) + str(POINTS) + str(since_days)
)
to = sql_datetime()
to = sql_now()
since: datetime.datetime = to - datetime.timedelta(days=since_days)
cachedValue: typing.Optional[bytes] = cache.get(cacheKey)

View File

@ -104,8 +104,8 @@ class TunnelTicket(Handler):
now = sql_stamp_seconds()
totalTime = now - extra.get('b', now - 1)
msg = f'User {user.name} stopped tunnel {extra.get("t", "")[:8]}... to {host}:{port}: u:{sent}/d:{recv}/t:{totalTime}.'
log.log(user.manager, log.LogLevel.INFO, msg)
log.log(user_service, log.LogLevel.INFO, msg)
log.log(user.manager, types.log.LogLevel.INFO, msg)
log.log(user_service, types.log.LogLevel.INFO, msg)
# Try to log Close event
try:
@ -133,8 +133,8 @@ class TunnelTicket(Handler):
tunnel=self._args[0],
)
msg = f'User {user.name} started tunnel {self._args[0][:8]}... to {host}:{port} from {self._args[1]}.'
log.log(user.manager, log.LogLevel.INFO, msg)
log.log(user_service, log.LogLevel.INFO, msg)
log.log(user.manager, types.log.LogLevel.INFO, msg)
log.log(user_service, types.log.LogLevel.INFO, msg)
# Generate new, notify only, ticket
notifyTicket = models.TicketStore.create_for_tunnel(
userService=user_service,

View File

@ -224,7 +224,7 @@ class AssignedService(DetailHandler):
else:
raise self.invalid_item_response(_('Item is not removable'))
log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)
# Only owner is allowed to change right now
def save_item(self, parent: 'Model', item: typing.Optional[str]) -> None:
@ -253,7 +253,7 @@ class AssignedService(DetailHandler):
userService.save()
# Log change
log.log(parent, log.LogLevel.INFO, logStr, log.LogSource.ADMIN)
log.log(parent, types.log.LogLevel.INFO, logStr, types.log.LogSource.ADMIN)
def reset(self, parent: 'models.ServicePool', item: str) -> typing.Any:
userService = parent.userServices.get(uuid=process_uuid(item))
@ -376,9 +376,9 @@ class Groups(DetailHandler):
parent.assignedGroups.add(group)
log.log(
parent,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'Added group {group.pretty_name} by {self._user.pretty_name}',
log.LogSource.ADMIN,
types.log.LogSource.ADMIN,
)
def delete_item(self, parent: 'Model', item: str) -> None:
@ -387,9 +387,9 @@ class Groups(DetailHandler):
parent.assignedGroups.remove(group)
log.log(
parent,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'Removed group {group.pretty_name} by {self._user.pretty_name}',
log.LogSource.ADMIN,
types.log.LogSource.ADMIN,
)
@ -438,9 +438,9 @@ class Transports(DetailHandler):
parent.transports.add(transport)
log.log(
parent,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'Added transport {transport.name} by {self._user.pretty_name}',
log.LogSource.ADMIN,
types.log.LogSource.ADMIN,
)
def delete_item(self, parent: 'Model', item: str) -> None:
@ -449,9 +449,9 @@ class Transports(DetailHandler):
parent.transports.remove(transport)
log.log(
parent,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'Removed transport {transport.name} by {self._user.pretty_name}',
log.LogSource.ADMIN,
types.log.LogSource.ADMIN,
)
@ -482,9 +482,9 @@ class Publications(DetailHandler):
log.log(
parent,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'Initiated publication v{parent.current_pub_revision} by {self._user.pretty_name}',
log.LogSource.ADMIN,
types.log.LogSource.ADMIN,
)
return self.success()
@ -512,9 +512,9 @@ class Publications(DetailHandler):
log.log(
parent,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'Canceled publication v{parent.current_pub_revision} by {self._user.pretty_name}',
log.LogSource.ADMIN,
types.log.LogSource.ADMIN,
)
return self.success()

View File

@ -77,7 +77,7 @@ class TokenInfo:
return TokenInfo(
access_token=dct['access_token'],
token_type=dct['token_type'],
expires=model.sql_datetime() + datetime.timedelta(seconds=dct['expires_in'] - 10),
expires=model.sql_now() + datetime.timedelta(seconds=dct['expires_in'] - 10),
refresh_token=dct['refresh_token'],
scope=dct['scope'],
info=dct.get('info', {}),
@ -560,7 +560,7 @@ class OAuth2Authenticator(auths.Authenticator):
token = TokenInfo(
access_token=parameters.get_params.get('access_token', ''),
token_type=parameters.get_params.get('token_type', ''),
expires=model.sql_datetime()
expires=model.sql_now()
+ datetime.timedelta(seconds=int(parameters.get_params.get('expires_in', 0))),
refresh_token=parameters.get_params.get('refresh_token', ''),
scope=parameters.get_params.get('scope', ''),

View File

@ -49,7 +49,7 @@ from uds.core.managers.crypto import CryptoManager
from uds.core.types.requests import ExtendedHttpRequest
from uds.core.ui import gui
from uds.core.util import security, decorators, auth as auth_utils
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
# Not imported at runtime, just for type checking
if typing.TYPE_CHECKING:
@ -551,9 +551,9 @@ class SAMLAuthenticator(auths.Authenticator):
),
# This is a date of end of validity
'metadataValidUntil': (
sql_datetime() + datetime.timedelta(days=self.metadata_validity_duration.as_int())
sql_now() + datetime.timedelta(days=self.metadata_validity_duration.as_int())
if self.metadata_cache_duration.value > 0
else sql_datetime() + datetime.timedelta(days=365 * 10)
else sql_now() + datetime.timedelta(days=365 * 10)
),
'nameIdEncrypted': self.use_name_id_encrypted.as_bool(),
'authnRequestsSigned': self.use_authn_requests_signed.as_bool(),

View File

@ -33,35 +33,26 @@ Provides useful functions for authenticating, used by web interface.
Author: Adolfo Gómez, dkmaster at dkmon dot com
'''
import base64
import codecs
import collections.abc
import logging
import typing
import collections.abc
import codecs
from functools import wraps
from django.http import (
HttpResponseRedirect,
HttpResponseForbidden,
HttpResponse,
HttpRequest,
)
from django.utils.translation import get_language
from django.urls import reverse
from django.http import HttpRequest, HttpResponse, HttpResponseForbidden, HttpResponseRedirect
from django.urls import reverse
from django.utils.translation import get_language
from django.utils.translation import gettext as _
from uds.core import auths, types, exceptions, consts
from uds import models
from uds.core import auths, consts, exceptions, types
from uds.core.auths import Authenticator as AuthenticatorInstance
from uds.core.managers.crypto import CryptoManager
from uds.core.types.requests import ExtendedHttpRequest
from uds.core.util import log
from uds.core.util import net
from uds.core.util import config
from uds.core.types.states import State
from uds.core.util import config, log, net
from uds.core.util.config import GlobalConfig
from uds.core.util.stats import events
from uds.core.types.states import State
from uds.core.managers.crypto import CryptoManager
from uds.core.auths import Authenticator as AuthenticatorInstance
from uds import models
# Not imported at runtime, just for type checking
if typing.TYPE_CHECKING:
@ -396,9 +387,7 @@ def web_login(
Helper function to, once the user is authenticated, store the information at the user session.
@return: Always returns True
"""
from uds import ( # pylint: disable=import-outside-toplevel # to avoid circular imports
REST,
)
from uds import REST # pylint: disable=import-outside-toplevel # to avoid circular imports
if user.id != consts.auth.ROOT_ID: # If not ROOT user (this user is not inside any authenticator)
manager_id = user.manager.id
@ -515,12 +504,12 @@ def log_login(
]
)
)
level = log.LogLevel.INFO if log_string == 'Logged in' else log.LogLevel.ERROR
level = types.log.LogLevel.INFO if log_string == 'Logged in' else types.log.LogLevel.ERROR
log.log(
authenticator,
level,
f'user {userName} has {log_string} from {request.ip} where os is {request.os.os.name}',
log.LogSource.WEB,
types.log.LogSource.WEB,
)
try:
@ -530,7 +519,7 @@ def log_login(
user,
level,
f'{log_string} from {request.ip} where OS is {request.os.os.name}',
log.LogSource.WEB,
types.log.LogSource.WEB,
)
except Exception: # nosec: root user is not on any authenticator, will fail with an exception we can ignore
logger.info('Root %s from %s where OS is %s', log_string, request.ip, request.os.os.name)
@ -541,10 +530,10 @@ def log_logout(request: 'ExtendedHttpRequest') -> None:
if request.user.manager.id:
log.log(
request.user.manager,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'user {request.user.name} has logged out from {request.ip}',
log.LogSource.WEB,
types.log.LogSource.WEB,
)
log.log(request.user, log.LogLevel.INFO, f'has logged out from {request.ip}', log.LogSource.WEB)
log.log(request.user, types.log.LogLevel.INFO, f'has logged out from {request.ip}', types.log.LogSource.WEB)
else:
logger.info('Root has logged out from %s', request.ip)

View File

@ -42,7 +42,7 @@ from django.db import transaction, OperationalError
from django.db.models import Q
from uds.models import DelayedTask as DBDelayedTask
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.environment import Environment
from uds.core.util import singleton
@ -107,7 +107,7 @@ class DelayedTaskRunner(metaclass=singleton.Singleton):
return DelayedTaskRunner()
def execute_delayed_task(self) -> None:
now = sql_datetime()
now = sql_now()
filt = Q(execution_time__lt=now) | Q(insert_date__gt=now + timedelta(seconds=30))
# If next execution is before now or last execution is in the future (clock changed on this server, we take that task as executable)
try:
@ -141,7 +141,7 @@ class DelayedTaskRunner(metaclass=singleton.Singleton):
DelayedTaskThread(task_instance).start()
def _insert(self, instance: DelayedTask, delay: int, tag: str) -> None:
now = sql_datetime()
now = sql_now()
exec_time = now + timedelta(seconds=delay)
cls = instance.__class__

View File

@ -48,7 +48,7 @@ class JobsFactory(factory.Factory['Job']):
Ensures that uds core workers are correctly registered in database and in factory
"""
from uds.models import Scheduler # pylint: disable=import-outside-toplevel
from uds.core.util.model import sql_datetime # pylint: disable=import-outside-toplevel
from uds.core.util.model import sql_now # pylint: disable=import-outside-toplevel
from uds.core.types.states import State # pylint: disable=import-outside-toplevel
from uds.core import workers # pylint: disable=import-outside-toplevel
@ -61,7 +61,7 @@ class JobsFactory(factory.Factory['Job']):
try:
type_.setup()
# We use database server datetime
now = sql_datetime()
now = sql_now()
next_ = now
job = Scheduler.objects.create(
name=name,

View File

@ -41,7 +41,7 @@ from django.db import transaction, DatabaseError, connections
from django.db.models import Q
from uds.models import Scheduler as DBScheduler
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.types.states import State
from .jobs_factory import JobsFactory
@ -107,7 +107,7 @@ class JobThread(threading.Thread):
DBScheduler.objects.select_for_update().filter(id=self._db_job_id).update(
state=State.FOR_EXECUTE,
owner_server='',
next_execution=sql_datetime() + timedelta(seconds=self._freq),
next_execution=sql_now() + timedelta(seconds=self._freq),
)
@ -150,7 +150,7 @@ class Scheduler:
"""
jobInstance = None
try:
now = sql_datetime() # Datetimes are based on database server times
now = sql_now() # Datetimes are based on database server times
fltr = Q(state=State.FOR_EXECUTE) & (
Q(last_execution__gt=now) | Q(next_execution__lt=now)
)
@ -206,7 +206,7 @@ class Scheduler:
owner_server=''
) # @UndefinedVariable
DBScheduler.objects.select_for_update().filter(
last_execution__lt=sql_datetime() - timedelta(minutes=15),
last_execution__lt=sql_now() - timedelta(minutes=15),
state=State.RUNNING,
).update(
owner_server='', state=State.FOR_EXECUTE

View File

@ -34,7 +34,7 @@ import typing
import logging
from uds.core.util import singleton
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.models.log import Log
# from uds.core.workers.log
@ -74,7 +74,7 @@ class LogManager(metaclass=singleton.Singleton):
Log.objects.create(
owner_type=owner_type.value,
owner_id=owner_id,
created=sql_datetime(),
created=sql_now(),
source=source,
level=level,
data=message,

View File

@ -35,8 +35,8 @@ import typing
from django.apps import apps
from django.db import connections
from uds.core import types
from uds.core.util import singleton
from uds.core.util.log import LogLevel
if typing.TYPE_CHECKING:
pass
@ -73,7 +73,7 @@ class NotificationsManager(metaclass=singleton.Singleton):
def manager() -> 'NotificationsManager':
return NotificationsManager() # Singleton pattern will return always the same instance
def notify(self, group: str, identificator: str, level: LogLevel, message: str, *args: typing.Any) -> None:
def notify(self, group: str, identificator: str, level: types.log.LogLevel, message: str, *args: typing.Any) -> None:
from uds.models.notifications import Notification # pylint: disable=import-outside-toplevel
# Due to use of local db, we must ensure that it exists (and cannot do it on ready)

View File

@ -46,7 +46,7 @@ from uds.core.types.states import State
from uds.core.util import log
from uds.models import ServicePoolPublication, ServicePool
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.util import singleton
@ -73,7 +73,7 @@ class PublicationOldMachinesCleaner(DelayedTask):
if servicePoolPub.state != State.REMOVABLE:
logger.info('Already removed')
now = sql_datetime()
now = sql_now()
current_publication: typing.Optional[ServicePoolPublication] = (
servicePoolPub.deployed_service.active_publication()
)
@ -100,7 +100,7 @@ class PublicationLauncher(DelayedTask):
logger.debug('Publishing')
servicePoolPub: typing.Optional[ServicePoolPublication] = None
try:
now = sql_datetime()
now = sql_now()
with transaction.atomic():
servicePoolPub = ServicePoolPublication.objects.select_for_update().get(pk=self._publicationId)
if not servicePoolPub:
@ -267,7 +267,7 @@ class PublicationManager(metaclass=singleton.Singleton):
publication: typing.Optional[ServicePoolPublication] = None
try:
now = sql_datetime()
now = sql_now()
publication = servicepool.publications.create(
state=State.LAUNCHING,
state_date=now,
@ -303,9 +303,9 @@ class PublicationManager(metaclass=singleton.Singleton):
logger.info('Double cancel invoked for a publication')
log.log(
publication.deployed_service,
log.LogLevel.WARNING,
types.log.LogLevel.WARNING,
'Forced cancel on publication, you must check uncleaned resources manually',
log.LogSource.ADMIN,
types.log.LogSource.ADMIN,
)
publication.set_state(State.CANCELED)
publication.save()

View File

@ -215,7 +215,7 @@ class ServerManager(metaclass=singleton.Singleton):
# Look for existing user assignment through properties
prop_name = self.property_name(userservice.user)
now = model_utils.sql_datetime()
now = model_utils.sql_now()
excluded_servers_uuids = excluded_servers_uuids or set()
@ -457,7 +457,7 @@ class ServerManager(metaclass=singleton.Singleton):
List of servers sorted by usage
"""
with transaction.atomic():
now = model_utils.sql_datetime()
now = model_utils.sql_now()
fltrs = server_group.servers.filter(maintenance_mode=False)
fltrs = fltrs.filter(Q(locked_until=None) | Q(locked_until__lte=now)) # Only unlocked servers
if excluded_servers_uuids:

View File

@ -38,25 +38,25 @@ from django.conf import settings
from uds import models
from uds.core import consts, osmanagers, types
from uds.core.util import log
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.REST.utils import rest_result
logger = logging.getLogger(__name__)
def process_log(server: 'models.Server', data: dict[str, typing.Any]) -> typing.Any:
# Log level is a string, as in log.LogLevel
# Log level is a string, as in types.log.LogLevel
if data.get('userservice_uuid', None): # Log for an user service
try:
userService = models.UserService.objects.get(uuid=data['userservice_uuid'])
log.log(
userService, log.LogLevel.from_str(data['level']), data['message'], source=log.LogSource.SERVER
userService, types.log.LogLevel.from_str(data['level']), data['message'], source=types.log.LogSource.SERVER
)
return rest_result(consts.OK)
except models.UserService.DoesNotExist:
pass # If not found, log on server
log.log(server, log.LogLevel.from_str(data['level']), data['message'], source=log.LogSource.SERVER)
log.log(server, types.log.LogLevel.from_str(data['level']), data['message'], source=types.log.LogSource.SERVER)
return rest_result(consts.OK)
@ -150,7 +150,7 @@ def process_ping(server: 'models.Server', data: dict[str, typing.Any]) -> typing
if 'stats' in data:
server.stats = types.servers.ServerStats.from_dict(data['stats'])
# Set stats on server
server.last_ping = sql_datetime()
server.last_ping = sql_now()
return rest_result(consts.OK)

View File

@ -39,7 +39,7 @@ import collections.abc
from uds.core import types, consts
from uds.core.util import security, cache
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
if typing.TYPE_CHECKING:
@ -63,7 +63,7 @@ def restrain_server(func: collections.abc.Callable[..., typing.Any]) -> collecti
try:
return func(self, *args, **kwargs)
except Exception as e:
restrained_until = sql_datetime() + datetime.timedelta(seconds=consts.system.FAILURE_TIMEOUT)
restrained_until = sql_now() + datetime.timedelta(seconds=consts.system.FAILURE_TIMEOUT)
logger.exception('Error executing %s: %s. Server restrained until %s', func.__name__, e, restrained_until)
self.server.set_restrained_until(
restrained_until

View File

@ -39,7 +39,7 @@ import typing
from uds.core import types
from uds.core.util import singleton
from uds.core.util.config import GlobalConfig
from uds.core.util.model import sql_datetime, sql_stamp_seconds
from uds.core.util.model import sql_now, sql_stamp_seconds
from uds.models import StatsCounters, StatsCountersAccum, StatsEvents
if typing.TYPE_CHECKING:
@ -85,7 +85,7 @@ class StatsManager(metaclass=singleton.Singleton):
model: type[typing.Union['StatsCounters', 'StatsEvents', 'StatsCountersAccum']],
) -> None:
minTime = time.mktime(
(sql_datetime() - datetime.timedelta(days=GlobalConfig.STATS_DURATION.as_int())).timetuple()
(sql_now() - datetime.timedelta(days=GlobalConfig.STATS_DURATION.as_int())).timetuple()
)
model.objects.filter(stamp__lt=minTime).delete()
@ -115,7 +115,7 @@ class StatsManager(metaclass=singleton.Singleton):
Nothing
"""
if stamp is None:
stamp = sql_datetime()
stamp = sql_now()
# To Unix epoch
stampInt = int(time.mktime(stamp.timetuple())) # pylint: disable=maybe-no-member
@ -188,7 +188,7 @@ class StatsManager(metaclass=singleton.Singleton):
if since is None:
if points is None:
points = 100 # If since is not specified, we need at least points, get a default
since = sql_datetime() - datetime.timedelta(seconds=intervalType.seconds() * points)
since = sql_now() - datetime.timedelta(seconds=intervalType.seconds() * points)
if isinstance(since, datetime.datetime):
since = int(since.timestamp())

View File

@ -50,7 +50,7 @@ from uds.core.services.exceptions import (
)
from uds.core.util import log, singleton
from uds.core.util.decorators import cached
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.types.states import State
from uds.core.util.stats import events
from uds.models import MetaPool, ServicePool, ServicePoolPublication, Transport, User, UserService
@ -124,7 +124,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
"""
# Checks if userservices_limit has been reached and if so, raises an exception
self._check_user_services_limit_reached(publication.deployed_service)
now = sql_datetime()
now = sql_now()
return publication.userServices.create(
cache_level=cacheLevel,
state=State.PREPARING,
@ -144,7 +144,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
Private method to instatiate an assigned element at database with default state
"""
self._check_user_services_limit_reached(publication.deployed_service)
now = sql_datetime()
now = sql_now()
return publication.userServices.create(
cache_level=0,
state=State.PREPARING,
@ -166,7 +166,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
an UserService with no publications, and create them from an ServicePool
"""
self._check_user_services_limit_reached(service_pool)
now = sql_datetime()
now = sql_now()
return service_pool.userServices.create(
cache_level=0,
state=State.PREPARING,
@ -509,9 +509,9 @@ class UserServiceManager(metaclass=singleton.Singleton):
): # cacheUpdater will drop unnecesary L1 machines, so it's not neccesary to check against inCacheL1
log.log(
service_pool,
log.LogLevel.WARNING,
types.log.LogLevel.WARNING,
f'Max number of services reached: {service_pool.max_srvs}',
log.LogSource.INTERNAL,
types.log.LogSource.INTERNAL,
)
raise MaxServicesReachedError()
@ -805,9 +805,9 @@ class UserServiceManager(metaclass=singleton.Singleton):
service_status = types.services.ReadyStatus.USERSERVICE_NO_IP
log.log(
user_service,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f"User {user.pretty_name} from {src_ip} has initiated access",
log.LogSource.WEB,
types.log.LogSource.WEB,
)
# If ready, show transport for this service, if also ready ofc
userServiceInstance = user_service.get_instance()
@ -819,9 +819,9 @@ class UserServiceManager(metaclass=singleton.Singleton):
service_status = types.services.ReadyStatus.USERSERVICE_INVALID_UUID
log.log(
user_service,
log.LogLevel.WARNING,
types.log.LogLevel.WARNING,
f'User service is not accessible due to invalid UUID (user: {user.pretty_name}, ip: {ip})',
log.LogSource.TRANSPORT,
types.log.LogSource.TRANSPORT,
)
logger.debug('UUID check failed for user service %s', user_service)
else:
@ -837,7 +837,9 @@ class UserServiceManager(metaclass=singleton.Singleton):
service_status = types.services.ReadyStatus.TRANSPORT_NOT_READY
transportInstance = transport.get_instance()
if transportInstance.is_ip_allowed(user_service, ip):
log.log(user_service, log.LogLevel.INFO, "User service ready", log.LogSource.WEB)
log.log(
user_service, types.log.LogLevel.INFO, "User service ready", types.log.LogSource.WEB
)
self.notify_preconnect(
user_service,
transportInstance.get_connection_info(user_service, user, ''),
@ -858,7 +860,7 @@ class UserServiceManager(metaclass=singleton.Singleton):
)
message = transportInstance.get_available_error_msg(user_service, ip)
log.log(user_service, log.LogLevel.WARNING, message, log.LogSource.TRANSPORT)
log.log(user_service, types.log.LogLevel.WARNING, message, types.log.LogSource.TRANSPORT)
logger.debug(
'Transport is not ready for user service %s: %s',
user_service,
@ -869,9 +871,9 @@ class UserServiceManager(metaclass=singleton.Singleton):
else:
log.log(
user_service,
log.LogLevel.WARNING,
types.log.LogLevel.WARNING,
f'User {user.pretty_name} from {src_ip} tried to access, but service was not ready',
log.LogSource.WEB,
types.log.LogSource.WEB,
)
trace_logger.error(
@ -1059,8 +1061,8 @@ class UserServiceManager(metaclass=singleton.Singleton):
log.log(
meta,
log.LogLevel.WARNING,
types.log.LogLevel.WARNING,
f'No user service accessible from device (ip {srcIp}, os: {os.os.name})',
log.LogSource.SERVICE,
types.log.LogSource.SERVICE,
)
raise InvalidServiceException(_('The service is not accessible from this device'))

View File

@ -65,7 +65,7 @@ class StateUpdater(abc.ABC):
logger.error('Got error on processor: %s', msg)
self.save(types.states.State.ERROR)
if msg is not None:
log.log(self.user_service, log.LogLevel.ERROR, msg, log.LogSource.INTERNAL)
log.log(self.user_service, types.log.LogLevel.ERROR, msg, types.log.LogSource.INTERNAL)
def save(self, newState: typing.Optional[str] = None) -> None:
if newState:
@ -263,7 +263,7 @@ class UserServiceOpChecker(DelayedTask):
except Exception as e:
logger.exception('Checking service state')
log.log(userservice, log.LogLevel.ERROR, f'Exception: {e}', log.LogSource.INTERNAL)
log.log(userservice, types.log.LogLevel.ERROR, f'Exception: {e}', types.log.LogSource.INTERNAL)
userservice.set_state(types.states.State.ERROR)
userservice.save(update_fields=['data'])
@ -304,7 +304,7 @@ class UserServiceOpChecker(DelayedTask):
# Exception caught, mark service as errored
logger.exception("Error %s, %s :", e.__class__, e)
if userservice:
log.log(userservice, log.LogLevel.ERROR, f'Exception: {e}', log.LogSource.INTERNAL)
log.log(userservice, types.log.LogLevel.ERROR, f'Exception: {e}', types.log.LogSource.INTERNAL)
try:
userservice.set_state(types.states.State.ERROR)
userservice.save(update_fields=['data'])

View File

@ -36,9 +36,9 @@ import typing
from uds.core.managers.task import BaseThread
from uds.models import Notifier, Notification
from uds.core import consts
from uds.core.util.model import sql_datetime
from .provider import Notifier as NotificationProviderModule, LogLevel
from uds.core import consts, types
from uds.core.util.model import sql_now
from .provider import Notifier as NotificationProviderModule
from .config import DO_NOT_REPEAT
logger = logging.getLogger(__name__)
@ -76,7 +76,7 @@ class MessageProcessorThread(BaseThread):
while self._keep_running:
# Locate all notifications from "persistent" and try to process them
# If no notification can be fully resolved, it will be kept in the database
not_before = sql_datetime() - datetime.timedelta(
not_before = sql_now() - datetime.timedelta(
seconds=DO_NOT_REPEAT.as_int()
)
for n in Notification.get_persistent_queryset().all():
@ -130,7 +130,7 @@ class MessageProcessorThread(BaseThread):
p.notify(
n.group,
n.identificator,
LogLevel.from_int(n.level),
types.log.LogLevel.from_int(n.level),
n.message,
)
except Exception:

View File

@ -44,7 +44,7 @@ from django.utils.translation import gettext_noop as _
from uds.core import exceptions, types
from uds.core.ui import gui
from uds.core.module import Module
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.models.network import Network
if typing.TYPE_CHECKING:
@ -230,7 +230,7 @@ class MFA(Module):
Internal method to put the data into storage
"""
storageKey = request.ip + userId
self.storage.save_pickled(storageKey, (sql_datetime(), code))
self.storage.save_pickled(storageKey, (sql_now(), code))
def process(
self,
@ -267,7 +267,7 @@ class MFA(Module):
try:
if data and validity:
# if we have a stored code, check if it's still valid
if data[0] + datetime.timedelta(seconds=validity) > sql_datetime():
if data[0] + datetime.timedelta(seconds=validity) > sql_now():
# if it's still valid, just return without sending a new one
return MFA.RESULT.OK
except Exception:
@ -320,7 +320,7 @@ class MFA(Module):
data = self._get_data(request, userId)
if data and len(data) == 2:
validity = validity if validity is not None else 0
if validity > 0 and data[0] + datetime.timedelta(seconds=validity) < sql_datetime():
if validity > 0 and data[0] + datetime.timedelta(seconds=validity) < sql_now():
# if it is no more valid, raise an error
# Remove stored code and raise error
self._remove_data(request, userId)

View File

@ -262,9 +262,9 @@ class OSManager(Module):
log.log(
userservice,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'User {username} has logged in',
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
log.log_use(
@ -326,9 +326,9 @@ class OSManager(Module):
log.log(
userservice,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'User {username} has logged out',
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
log.log_use(

View File

@ -37,7 +37,7 @@ import typing
import collections.abc
from uds.core import services, types, consts
from uds.core.util import log, autoserializable
from uds.core.util import autoserializable
from uds.core.util.model import sql_stamp_seconds
from .. import exceptions
@ -214,7 +214,7 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
self._error_debug_info = self._debug(repr(reason))
reason = str(reason)
logger.debug('Setting error state, reason: %s (%s)', reason, self._queue, stack_info=True, stacklevel=3)
self.do_log(log.LogLevel.ERROR, reason)
self.do_log(types.log.LogLevel.ERROR, reason)
if self._vmid:
if self.service().should_maintain_on_error() is False:
@ -224,7 +224,7 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
except Exception as e:
logger.exception('Exception removing machine %s: %s', self._vmid, e)
self._vmid = ''
self.do_log(log.LogLevel.ERROR, f'Error removing machine: {e}')
self.do_log(types.log.LogLevel.ERROR, f'Error removing machine: {e}')
else:
logger.debug('Keep on error is enabled, not removing machine')
self._set_queue([types.services.Operation.FINISH] if self.keep_state_sets_error else [types.services.Operation.ERROR])
@ -676,7 +676,7 @@ class DynamicUserService(services.UserService, autoserializable.AutoSerializable
if sql_stamp_seconds() - shutdown_start > consts.os.MAX_GUEST_SHUTDOWN_WAIT:
logger.debug('Time is consumed, falling back to stop on vmid %s', self._vmid)
self.do_log(
log.LogLevel.ERROR,
types.log.LogLevel.ERROR,
f'Could not shutdown machine using soft power off in time ({consts.os.MAX_GUEST_SHUTDOWN_WAIT} seconds). Powering off.',
)
# Not stopped by guest in time, but must be stopped normally

View File

@ -37,7 +37,7 @@ import collections.abc
from uds.core import consts, services, types
from uds.core.types.services import Operation
from uds.core.util import log, autoserializable
from uds.core.util import autoserializable
from .. import exceptions
@ -145,7 +145,7 @@ class FixedUserService(services.UserService, autoserializable.AutoSerializable,
"""
reason = str(reason)
logger.debug('Setting error state, reason: %s (%s)', reason, self._queue, stack_info=True, stacklevel=3)
self.do_log(log.LogLevel.ERROR, reason)
self.do_log(types.log.LogLevel.ERROR, reason)
if self._vmid:
if self.service().should_maintain_on_error() is False:
@ -229,7 +229,7 @@ class FixedUserService(services.UserService, autoserializable.AutoSerializable,
if self._vmid:
return self.service().get_ip(self._vmid)
except exceptions.NotFoundError:
self.do_log(log.LogLevel.ERROR, f'Machine not found: {self._vmid}::{self._name}')
self.do_log(types.log.LogLevel.ERROR, f'Machine not found: {self._vmid}::{self._name}')
except Exception:
pass

View File

@ -224,14 +224,14 @@ class ServiceProvider(module.Module):
return ret_val
def do_log(self, level: log.LogLevel, message: str) -> None:
def do_log(self, level: 'types.log.LogLevel', message: str) -> None:
"""
Logs a message with requested level associated with this service
"""
from uds.models import Provider as DBProvider # pylint: disable=import-outside-toplevel
if self.get_uuid():
log.log(DBProvider.objects.get(uuid=self.get_uuid()), level, message, log.LogSource.SERVICE)
log.log(DBProvider.objects.get(uuid=self.get_uuid()), level, message, types.log.LogSource.SERVICE)
def __str__(self) -> str:
"""

View File

@ -482,14 +482,14 @@ class Service(Module):
"""
return False
def do_log(self, level: log.LogLevel, message: str) -> None:
def do_log(self, level: types.log.LogLevel, message: str) -> None:
"""
Logs a message with requested level associated with this service
"""
from uds.models import Service as DBService # pylint: disable=import-outside-toplevel
if self.get_uuid():
log.log(DBService.objects.get(uuid=self.get_uuid()), level, message, log.LogSource.SERVICE)
log.log(DBService.objects.get(uuid=self.get_uuid()), level, message, types.log.LogSource.SERVICE)
@classmethod
def can_assign(cls) -> bool:

View File

@ -227,12 +227,12 @@ class UserService(Environmentable, Serializable, abc.ABC):
def get_uuid(self) -> str:
return self._uuid
def do_log(self, level: log.LogLevel, message: str) -> None:
def do_log(self, level: types.log.LogLevel, message: str) -> None:
"""
Logs a message with requested level associated with this user deployment
"""
if self._db_obj:
log.log(self._db_obj, level, message, log.LogSource.SERVICE)
log.log(self._db_obj, level, message, types.log.LogSource.SERVICE)
def mac_generator(self) -> 'UniqueMacGenerator':
"""

View File

@ -47,6 +47,7 @@ from . import (
transports,
ui,
core,
log,
)
# Log is not imported here, as it is a special case with lots of dependencies

View File

@ -3,12 +3,90 @@ import collections.abc
import functools
import enum
from uds import models
# Not imported at runtime, just for type checking
if typing.TYPE_CHECKING:
from django.db.models import Model
class LogLevel(enum.IntEnum):
OTHER = 10000
DEBUG = 20000
INFO = 30000
WARNING = 40000
ERROR = 50000
CRITICAL = 60000
def __str__(self) -> str:
return self.name
def __repr__(self) -> str:
return self.name
@staticmethod
def from_str(level: str) -> 'LogLevel':
try:
return LogLevel[level.upper()]
except Exception:
# logger.error('Error getting log level from string: %s', e)
return LogLevel.OTHER
@staticmethod
def from_int(level: int) -> 'LogLevel':
try:
return LogLevel(level)
except ValueError:
return LogLevel.OTHER
@staticmethod
def from_actor_level(level: int) -> 'LogLevel':
"""
Returns the log level for actor log level
"""
return [LogLevel.DEBUG, LogLevel.INFO, LogLevel.ERROR, LogLevel.CRITICAL][level % 4]
@staticmethod
def from_logging_level(level: int) -> 'LogLevel':
"""
Returns the log level for logging log level
"""
return [
LogLevel.OTHER,
LogLevel.DEBUG,
LogLevel.INFO,
LogLevel.WARNING,
LogLevel.ERROR,
LogLevel.CRITICAL,
][level // 10]
# Return all Log levels as tuples of (level value, level name)
@staticmethod
def all() -> list[tuple[int, str]]:
return [(level.value, level.name) for level in LogLevel]
# Rteturns "interesting" log levels
@staticmethod
def interesting() -> list[tuple[int, str]]:
"""Returns "interesting" log levels
Interesting log levels are those that are ABOBE INFO level (that is, errors, etc..)
"""
return [(level.value, level.name) for level in LogLevel if level.value > LogLevel.INFO.value]
class LogSource(enum.StrEnum):
INTERNAL = 'internal'
ACTOR = 'actor'
TRANSPORT = 'transport'
OSMANAGER = 'osmanager'
UNKNOWN = 'unknown'
WEB = 'web'
ADMIN = 'admin'
SERVICE = 'service'
SERVER = 'server'
REST = 'rest'
LOGS = 'logs'
# Note: Once assigned a value, do not change it, as it will break the log
class LogObjectType(enum.IntEnum):
USERSERVICE = 0
@ -33,24 +111,26 @@ class LogObjectType(enum.IntEnum):
if self == LogObjectType.SYSLOG:
return GlobalConfig.GENERAL_LOG_MAX_ELEMENTS.as_int()
return GlobalConfig.INDIVIDIAL_LOG_MAX_ELEMENTS.as_int()
@staticmethod
def get_type_from_model(model: 'Model') -> 'LogObjectType|None':
"""
Returns the type of log object from the model
"""
return _MODEL_TO_TYPE.get(type(model), None)
from uds import models
# Dict for translations
_MODEL_TO_TYPE: typing.Final[collections.abc.Mapping[type['Model'], LogObjectType]] = {
models.UserService: LogObjectType.USERSERVICE,
models.ServicePoolPublication: LogObjectType.PUBLICATION,
models.ServicePool: LogObjectType.SERVICEPOOL,
models.Service: LogObjectType.SERVICE,
models.Server: LogObjectType.SERVER,
models.Provider: LogObjectType.PROVIDER,
models.User: LogObjectType.USER,
models.Group: LogObjectType.GROUP,
models.Authenticator: LogObjectType.AUTHENTICATOR,
models.MetaPool: LogObjectType.METAPOOL,
}
# Dict for translations
_MODEL_TO_TYPE: typing.Final[collections.abc.Mapping[type['Model'], 'LogObjectType']] = {
models.UserService: LogObjectType.USERSERVICE,
models.ServicePoolPublication: LogObjectType.PUBLICATION,
models.ServicePool: LogObjectType.SERVICEPOOL,
models.Service: LogObjectType.SERVICE,
models.Server: LogObjectType.SERVER,
models.Provider: LogObjectType.PROVIDER,
models.User: LogObjectType.USER,
models.Group: LogObjectType.GROUP,
models.Authenticator: LogObjectType.AUTHENTICATOR,
models.MetaPool: LogObjectType.METAPOOL,
}
return _MODEL_TO_TYPE.get(type(model), None)

View File

@ -38,7 +38,7 @@ import logging
from django.db import transaction
from uds.models.cache import Cache as DBCache
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.util import serializer
from uds.core import consts
@ -74,7 +74,7 @@ class Cache:
return hash_key(self._owner.encode() + key)
def get(self, skey: typing.Union[str, bytes], default: typing.Any = None) -> typing.Any:
now = sql_datetime()
now = sql_now()
# logger.debug('Requesting key "%s" for cache "%s"', skey, self._owner)
try:
key = self._get_key(skey)
@ -159,7 +159,7 @@ class Cache:
validity = consts.cache.DEFAULT_CACHE_TIMEOUT
key = self._get_key(skey)
strValue = Cache._serializer(value)
now = sql_datetime()
now = sql_now()
# Remove existing if any and create a new one
with transaction.atomic():
try:
@ -200,7 +200,7 @@ class Cache:
try:
key = self._get_key(skey)
c = DBCache.objects.get(pk=key)
c.created = sql_datetime()
c.created = sql_now()
c.save()
except DBCache.DoesNotExist:
logger.debug('Can\'t refresh cache key %s because it doesn\'t exists', skey)

View File

@ -40,7 +40,7 @@ import bitarray
from django.core.cache import caches
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.models.calendar import Calendar
@ -140,7 +140,7 @@ class CalendarChecker:
@param dtime: Datetime object to check
"""
if dtime is None:
dtime = sql_datetime()
dtime = sql_now()
# memcached access
memcache_storage = caches['memory']
@ -182,7 +182,7 @@ class CalendarChecker:
"""
logger.debug('Obtaining nextEvent')
if not check_from:
check_from = sql_datetime()
check_from = sql_now()
if not offset:
offset = datetime.timedelta(minutes=0)

View File

@ -319,7 +319,7 @@ def cached(
# Execute the function outside the DB transaction
t = time.thread_time_ns()
data = fnc(*args, **kwargs)
data = fnc(*args, **kwargs) # pyright: ignore # For some reason, pyright does not like this line
exec_time += time.thread_time_ns() - t
try:

View File

@ -34,11 +34,12 @@ import os
import logging
import logging.handlers
import typing
import enum
import re
from django.apps import apps
from uds.core.types.log import LogLevel, LogSource
try:
from systemd import journal
except ImportError:
@ -59,85 +60,6 @@ LOGLEVEL_PATTERN: typing.Final[typing.Pattern[str]] = re.compile(r'^(DEBUG|INFO|
class LogLevel(enum.IntEnum):
OTHER = 10000
DEBUG = 20000
INFO = 30000
WARNING = 40000
ERROR = 50000
CRITICAL = 60000
def __str__(self) -> str:
return self.name
def __repr__(self) -> str:
return self.name
@staticmethod
def from_str(level: str) -> 'LogLevel':
try:
return LogLevel[level.upper()]
except Exception:
# logger.error('Error getting log level from string: %s', e)
return LogLevel.OTHER
@staticmethod
def from_int(level: int) -> 'LogLevel':
try:
return LogLevel(level)
except ValueError:
return LogLevel.OTHER
@staticmethod
def from_actor_level(level: int) -> 'LogLevel':
"""
Returns the log level for actor log level
"""
return [LogLevel.DEBUG, LogLevel.INFO, LogLevel.ERROR, LogLevel.CRITICAL][level % 4]
@staticmethod
def from_logging_level(level: int) -> 'LogLevel':
"""
Returns the log level for logging log level
"""
return [
LogLevel.OTHER,
LogLevel.DEBUG,
LogLevel.INFO,
LogLevel.WARNING,
LogLevel.ERROR,
LogLevel.CRITICAL,
][level // 10]
# Return all Log levels as tuples of (level value, level name)
@staticmethod
def all() -> list[tuple[int, str]]:
return [(level.value, level.name) for level in LogLevel]
# Rteturns "interesting" log levels
@staticmethod
def interesting() -> list[tuple[int, str]]:
"""Returns "interesting" log levels
Interesting log levels are those that are ABOBE INFO level (that is, errors, etc..)
"""
return [(level.value, level.name) for level in LogLevel if level.value > LogLevel.INFO.value]
class LogSource(enum.StrEnum):
INTERNAL = 'internal'
ACTOR = 'actor'
TRANSPORT = 'transport'
OSMANAGER = 'osmanager'
UNKNOWN = 'unknown'
WEB = 'web'
ADMIN = 'admin'
SERVICE = 'service'
SERVER = 'server'
REST = 'rest'
LOGS = 'logs'
def log_use(
type_: str,
serviceUniqueId: str,

View File

@ -87,7 +87,7 @@ class TimeTrack:
return date
@staticmethod
def sql_datetime() -> datetime.datetime:
def sql_now() -> datetime.datetime:
now = datetime.datetime.now()
with TimeTrack.lock:
diff = now - TimeTrack.last_check
@ -102,11 +102,11 @@ class TimeTrack:
return TimeTrack.cached_time + (now - TimeTrack.last_check)
def sql_datetime() -> datetime.datetime:
def sql_now() -> datetime.datetime:
"""Returns the current date/time of the database server.
Has been updated to use TimeTrack, which reduces the queries to database to get the current time
"""
return TimeTrack.sql_datetime()
return TimeTrack.sql_now()
def sql_stamp_seconds() -> int:
@ -115,7 +115,7 @@ def sql_stamp_seconds() -> int:
Returns:
int: Unix timestamp
"""
return int(mktime(sql_datetime().timetuple()))
return int(mktime(sql_now().timetuple()))
def sql_stamp() -> float:
@ -124,7 +124,7 @@ def sql_stamp() -> float:
Returns:
float: Unix timestamp
"""
return float(mktime(sql_datetime().timetuple())) + sql_datetime().microsecond / 1000000.0
return float(mktime(sql_now().timetuple())) + sql_now().microsecond / 1000000.0
def generate_uuid(obj: typing.Any = None) -> str:

View File

@ -34,12 +34,13 @@ from datetime import timedelta
from django.db.models import Q, Count
from uds.core import types
from uds.core.jobs import Job
from uds.core.util import log
from uds.core.util.config import GlobalConfig
from uds.core.types.states import State
from uds.models import ServicePool
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
logger = logging.getLogger(__name__)
@ -50,7 +51,7 @@ class AssignedAndUnused(Job):
friendly_name = 'Unused services checker'
def run(self) -> None:
since_state = sql_datetime() - timedelta(
since_state = sql_now() - timedelta(
seconds=GlobalConfig.CHECK_UNUSED_TIME.as_int()
)
# Locate service pools with pending assigned service in use
@ -93,8 +94,8 @@ class AssignedAndUnused(Job):
)
log.log(
us,
log.LogLevel.INFO,
source=log.LogSource.SERVER,
types.log.LogLevel.INFO,
source=types.log.LogSource.SERVER,
message='Removing unused assigned service',
)
us.release()

View File

@ -33,10 +33,11 @@ from datetime import timedelta
import logging
from django.db.models import Q, Count
from uds.core import types
from uds.core.util.config import GlobalConfig
from uds.models import ServicePool, UserService
from uds.core.util.model import sql_datetime
from uds.core.types.states import State
from uds.core.util.model import sql_now
from uds.core.jobs import Job
from uds.core.util import log
@ -49,15 +50,15 @@ class HangedCleaner(Job):
friendly_name = 'Hanged services checker'
def run(self) -> None:
now = sql_datetime()
now = sql_now()
since_state = now - timedelta(
seconds=GlobalConfig.MAX_INITIALIZING_TIME.as_int()
)
since_removing = now - timedelta(seconds=GlobalConfig.MAX_REMOVAL_TIME.as_int())
# Filter for locating machine not ready
flt = Q(state_date__lt=since_state, state=State.PREPARING) | Q(
state_date__lt=since_state, state=State.USABLE, os_state=State.PREPARING
) | Q(state_date__lt=since_removing, state__in=[State.REMOVING, State.CANCELING])
flt = Q(state_date__lt=since_state, state=types.states.State.PREPARING) | Q(
state_date__lt=since_state, state=types.states.State.USABLE, os_state=types.states.State.PREPARING
) | Q(state_date__lt=since_removing, state__in=[types.states.State.REMOVING, types.states.State.CANCELING])
servicepools_with_hanged = (
ServicePool.objects.annotate(
@ -66,22 +67,22 @@ class HangedCleaner(Job):
# Rewrited Filter for servicePool
filter=Q(
userServices__state_date__lt=since_state,
userServices__state=State.PREPARING,
userServices__state=types.states.State.PREPARING,
)
| Q(
userServices__state_date__lt=since_state,
userServices__state=State.USABLE,
userServices__os_state=State.PREPARING,
userServices__state=types.states.State.USABLE,
userServices__os_state=types.states.State.PREPARING,
)
| Q(
userServices__state_date__lt=since_removing,
userServices__state__in=[State.REMOVING, State.CANCELING],
userServices__state__in=[types.states.State.REMOVING, types.states.State.CANCELING],
),
)
)
.exclude(hanged=0)
.exclude(service__provider__maintenance_mode=True)
.filter(state=State.ACTIVE)
.filter(state=types.states.State.ACTIVE)
)
# Type
@ -95,30 +96,30 @@ class HangedCleaner(Job):
continue
logger.debug('Found hanged service %s', us)
if (
us.state in [State.REMOVING, State.CANCELING]
us.state in [types.states.State.REMOVING, types.states.State.CANCELING]
): # Removing too long, remark it as removable
log.log(
us,
log.LogLevel.ERROR,
types.log.LogLevel.ERROR,
'User Service hanged on removal process. Restarting removal.',
log.LogSource.INTERNAL,
types.log.LogSource.INTERNAL,
)
log.log(
servicePool,
log.LogLevel.ERROR,
types.log.LogLevel.ERROR,
f'User service {us.friendly_name} hanged on removal. Restarting removal.',
)
us.release() # Mark it again as removable, and let's see
else:
log.log(
us,
log.LogLevel.ERROR,
types.log.LogLevel.ERROR,
'User Service seems to be hanged. Removing it.',
log.LogSource.INTERNAL,
types.log.LogSource.INTERNAL,
)
log.log(
servicePool,
log.LogLevel.ERROR,
types.log.LogLevel.ERROR,
f'Removing user service {us.friendly_name} because it seems to be hanged'
)
us.release_or_cancel()

View File

@ -36,7 +36,7 @@ import collections.abc
from uds.core.managers import publication_manager
from uds.core.util.config import GlobalConfig
from uds.models import ServicePoolPublication
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.services.exceptions import PublishException
from uds.core.types.states import State
from uds.core.jobs import Job
@ -52,7 +52,7 @@ class PublicationInfoItemsCleaner(Job):
friendly_name = 'Publications Info Cleaner'
def run(self) -> None:
removeFrom = sql_datetime() - timedelta(
removeFrom = sql_now() - timedelta(
seconds=GlobalConfig.KEEP_INFO_TIME.as_int(True)
)
ServicePoolPublication.objects.filter(

View File

@ -34,7 +34,7 @@ import logging
from uds.core import types
from uds.models import CalendarAction
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.jobs import Job
logger = logging.getLogger(__name__)
@ -49,7 +49,7 @@ class ScheduledAction(Job):
for configuredAction in CalendarAction.objects.filter(
service_pool__service__provider__maintenance_mode=False, # Avoid maintenance
service_pool__state=types.states.State.ACTIVE, # Avoid Non active pools
next_execution__lt=sql_datetime(),
next_execution__lt=sql_now(),
).order_by('next_execution'):
logger.info(
'Executing calendar action %s.%s (%s)',

View File

@ -36,7 +36,7 @@ import logging
from django.db import transaction
from uds.models import Scheduler
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.types.states import State
from uds.core.jobs import Job
@ -57,7 +57,7 @@ class SchedulerHousekeeping(Job):
"""
Look for "hanged" scheduler tasks and reschedule them
"""
since = sql_datetime() - timedelta(minutes=MAX_EXECUTION_MINUTES)
since = sql_now() - timedelta(minutes=MAX_EXECUTION_MINUTES)
for _ in range(3): # Retry three times in case of lockout error
try:
with transaction.atomic():

View File

@ -36,7 +36,7 @@ import collections.abc
from django.db import transaction
from uds.core.util.config import GlobalConfig
from uds.models import ServicePool, UserService
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.types.states import State
from uds.core.jobs import Job
@ -53,7 +53,7 @@ class DeployedServiceInfoItemsCleaner(Job):
friendly_name = 'Deployed Service Info Cleaner'
def run(self) -> None:
removeFrom = sql_datetime() - timedelta(
removeFrom = sql_now() - timedelta(
seconds=GlobalConfig.KEEP_INFO_TIME.as_int()
)
ServicePool.objects.filter(
@ -91,13 +91,13 @@ class DeployedServiceRemover(Job):
userService.cancel()
# Nice start of removal, maybe we need to do some limitation later, but there should not be too much services nor publications cancelable at once
service_pool.state = State.REMOVING
service_pool.state_date = sql_datetime() # Now
service_pool.state_date = sql_now() # Now
service_pool.name += ' (removed)'
service_pool.save(update_fields=['state', 'state_date', 'name'])
def continue_removal_of(self, servicePool: ServicePool) -> None:
# get current time
now = sql_datetime()
now = sql_now()
# Recheck that there is no publication created just after "startRemovalOf"
try:
@ -194,9 +194,9 @@ class DeployedServiceRemover(Job):
for servicepool in already_removing_servicepools:
try:
if servicepool.state_date.year == 1972:
servicepool.state_date = sql_datetime()
servicepool.state_date = sql_now()
servicepool.save(update_fields=['state_date'])
if servicepool.state_date < sql_datetime() - timedelta(
if servicepool.state_date < sql_now() - timedelta(
seconds=MAX_REMOVING_TIME
):
self.force_removal_of(servicepool) # Force removal

View File

@ -121,9 +121,9 @@ class ServiceCacheUpdater(Job):
remaining_restraing_time = servicepool.remaining_restraint_time()
log.log(
servicepool,
log.LogLevel.WARNING,
types.log.LogLevel.WARNING,
f'Service Pool is restrained due to excesive errors (will be available in {remaining_restraing_time} seconds)',
log.LogSource.INTERNAL,
types.log.LogSource.INTERNAL,
)
logger.info(
'%s will be restrained during %s seconds. Will check this later',
@ -308,9 +308,9 @@ class ServiceCacheUpdater(Job):
except MaxServicesReachedError:
log.log(
servicepool_stats.servicepool,
log.LogLevel.ERROR,
types.log.LogLevel.ERROR,
'Max number of services reached for this service',
log.LogSource.INTERNAL,
types.log.LogSource.INTERNAL,
)
logger.warning(
'Max user services reached for %s: %s. Cache not created',

View File

@ -58,7 +58,7 @@ class DeployedServiceStatsCollector(Job):
service_pool_to_check: collections.abc.Iterable[
models.ServicePool
] = models.ServicePool.objects.filter(state=State.ACTIVE).iterator()
stamp = model.sql_datetime()
stamp = model.sql_now()
# Global counters
totalAssigned, totalInUse, totalCached = 0, 0, 0
for servicePool in service_pool_to_check:

View File

@ -35,9 +35,9 @@ import collections.abc
from django.db.models import Q, Count
from uds.core import types
from uds.models import ServicePool, UserService
from uds.core.util.model import sql_datetime
from uds.core.types.states import State
from uds.core.util.model import sql_now
from uds.core.jobs import Job
from uds.core.util import log
@ -56,7 +56,7 @@ class StuckCleaner(Job):
friendly_name = 'Stuck States cleaner'
def run(self) -> None:
since_state: datetime = sql_datetime() - timedelta(seconds=MAX_STUCK_TIME)
since_state: datetime = sql_now() - timedelta(seconds=MAX_STUCK_TIME)
# Filter for locating machine stuck on removing, cancelling, etc..
# Locate service pools with pending assigned service in use
servicePoolswithStucks = (
@ -66,13 +66,13 @@ class StuckCleaner(Job):
filter=Q(userServices__state_date__lt=since_state)
& (
Q(
userServices__state=State.PREPARING,
userServices__state=types.states.State.PREPARING,
)
| ~Q(userServices__state__in=State.INFO_STATES + State.VALID_STATES)
| ~Q(userServices__state__in=types.states.State.INFO_STATES + types.states.State.VALID_STATES)
),
)
)
.filter(service__provider__maintenance_mode=False, state=State.ACTIVE)
.filter(service__provider__maintenance_mode=False, state=types.states.State.ACTIVE)
.exclude(stuckCount=0)
)
@ -81,8 +81,8 @@ class StuckCleaner(Job):
q = servicePool.userServices.filter(state_date__lt=since_state)
# Get all that are not in valid or info states, AND the ones that are "PREPARING" with
# "destroy_after" property set (exists) (that means that are waiting to be destroyed after initializations)
yield from q.exclude(state__in=State.INFO_STATES + State.VALID_STATES)
yield from q.filter(state=State.PREPARING)
yield from q.exclude(state__in=types.states.State.INFO_STATES + types.states.State.VALID_STATES)
yield from q.filter(state=types.states.State.PREPARING)
for servicepool in servicePoolswithStucks:
if servicepool.service.get_instance().allows_errored_userservice_cleanup() is False:
@ -92,7 +92,7 @@ class StuckCleaner(Job):
logger.debug('Found stuck user service %s', stuck)
log.log(
servicepool,
log.LogLevel.ERROR,
types.log.LogLevel.ERROR,
f'User service {stuck.name} has been hard removed because it\'s stuck',
)
# stuck.set_state(State.ERROR)

View File

@ -34,7 +34,7 @@ import logging
from django.db import transaction
from uds.models import AccountUsage
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.jobs import Job
logger = logging.getLogger(__name__)
@ -48,7 +48,7 @@ class UsageAccounting(Job):
with transaction.atomic():
AccountUsage.objects.select_for_update().filter(
user_service__in_use=True
).update(end=sql_datetime())
).update(end=sql_now())
AccountUsage.objects.select_for_update().filter(
user_service__in_use=False
).update(

View File

@ -38,7 +38,7 @@ from django.db import transaction
from uds.core.managers.userservice import UserServiceManager
from uds.core.util.config import GlobalConfig
from uds.models import UserService
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.types.states import State
from uds.core.jobs import Job
@ -58,7 +58,7 @@ class UserServiceInfoItemsCleaner(Job):
friendly_name = 'User Service Info Cleaner'
def run(self) -> None:
remove_since = sql_datetime() - timedelta(seconds=GlobalConfig.KEEP_INFO_TIME.as_int(True))
remove_since = sql_now() - timedelta(seconds=GlobalConfig.KEEP_INFO_TIME.as_int(True))
logger.debug('Removing information user services from %s', remove_since)
with transaction.atomic():
UserService.objects.select_for_update().filter(
@ -80,7 +80,7 @@ class UserServiceRemover(Job):
manager = UserServiceManager()
with transaction.atomic():
removeFrom = sql_datetime() - timedelta(
removeFrom = sql_now() - timedelta(
seconds=10
) # We keep at least 10 seconds the machine before removing it, so we avoid connections errors
candidates: collections.abc.Iterable[UserService] = UserService.objects.filter(

View File

@ -90,8 +90,8 @@ def guacamole(request: ExtendedHttpRequestWithUser, token: str, tunnelId: str) -
protocol = 'RDS' if 'remote-app' in val else val['protocol'].upper()
host = val.get('hostname', '0.0.0.0') # nosec: Not a bind, just a placeholder for "no host"
msg = f'User {user.name} started HTML5 {protocol} tunnel to {host}.'
log.log(user.manager, log.LogLevel.INFO, msg)
log.log(userService, log.LogLevel.INFO, msg)
log.log(user.manager, types.log.LogLevel.INFO, msg)
log.log(userService, types.log.LogLevel.INFO, msg)
events.add_event(
userService.deployed_service,

View File

@ -42,7 +42,7 @@ from django.core.management.base import BaseCommand
from uds.core.util import log, model, config
from uds import models
from uds.core.types.states import State
from uds.core import types
logger = logging.getLogger(__name__)
@ -136,7 +136,7 @@ class Command(BaseCommand):
return f'{cntr:02d}.-{s}'
max_items = int(options['maxitems'])
now = model.sql_datetime()
now = model.sql_now()
tree: dict[str, typing.Any] = {}
try:
@ -155,10 +155,10 @@ class Command(BaseCommand):
userservices: dict[str, typing.Any] = {}
fltr = service_pool.userServices.all()
if not options['alluserservices']:
fltr = fltr.filter(state=State.ERROR)
fltr = fltr.filter(state=types.states.State.ERROR)
for item in fltr[:max_items]: # at most max_items items
logs = [
f'{l["date"]}: {log.LogLevel.from_int(l["level"])} [{l["source"]}] - {l["message"]}'
f'{l["date"]}: {types.log.LogLevel.from_int(l["level"])} [{l["source"]}] - {l["message"]}'
for l in log.get_logs(item)
]
userservices[item.friendly_name] = {
@ -166,8 +166,8 @@ class Command(BaseCommand):
'id': item.uuid,
'unique_id': item.unique_id,
'friendly_name': item.friendly_name,
'state': State.from_str(item.state).localized,
'os_state': State.from_str(item.os_state).localized,
'state': types.states.State.from_str(item.state).localized,
'os_state': types.states.State.from_str(item.os_state).localized,
'state_date': item.state_date,
'creation_date': item.creation_date,
'revision': item.publication and item.publication.revision or '',

View File

@ -39,7 +39,7 @@ import qrcode
from django.utils.translation import gettext_noop as _, gettext
from uds import models
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core import mfas, exceptions, types
from uds.core.ui import gui
@ -200,7 +200,7 @@ class TOTP_MFA(mfas.MFA):
# Validate code
if not self.get_totp(userId, username).verify(
code, valid_window=self.valid_window.as_int(), for_time=sql_datetime()
code, valid_window=self.valid_window.as_int(), for_time=sql_now()
):
raise exceptions.auth.MFAError(gettext('Invalid code'))

View File

@ -576,12 +576,12 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='group',
name='created',
field=models.DateTimeField(blank=True, default=uds.core.util.model.sql_datetime),
field=models.DateTimeField(blank=True, default=uds.core.util.model.sql_now),
),
migrations.AddField(
model_name='user',
name='created',
field=models.DateTimeField(blank=True, default=uds.core.util.model.sql_datetime),
field=models.DateTimeField(blank=True, default=uds.core.util.model.sql_now),
),
migrations.AlterField(
model_name='deployedservice',

View File

@ -142,7 +142,7 @@ class Migration(migrations.Migration):
max_length=128,
),
),
("start", models.DateTimeField(default=uds.core.util.model.sql_datetime)),
("start", models.DateTimeField(default=uds.core.util.model.sql_now)),
("end", models.DateTimeField(blank=True, null=True)),
],
options={

View File

@ -119,7 +119,7 @@ class IPMachinesService(services.Service):
if values[0] in (b'v6', b'v7'):
self.lockByExternalAccess.value = gui.as_bool(values[5].decode())
if values[0] in (b'v7',):
self.useRandomIp = gui.as_bool(values[6].decode())
self.useRandomIp.value = gui.as_bool(values[6].decode())
# Note that will be marshalled as new format, so we don't need to care about old format in code anymore :)
def post_migrate(self, apps: typing.Any, record: typing.Any) -> None:

View File

@ -35,7 +35,7 @@ from django.db import models
from .uuid_model import UUIDModel
from .tag import TaggingMixin
from ..core.util.model import sql_datetime
from ..core.util.model import sql_now
from ..core.consts import NEVER
logger = logging.getLogger(__name__)
@ -62,7 +62,7 @@ class Account(UUIDModel, TaggingMixin):
if hasattr(userService, 'accounting'): # Already has an account
return None
start = sql_datetime()
start = sql_now()
if userService.user:
userName = userService.user.pretty_name
@ -88,7 +88,7 @@ class Account(UUIDModel, TaggingMixin):
tmp = userService.accounting
tmp.user_service = None
tmp.end = sql_datetime()
tmp.end = sql_now()
tmp.save()
return tmp

View File

@ -35,7 +35,7 @@ import logging
from django.db import models, transaction
from ..core.util.model import sql_datetime
from ..core.util.model import sql_now
logger = logging.getLogger(__name__)
@ -69,14 +69,14 @@ class Cache(models.Model):
"""
Purges the cache items that are no longer vaild.
"""
now = sql_datetime()
now = sql_now()
with transaction.atomic():
for v in Cache.objects.all():
if now > v.created + timedelta(seconds=v.validity):
v.delete()
def __str__(self) -> str:
if sql_datetime() > (self.created + timedelta(seconds=self.validity)):
if sql_now() > (self.created + timedelta(seconds=self.validity)):
expired = "Expired"
else:
expired = "Active"

View File

@ -42,13 +42,12 @@ from django.utils.translation import gettext_lazy as _
from django.db import models
from uds.core.util import calendar
from uds.core.util import log
from uds.core.managers.userservice import UserServiceManager
from uds.core import types, consts
from .calendar import Calendar
from .uuid_model import UUIDModel
from ..core.util.model import sql_datetime
from ..core.util.model import sql_now
from .service_pool import ServicePool
from .transport import Transport
from .authenticator import Authenticator
@ -136,7 +135,7 @@ class CalendarAction(UUIDModel):
)
return
self.last_execution = sql_datetime()
self.last_execution = sql_now()
params = json.loads(self.params)
should_save_servicepool = save
@ -175,7 +174,7 @@ class CalendarAction(UUIDModel):
def _remove_stuck_userservice() -> None:
# 1.- Remove stuck assigned services (Ignore "creating ones", just for created)
since = sql_datetime() - datetime.timedelta(hours=_numeric_value('hours'))
since = sql_now() - datetime.timedelta(hours=_numeric_value('hours'))
for userService in self.service_pool.assigned_user_services().filter(
state_date__lt=since, state=types.states.State.USABLE
):
@ -273,7 +272,7 @@ class CalendarAction(UUIDModel):
self.service_pool.log(
f'Executed action {description} [{self.pretty_params}]',
level=log.LogLevel.INFO,
level=types.log.LogLevel.INFO,
)
except Exception:
self.service_pool.log(f'Error executing scheduled action {description} [{self.pretty_params}]')
@ -286,7 +285,7 @@ class CalendarAction(UUIDModel):
self.save()
def save(self, *args: typing.Any, **kwargs: typing.Any) -> None:
last_execution = self.last_execution or sql_datetime()
last_execution = self.last_execution or sql_now()
possibleNext = calendar.CalendarChecker(self.calendar).next_event(
check_from=last_execution - self.offset, start_event=self.at_start
)

View File

@ -43,7 +43,7 @@ from dateutil import rrule as rules
from .uuid_model import UUIDModel
from .calendar import Calendar
from ..core.util.model import sql_datetime
from ..core.util.model import sql_now
logger = logging.getLogger(__name__)
@ -188,7 +188,7 @@ class CalendarRule(UUIDModel):
def save(self, *args: typing.Any, **kwargs: typing.Any) -> None:
logger.debug('Saving...')
self.calendar.modified = sql_datetime()
self.calendar.modified = sql_now()
super().save(*args, **kwargs)
# Ensure saves associated calendar, so next execution of actions is updated with rule values

View File

@ -41,7 +41,7 @@ from uds.core.util import log
from .uuid_model import UUIDModel
from .authenticator import Authenticator
from .user import User
from ..core.util.model import sql_datetime
from ..core.util.model import sql_now
# Not imported at runtime, just for type checking
if typing.TYPE_CHECKING:
@ -69,7 +69,7 @@ class Group(UUIDModel):
# if it is false, the user must belong to ALL of the groups to be considered as belonging to this group
meta_if_any = models.BooleanField(default=False)
groups: 'models.ManyToManyField[Group, Group]' = models.ManyToManyField('self', symmetrical=False)
created = models.DateTimeField(default=sql_datetime, blank=True)
created = models.DateTimeField(default=sql_now, blank=True)
skip_mfa = models.CharField(max_length=1, default=State.INACTIVE, db_index=True)
# "fake" declarations for type checking

View File

@ -29,6 +29,7 @@
Author: Adolfo Gómez, dkmaster at dkmon dot com
"""
# pyright: reportUnknownMemberType=false, reportAttributeAccessIssue=false,reportUnknownArgumentType=false
# mypy: disable-error-code="attr-defined, no-untyped-call"
import io
import base64
import logging
@ -42,7 +43,7 @@ from django.http import HttpResponse
from .uuid_model import UUIDModel
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core import consts
logger = logging.getLogger(__name__)
@ -194,7 +195,7 @@ class Image(UUIDModel):
return HttpResponse(self.thumb, content_type='image/png')
def save(self, *args: typing.Any, **kwargs: typing.Any) -> None:
self.stamp = sql_datetime()
self.stamp = sql_now()
return super().save(*args, **kwargs)
def __str__(self) -> str:

View File

@ -43,7 +43,7 @@ from uds.core import consts, types
from uds.core.util import log
from uds.core.util.calendar import CalendarChecker
from ..core.util.model import sql_datetime
from ..core.util.model import sql_now
from .group import Group
from .image import Image
from .service_pool import ServicePool
@ -147,7 +147,7 @@ class MetaPool(UUIDModel, TaggingMixin):
Checks if the access for a service pool is allowed or not (based esclusively on associated calendars)
"""
if chkDateTime is None:
chkDateTime = sql_datetime()
chkDateTime = sql_now()
access = self.fallbackAccess
# Let's see if we can access by current datetime

View File

@ -43,7 +43,7 @@ from uds.core.types.permissions import PermissionType
from .uuid_model import UUIDModel
from .user import User
from .group import Group
from ..core.util.model import sql_datetime
from ..core.util.model import sql_now
logger = logging.getLogger(__name__)
@ -121,7 +121,7 @@ class Permissions(UUIDModel):
return existing
except Exception: # Does not exists
return Permissions.objects.create(
created=sql_datetime(),
created=sql_now(),
ends=None,
user=user,
group=group,

View File

@ -39,8 +39,8 @@ from django.db.models import Q
from uds.core import consts, types
from uds.core.consts import MAC_UNKNOWN
from uds.core.types.requests import ExtendedHttpRequest
from uds.core.util import log, net, properties, resolver
from uds.core.util.model import sql_stamp, sql_datetime
from uds.core.util import net, properties, resolver
from uds.core.util.model import sql_stamp, sql_now
from .tag import TaggingMixin
from .uuid_model import UUIDModel
@ -141,9 +141,9 @@ class ServerGroup(UUIDModel, TaggingMixin, properties.PropertiesMixin):
# If not found, try to resolve ip_or_host and search again
try:
ip = resolver.resolve(ip_or_host_or_mac)[0]
found = Server.objects.filter(Q(ip=ip) | Q(hostname=ip))
if found:
return found[0]
found_2 = Server.objects.filter(Q(ip=ip) | Q(hostname=ip))
if found_2:
return found_2[0]
except Exception:
pass
return None
@ -219,7 +219,7 @@ class Server(UUIDModel, TaggingMixin, properties.PropertiesMixin):
certificate = models.TextField(default='', blank=True)
# Log level, so we can filter messages for this server
log_level = models.IntegerField(default=log.LogLevel.ERROR.value)
log_level = models.IntegerField(default=types.log.LogLevel.ERROR.value)
# Extra data, for server type custom data use (i.e. actor keeps command related data here)
data: typing.Any = models.JSONField(null=True, blank=True, default=None)
@ -295,7 +295,7 @@ class Server(UUIDModel, TaggingMixin, properties.PropertiesMixin):
if duration is None:
self.locked_until = None
else:
self.locked_until = sql_datetime() + duration
self.locked_until = sql_now() + duration
self.save(update_fields=['locked_until'])
def interpolate_new_assignation(self) -> None:
@ -319,7 +319,7 @@ class Server(UUIDModel, TaggingMixin, properties.PropertiesMixin):
If it is not available, we return False, otherwise True
"""
restrainedUntil = datetime.datetime.fromtimestamp(self.properties.get('available', consts.NEVER_UNIX))
return restrainedUntil > sql_datetime()
return restrainedUntil > sql_now()
def set_restrained_until(self, value: typing.Optional[datetime.datetime] = None) -> None:
"""Sets the availability of this server

View File

@ -42,7 +42,7 @@ from uds.core import consts, exceptions, types
from uds.core.environment import Environment
from uds.core.services.exceptions import InvalidServiceException
from uds.core.util import calendar, log, serializer
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from .account import Account
from .group import Group
@ -223,7 +223,7 @@ class ServicePool(UUIDModel, TaggingMixin):
ServicePool.objects.none()
) # Do not perform any restraint check if we set the globalconfig to 0 (or less)
date = sql_datetime() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
date = sql_now() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
min_ = GlobalConfig.RESTRAINT_COUNT.as_int()
res: list[dict[str, typing.Any]] = []
@ -272,7 +272,7 @@ class ServicePool(UUIDModel, TaggingMixin):
if GlobalConfig.RESTRAINT_TIME.as_int() <= 0:
return False # Do not perform any restraint check if we set the globalconfig to 0 (or less)
date = sql_datetime() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
date = sql_now() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
if (
self.userServices.filter(state=types.states.State.ERROR, state_date__gt=date).count()
>= GlobalConfig.RESTRAINT_COUNT.as_int()
@ -287,14 +287,14 @@ class ServicePool(UUIDModel, TaggingMixin):
if GlobalConfig.RESTRAINT_TIME.as_int() <= 0:
return 0
date = sql_datetime() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
date = sql_now() - timedelta(seconds=GlobalConfig.RESTRAINT_TIME.as_int())
count = self.userServices.filter(state=types.states.State.ERROR, state_date__gt=date).count()
if count < GlobalConfig.RESTRAINT_COUNT.as_int():
return 0
return GlobalConfig.RESTRAINT_TIME.as_int() - int(
(
sql_datetime()
sql_now()
- self.userServices.filter(state=types.states.State.ERROR, state_date__gt=date)
.latest('state_date')
.state_date
@ -347,7 +347,7 @@ class ServicePool(UUIDModel, TaggingMixin):
Checks if the access for a service pool is allowed or not (based esclusively on associated calendars)
"""
if check_datetime is None:
check_datetime = sql_datetime()
check_datetime = sql_now()
access = self.fallbackAccess
# Let's see if we can access by current datetime
@ -368,7 +368,7 @@ class ServicePool(UUIDModel, TaggingMixin):
typing.Optional[int] -- [Returns deadline in secods. If no deadline (forever), will return None]
"""
if check_datetime is None:
check_datetime = sql_datetime()
check_datetime = sql_now()
if self.is_access_allowed(check_datetime) is False:
return -1
@ -425,7 +425,7 @@ class ServicePool(UUIDModel, TaggingMixin):
"""
self.state = state
self.state_date = sql_datetime()
self.state_date = sql_now()
if save:
self.save()
@ -467,7 +467,7 @@ class ServicePool(UUIDModel, TaggingMixin):
Args:
activePub: Active publication used as "current" publication to make checks
"""
now = sql_datetime()
now = sql_now()
nonActivePub: 'ServicePoolPublication'
userService: 'UserService'
@ -684,8 +684,8 @@ class ServicePool(UUIDModel, TaggingMixin):
return bool(self.service) and self.service.test_connectivity(host, port, timeout)
# Utility for logging
def log(self, message: str, level: log.LogLevel = log.LogLevel.INFO) -> None:
log.log(self, level, message, log.LogSource.INTERNAL)
def log(self, message: str, level: types.log.LogLevel = types.log.LogLevel.INFO) -> None:
log.log(self, level, message, types.log.LogSource.INTERNAL)
@staticmethod
def pre_delete(sender: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=unused-argument

View File

@ -42,7 +42,7 @@ from uds.core.environment import Environment
from uds.core.util import log
from .service_pool import ServicePool
from ..core.util.model import sql_datetime
from ..core.util.model import sql_now
from .uuid_model import UUIDModel
@ -182,7 +182,7 @@ class ServicePoolPublication(UUIDModel):
save: Defaults to true. If false, record will not be saved to db, just modified
"""
self.state_date = sql_datetime()
self.state_date = sql_now()
self.state = state
self.save(update_fields=['state_date', 'state'])

View File

@ -39,7 +39,7 @@ from django.db import models
from uds.core.managers.crypto import CryptoManager
from .uuid_model import UUIDModel
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core import consts
from .user import User
@ -108,7 +108,7 @@ class TicketStore(UUIDModel):
return TicketStore.objects.create(
uuid=TicketStore.generate_uuid(),
stamp=sql_datetime(),
stamp=sql_now(),
data=data,
validity=validity,
owner=owner,
@ -134,7 +134,7 @@ class TicketStore(UUIDModel):
t = TicketStore.objects.get(uuid=uuid, owner=owner)
validity = datetime.timedelta(seconds=t.validity)
now = sql_datetime()
now = sql_now()
logger.debug('Ticket validity: %s %s', t.stamp + validity, now)
if t.stamp + validity < now:
@ -206,7 +206,7 @@ class TicketStore(UUIDModel):
) -> None:
try:
t = TicketStore.objects.get(uuid=uuid, owner=owner)
t.stamp = sql_datetime()
t.stamp = sql_now()
if validity:
t.validity = validity
t.save(update_fields=['validity', 'stamp'])
@ -283,7 +283,7 @@ class TicketStore(UUIDModel):
@staticmethod
def cleanup() -> None:
now = sql_datetime()
now = sql_now()
for v in TicketStore.objects.all():
if now > v.stamp + datetime.timedelta(
seconds=v.validity + 600

View File

@ -40,7 +40,7 @@ from uds.core.util import log, storage, properties
from .authenticator import Authenticator
from ..core.consts import NEVER
from ..core.util.model import sql_datetime
from ..core.util.model import sql_now
from .uuid_model import UUIDModel
# Not imported at runtime, just for type checking
@ -72,7 +72,7 @@ class User(UUIDModel, properties.PropertiesMixin):
is_admin = models.BooleanField(default=False) # is true, this is a super-admin
last_access = models.DateTimeField(default=NEVER)
parent = models.CharField(max_length=50, default=None, null=True)
created = models.DateTimeField(default=sql_datetime, blank=True)
created = models.DateTimeField(default=sql_now, blank=True)
# "fake" declarations for type checking
# objects: 'models.manager.Manager["User"]'
@ -128,7 +128,7 @@ class User(UUIDModel, properties.PropertiesMixin):
"""
Updates the last access for this user with the current time of the sql server
"""
self.last_access = sql_datetime()
self.last_access = sql_now()
self.save(update_fields=['last_access'])
def logout(self, request: 'ExtendedHttpRequest') -> types.auth.AuthenticationResult:

View File

@ -39,7 +39,7 @@ from django.db.models import signals
from uds.core import types, consts
from uds.core.environment import Environment
from uds.core.util import log, properties
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.types.states import State
from uds.models.service_pool import ServicePool
from uds.models.service_pool_publication import ServicePoolPublication
@ -408,7 +408,7 @@ class UserService(UUIDModel, properties.PropertiesMixin):
"""
if state != self.state:
self.state_date = sql_datetime()
self.state_date = sql_now()
self.state = state
self.save(update_fields=['state', 'state_date'])
@ -423,7 +423,7 @@ class UserService(UUIDModel, properties.PropertiesMixin):
"""
if state != self.os_state:
self.state_date = sql_datetime()
self.state_date = sql_now()
self.os_state = state
self.save(update_fields=['os_state', 'state_date'])
@ -435,7 +435,7 @@ class UserService(UUIDModel, properties.PropertiesMixin):
user: User to assing to (db record)
"""
self.cache_level = 0
self.state_date = sql_datetime()
self.state_date = sql_now()
self.user = user
self.save(update_fields=['cache_level', 'state_date', 'user'])
@ -452,7 +452,7 @@ class UserService(UUIDModel, properties.PropertiesMixin):
from uds.core.managers.userservice import UserServiceManager
self.in_use = inUse
self.in_use_date = sql_datetime()
self.in_use_date = sql_now()
self.save(update_fields=['in_use', 'in_use_date'])
# Start/stop accounting
@ -618,8 +618,8 @@ class UserService(UUIDModel, properties.PropertiesMixin):
)
# Utility for logging
def log(self, message: str, level: log.LogLevel = log.LogLevel.INFO) -> None:
log.log(self, level, message, log.LogSource.INTERNAL)
def log(self, message: str, level: types.log.LogLevel = types.log.LogLevel.INFO) -> None:
log.log(self, level, message, types.log.LogSource.INTERNAL)
def test_connectivity(self, host: str, port: 'str|int', timeout:int=4) -> bool:
return self.deployed_service.test_connectivity(host, port, timeout)

View File

@ -36,7 +36,7 @@ from django.db import models
from uds.core.managers.crypto import CryptoManager
from .user_service import UserService
from ..core.util.model import sql_datetime
from ..core.util.model import sql_now
logger = logging.getLogger(__name__)
@ -58,7 +58,7 @@ class UserServiceSession(models.Model): # pylint: disable=too-many-public-metho
session_id = models.CharField(
max_length=128, db_index=True, default=_session_id_generator, blank=True
)
start = models.DateTimeField(default=sql_datetime)
start = models.DateTimeField(default=sql_now)
end = models.DateTimeField(null=True, blank=True)
user_service = models.ForeignKey(
@ -88,5 +88,5 @@ class UserServiceSession(models.Model): # pylint: disable=too-many-public-metho
"""
Ends the session
"""
self.end = sql_datetime()
self.end = sql_now()
self.save(update_fields=['end'])

View File

@ -40,7 +40,7 @@ from django.utils.translation import gettext_noop as _
from uds.core import messaging, exceptions, types
from uds.core.ui import gui
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.util.utils import ignore_exceptions
from . import telegram
@ -162,7 +162,7 @@ class TelegramNotifier(messaging.Notifier):
return # no access token, no messages
# Time of last retrieve
last_check: typing.Optional[datetime.datetime] = self.storage.read_pickled('last_check')
now = sql_datetime()
now = sql_now()
# If last check is not set, we will set it to now
if last_check is None:

View File

@ -119,9 +119,9 @@ class LinuxOsManager(osmanagers.OSManager):
if self.is_removable_on_logout(userservice):
log.log(
userservice,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
'Unused user service for too long. Removing due to OS Manager parameters.',
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
userservice.remove()

View File

@ -39,7 +39,7 @@ import collections.abc
from django.utils.translation import gettext_noop as _
from uds.core.ui import gui
from uds.core import exceptions
from uds.core import exceptions, types
from uds.core.util import log
from .linux_osmanager import LinuxOsManager
@ -91,9 +91,9 @@ class LinuxRandomPassManager(LinuxOsManager):
service.store_value('linOsRandomPass', randomPass)
log.log(
service,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'Password set to "{randomPass}"',
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
return randomPass

View File

@ -119,9 +119,9 @@ class TestOSManager(osmanagers.OSManager):
if self.is_removable_on_logout(userservice):
log.log(
userservice,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
'Unused user service for too long. Removing due to OS Manager parameters.',
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
userservice.remove()

View File

@ -110,9 +110,9 @@ class WindowsOsManager(osmanagers.OSManager):
if self.is_removable_on_logout(userservice):
log.log(
userservice,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
'Unused user service for too long. Removing due to OS Manager parameters.',
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
userservice.remove()

View File

@ -275,9 +275,9 @@ class WinDomainOsManager(WindowsOsManager):
logger.warning('Could not find _ldap._tcp.%s', self.domain.as_str())
log.log(
userservice,
log.LogLevel.WARNING,
types.log.LogLevel.WARNING,
f'Could not remove machine from domain (_ldap._tcp.{self.domain.as_str()} not found)',
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
except ldaputil.ALREADY_EXISTS: # pyright: ignore
# Already added this machine to this group, pass
@ -291,7 +291,7 @@ class WinDomainOsManager(WindowsOsManager):
# logger.exception('Ldap Exception caught')
if error:
log.log(userservice, log.LogLevel.WARNING, error, log.LogSource.OSMANAGER)
log.log(userservice, types.log.LogLevel.WARNING, error, types.log.LogSource.OSMANAGER)
logger.error(error)
def release(self, userservice: 'UserService') -> None:
@ -305,9 +305,9 @@ class WinDomainOsManager(WindowsOsManager):
# logger.info('Releasing from a not FQDN domain is not supported')
log.log(
userservice,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
"Removing a domain machine form a non FQDN domain is not supported.",
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
return
@ -317,27 +317,27 @@ class WinDomainOsManager(WindowsOsManager):
logger.warning('Could not find _ldap._tcp.%s', self.domain.as_str())
log.log(
userservice,
log.LogLevel.WARNING,
types.log.LogLevel.WARNING,
f'Could not remove machine from domain (_ldap._tcp.{self.domain.as_str()} not found)',
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
return
except ldaputil.LDAPError as e:
# logger.exception('Ldap Exception caught')
log.log(
userservice,
log.LogLevel.WARNING,
types.log.LogLevel.WARNING,
f'Could not remove machine from domain ({e})',
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
return
except Exception as e:
# logger.exception('Exception caught')
log.log(
userservice,
log.LogLevel.WARNING,
types.log.LogLevel.WARNING,
f'Could not remove machine from domain ({e})',
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
return

View File

@ -111,9 +111,9 @@ class WinRandomPassManager(WindowsOsManager):
userservice.store_value('winOsRandomPass', rnd_password)
log.log(
userservice,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'Password set to "{rnd_password}"',
log.LogSource.OSMANAGER,
types.log.LogSource.OSMANAGER,
)
return rnd_password

View File

@ -39,9 +39,9 @@ import typing
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
from uds.core.types.log import LogObjectType
from uds.core import types
from uds.core.ui import gui
from uds.core.util import dateutils, log
from uds.core.util import dateutils
from uds.models import Log
from .base import ListReport
@ -88,8 +88,8 @@ class ListReportAuditCSV(ListReport):
for i in Log.objects.filter(
created__gte=start,
created__lte=end,
source=log.LogSource.REST,
owner_type=LogObjectType.SYSLOG,
source=types.log.LogSource.REST,
owner_type=types.log.LogObjectType.SYSLOG,
).order_by('-created'):
# extract user, method, response_code and request from data field
m = rx.match(i.data)

View File

@ -38,7 +38,7 @@ import typing
from uds.core import consts, services, types
from uds.core.managers.userservice import UserServiceManager
from uds.core.util import autoserializable, log
from uds.core.util import autoserializable
from uds.core.util.model import sql_stamp_seconds
from .jobs import OVirtDeferredRemoval
@ -248,7 +248,7 @@ class OVirtLinkedUserService(services.UserService, autoserializable.AutoSerializ
self.cache.put('ready', '1')
except Exception as e:
self.do_log(log.LogLevel.ERROR, f'Error on setReady: {e}')
self.do_log(types.log.LogLevel.ERROR, f'Error on setReady: {e}')
# Treat as operation done, maybe the machine is ready and we can continue
return types.states.TaskState.FINISHED
@ -367,7 +367,7 @@ if sys.platform == 'win32':
"""
reason = str(reason)
logger.debug('Setting error state, reason: %s', reason)
self.do_log(log.LogLevel.ERROR, reason)
self.do_log(types.log.LogLevel.ERROR, reason)
if self._vmid != '': # Powers off
OVirtDeferredRemoval.remove(self.service().provider(), self._vmid)
@ -606,7 +606,7 @@ if sys.platform == 'win32':
if sql_stamp_seconds() - shutdown_start > consts.os.MAX_GUEST_SHUTDOWN_WAIT:
logger.debug('Time is consumed, falling back to stop')
self.do_log(
log.LogLevel.ERROR,
types.log.LogLevel.ERROR,
f'Could not shutdown machine using soft power off in time ({consts.os.MAX_GUEST_SHUTDOWN_WAIT} seconds). Powering off.',
)
# Not stopped by guest in time, but must be stopped normally

View File

@ -37,7 +37,7 @@ import collections.abc
from uds.core import services, types
from uds.core.managers.crypto import CryptoManager
from uds.core.util import log, autoserializable
from uds.core.util import autoserializable
from uds.core.util.model import sql_stamp_seconds
# Not imported at runtime, just for type checking
@ -234,7 +234,7 @@ class OpenGnsysUserService(services.UserService, autoserializable.AutoSerializab
types.states.DeployState.ERROR, so we can do "return self.__error(reason)"
"""
logger.debug('Setting error state, reason: %s', reason)
self.do_log(log.LogLevel.ERROR, reason)
self.do_log(types.log.LogLevel.ERROR, reason)
if self._machine_id:
try:
@ -315,7 +315,7 @@ class OpenGnsysUserService(services.UserService, autoserializable.AutoSerializab
self._stamp = sql_stamp_seconds()
self.do_log(
log.LogLevel.INFO,
types.log.LogLevel.INFO,
f'Reserved machine {self._name}: id: {self._machine_id}, mac: {self._mac}, ip: {self._ip}',
)

View File

@ -36,7 +36,7 @@ import typing
from uds.core import jobs
from uds import models
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from .provider import OGProvider
from .service import OGService
@ -66,7 +66,7 @@ class OpenGnsysMaintainer(jobs.Job):
service: models.Service
for service in provider.services.all():
instance: OGService = typing.cast(OGService, service.get_instance())
since = sql_datetime() - datetime.timedelta(
since = sql_now() - datetime.timedelta(
hours=instance.max_reserve_hours.as_int() - 8
) # If less than 8 hours of reservation...
# Now mark for removal every CACHED service that is about to expire its reservation on OpenGnsys

View File

@ -37,7 +37,7 @@ import typing
import collections.abc
from uds.core import services, consts, types
from uds.core.util import log, autoserializable
from uds.core.util import autoserializable
from . import on
@ -147,7 +147,7 @@ class OpenNebulaLiveDeployment(services.UserService, autoserializable.AutoSerial
self.cache.put('ready', '1')
except Exception as e:
self.do_log(log.LogLevel.ERROR, 'Error on setReady: {}'.format(e))
self.do_log(types.log.LogLevel.ERROR, 'Error on setReady: {}'.format(e))
# Treat as operation done, maybe the machine is ready and we can continue
return types.states.TaskState.FINISHED
@ -252,7 +252,7 @@ class OpenNebulaLiveDeployment(services.UserService, autoserializable.AutoSerial
"""
reason = str(reason)
logger.debug('Setting error state, reason: %s', reason)
self.do_log(log.LogLevel.ERROR, reason)
self.do_log(types.log.LogLevel.ERROR, reason)
if self._vmid: # Powers off & delete it
try:

View File

@ -37,7 +37,7 @@ import pickle # nosec: not insecure, we are loading our own data
import typing
from uds.core import consts, services, types
from uds.core.util import autoserializable, log
from uds.core.util import autoserializable
from .openstack import types as openstack_types
@ -175,7 +175,7 @@ class OpenStackLiveUserService(
self.cache.put('ready', '1')
except Exception as e:
self.do_log(log.LogLevel.ERROR, 'Error on setReady: {}'.format(e))
self.do_log(types.log.LogLevel.ERROR, 'Error on setReady: {}'.format(e))
# Treat as operation done, maybe the machine is ready and we can continue
return types.states.TaskState.FINISHED
@ -267,7 +267,7 @@ class OpenStackLiveUserService(
self._queue = [Operation.ERROR]
self._reason = str(reason)
self.do_log(log.LogLevel.ERROR, self._reason)
self.do_log(types.log.LogLevel.ERROR, self._reason)
if self._vmid:
# Creating machines should be deleted on error
@ -278,7 +278,7 @@ class OpenStackLiveUserService(
logger.warning('Can\t set machine %s state to stopped', self._vmid)
else:
self.do_log(
log.LogLevel.INFO, 'Keep on error is enabled, machine will not be marked for deletion'
types.log.LogLevel.INFO, 'Keep on error is enabled, machine will not be marked for deletion'
)
# Fix queue to FINISH and return it
self._queue = [Operation.FINISH]

View File

@ -37,7 +37,6 @@ from django.utils.translation import gettext_noop as _
from uds.core import types
from uds.core.services.generics.fixed.service import FixedService
from uds.core.ui import gui
from uds.core.util import log
from . import helpers
from .deployment_fixed import OpenStackUserServiceFixed
@ -170,7 +169,7 @@ class OpenStackServiceFixed(FixedService): # pylint: disable=too-many-public-me
break
except Exception: # Notifies on log, but skipt it
self.provider().do_log(
log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
types.log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
)
logger.warning(
'The service has machines that cannot be checked on openstack (connection error or machine has been deleted): %s',

View File

@ -120,7 +120,7 @@ class IPMachinesUserService(services.UserService, autoserializable.AutoSerializa
def _error(self, reason: str) -> types.states.TaskState:
if self._vmid:
self.service().unassign(self._vmid)
self.service().unlock_server(self._vmid)
self._vmid = ''
self._ip = ''
self._mac = ''
@ -141,7 +141,7 @@ class IPMachinesUserService(services.UserService, autoserializable.AutoSerializa
def destroy(self) -> types.states.TaskState:
if self._vmid:
self.service().unassign(self._vmid)
self.service().unlock_server(self._vmid)
self._vmid = ''
self._ip = ''
self._mac = ''

View File

@ -37,7 +37,7 @@ from django.utils.translation import gettext_noop as _
from uds.core import exceptions, services, types
from uds.core.ui.user_interface import gui
from uds.core.util import log, net, resolver
from uds.core.util import net, resolver
logger = logging.getLogger(__name__)
@ -126,7 +126,7 @@ class PhysicalMachinesProvider(services.ServiceProvider):
try:
host = resolver.resolve(host)[0]
except Exception as e:
self.do_log(log.LogLevel.WARNING, f'Name {host} could not be resolved')
self.do_log(types.log.LogLevel.WARNING, f'Name {host} could not be resolved')
logger.warning('Name %s could not be resolved (%s)', host, e)
return ''

View File

@ -42,9 +42,8 @@ from uds import models
from uds.core import exceptions, types, services
from uds.core.ui import gui
from uds.core.util import fields
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.core.util import security
from uds.core import services
from .deployment_multi import IPMachinesUserService
@ -148,7 +147,7 @@ class IPMachinesService(services.Service):
return datetime.timedelta(hours=self.max_session_hours.value)
def enumerate_assignables(self) -> collections.abc.Iterable[types.ui.ChoiceItem]:
now = sql_datetime()
now = sql_now()
return [
gui.choice_item(f'{server.host}|{server.mac}', server.uuid)
for server in fields.get_server_group_from_field(self.server_group).servers.all()
@ -163,9 +162,9 @@ class IPMachinesService(services.Service):
) -> types.states.TaskState:
server: 'models.Server' = models.Server.objects.get(uuid=assignable_id)
ipmachine_instance: IPMachinesUserService = typing.cast(IPMachinesUserService, userservice_instance)
if server.locked_until is None or server.locked_until < sql_datetime():
if server.locked_until is None or server.locked_until < sql_now():
# Lock the server for 10 year right now...
server.locked_until = sql_datetime() + datetime.timedelta(days=365)
server.locked_until = sql_now() + datetime.timedelta(days=365)
return ipmachine_instance.assign(server.host)
@ -179,7 +178,7 @@ class IPMachinesService(services.Service):
if self.randomize_host.as_bool() is True:
random.shuffle(list_of_servers) # Reorder the list randomly if required
for server in list_of_servers:
if server.locked_until is None or server.locked_until < sql_datetime():
if server.locked_until is None or server.locked_until < sql_now():
return server.uuid
raise exceptions.services.InsufficientResourcesException()
@ -187,14 +186,14 @@ class IPMachinesService(services.Service):
server = models.Server.objects.get(uuid=server_uuid)
return server.host, server.mac
def assign(self, server_uuid: str) -> None:
def lock_server(self, server_uuid: str) -> None:
try:
server = models.Server.objects.get(uuid=server_uuid)
server.lock(self.get_max_lock_time())
except models.Server.DoesNotExist:
pass
def unassign(self, server_uuid: str) -> None:
def unlock_server(self, server_uuid: str) -> None:
try:
server = models.Server.objects.get(uuid=server_uuid)
server.lock(None)
@ -209,13 +208,14 @@ class IPMachinesService(services.Service):
# Maybe, an user has logged in on an unassigned machine
# if lockForExternalAccess is enabled, we must lock it
if self.lock_on_external_access.as_bool() is True:
self.assign(id)
self.do_log(types.log.LogLevel.DEBUG, f'External login detected for {id}, locking machine for {self.get_max_lock_time()} or until logout')
self.lock_server(id)
def process_logout(self, id: str, remote_login: bool) -> None:
'''
Process logout for a machine and release it.
'''
self.unassign(id)
self.unlock_server(id)
def notify_initialization(self, id: str) -> None:
'''
@ -223,7 +223,7 @@ class IPMachinesService(services.Service):
Normally, this means that it's free
'''
logger.debug('Notify initialization for %s: %s', self, id)
self.unassign(id)
self.unlock_server(id)
# Used by actor API. look parent documentation
def get_valid_id(self, ids: collections.abc.Iterable[str]) -> typing.Optional[str]:

View File

@ -38,7 +38,6 @@ from uds.core import services, types
from uds.core.services.generics.fixed.service import FixedService
from uds.core.services.generics.fixed.userservice import FixedUserService
from uds.core.ui import gui
from uds.core.util import log
from . import helpers
from .deployment_fixed import ProxmoxUserServiceFixed
@ -162,7 +161,7 @@ class ProxmoxServiceFixed(FixedService): # pylint: disable=too-many-public-meth
name='UDS Snapshot',
)
except Exception as e:
self.do_log(log.LogLevel.WARNING, 'Could not create SNAPSHOT for this VM. ({})'.format(e))
self.do_log(types.log.LogLevel.WARNING, 'Could not create SNAPSHOT for this VM. ({})'.format(e))
def snapshot_recovery(self, userservice_instance: FixedUserService) -> None:
userservice_instance = typing.cast(ProxmoxUserServiceFixed, userservice_instance)
@ -176,7 +175,7 @@ class ProxmoxServiceFixed(FixedService): # pylint: disable=too-many-public-meth
self.provider().restore_snapshot(vmid, name=snapshot.name)
)
except Exception as e:
self.do_log(log.LogLevel.WARNING, 'Could not restore SNAPSHOT for this VM. ({})'.format(e))
self.do_log(types.log.LogLevel.WARNING, 'Could not restore SNAPSHOT for this VM. ({})'.format(e))
def get_and_assign(self) -> str:
found_vmid: typing.Optional[str] = None
@ -191,7 +190,7 @@ class ProxmoxServiceFixed(FixedService): # pylint: disable=too-many-public-meth
break
except Exception: # Notifies on log, but skipt it
self.provider().do_log(
log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
types.log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
)
logger.warning(
'The service has machines that cannot be checked on proxmox (connection error or machine has been deleted): %s',

View File

@ -38,7 +38,7 @@ from uds.core.services.generics.dynamic.publication import DynamicPublication
from uds.core.services.generics.dynamic.service import DynamicService
from uds.core.services.generics.dynamic.userservice import DynamicUserService
from uds.core.ui import gui
from uds.core.util import validators, log, fields
from uds.core.util import validators, fields
from . import helpers, jobs
from .deployment_linked import ProxmoxUserserviceLinked
@ -245,7 +245,7 @@ class ProxmoxServiceLinked(DynamicService):
self.disable_machine_ha(vmid)
except Exception as e:
logger.warning('Exception disabling HA for vm %s: %s', vmid, e)
self.do_log(level=log.LogLevel.WARNING, message=f'Exception disabling HA for vm {vmid}: {e}')
self.do_log(level=types.log.LogLevel.WARNING, message=f'Exception disabling HA for vm {vmid}: {e}')
# And remove it
return self.provider().remove_machine(vmid)

View File

@ -36,7 +36,7 @@ import typing
import collections.abc
from uds.core import services, consts, types
from uds.core.util import autoserializable, log
from uds.core.util import autoserializable
from .xen_client import XenPowerState
@ -154,7 +154,7 @@ class XenLinkedDeployment(services.UserService, autoserializable.AutoSerializabl
self.cache.put('ready', '1', 30)
except Exception as e:
# On case of exception, log an an error and return as if the operation was executed
self.do_log(log.LogLevel.ERROR, 'Error setting machine state: {}'.format(e))
self.do_log(types.log.LogLevel.ERROR, 'Error setting machine state: {}'.format(e))
# return self.__error('Machine is not available anymore')
return types.states.TaskState.FINISHED
@ -231,7 +231,7 @@ class XenLinkedDeployment(services.UserService, autoserializable.AutoSerializabl
def _error(self, reason: typing.Any) -> types.states.TaskState:
logger.debug('Setting error state, reason: %s', reason)
self.do_log(log.LogLevel.ERROR, reason)
self.do_log(types.log.LogLevel.ERROR, reason)
if self._vmid != '': # Powers off and delete VM
try:

View File

@ -35,7 +35,7 @@ import typing
from uds.core import types
from uds.core.services.generics.fixed.userservice import FixedUserService, Operation
from uds.core.util import log, autoserializable
from uds.core.util import autoserializable
from . import xen_client
@ -79,7 +79,7 @@ class XenFixedUserService(FixedUserService, autoserializable.AutoSerializable):
self.cache.put('ready', '1', 30)
except Exception as e:
# On case of exception, log an an error and return as if the operation was executed
self.do_log(log.LogLevel.ERROR, 'Error setting machine state: {}'.format(e))
self.do_log(types.log.LogLevel.ERROR, 'Error setting machine state: {}'.format(e))
# return self.__error('Machine is not available anymore')
return types.states.TaskState.FINISHED

View File

@ -38,7 +38,6 @@ from uds.core import consts, services, types
from uds.core.services.generics.fixed.service import FixedService
from uds.core.services.generics.fixed.userservice import FixedUserService
from uds.core.ui import gui
from uds.core.util import log
from uds.core.util.decorators import cached
from . import helpers
@ -214,7 +213,7 @@ class XenFixedService(FixedService): # pylint: disable=too-many-public-methods
name='UDS Snapshot',
)
except Exception as e:
self.do_log(log.LogLevel.WARNING, 'Could not create SNAPSHOT for this VM. ({})'.format(e))
self.do_log(types.log.LogLevel.WARNING, 'Could not create SNAPSHOT for this VM. ({})'.format(e))
def snapshot_recovery(self, userservice_instance: FixedUserService) -> None:
userservice_instance = typing.cast(XenFixedUserService, userservice_instance)
@ -228,7 +227,7 @@ class XenFixedService(FixedService): # pylint: disable=too-many-public-methods
try:
userservice_instance._task = self.provider().restore_snapshot(snapshot['id'])
except Exception as e:
self.do_log(log.LogLevel.WARNING, 'Could not restore SNAPSHOT for this VM. ({})'.format(e))
self.do_log(types.log.LogLevel.WARNING, 'Could not restore SNAPSHOT for this VM. ({})'.format(e))
def get_and_assign(self) -> str:
found_vmid: typing.Optional[str] = None
@ -243,7 +242,7 @@ class XenFixedService(FixedService): # pylint: disable=too-many-public-methods
break
except Exception: # Notifies on log, but skipt it
self.provider().do_log(
log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
types.log.LogLevel.WARNING, 'Machine {} not accesible'.format(found_vmid)
)
logger.warning(
'The service has machines that cannot be checked on xen (connection error or machine has been deleted): %s',

View File

@ -40,7 +40,7 @@ from uds import models
from uds.core import transports, types, ui, consts
from uds.core.managers.crypto import CryptoManager
from uds.core.util import fields
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
# Not imported at runtime, just for type checking
if typing.TYPE_CHECKING:
@ -458,7 +458,7 @@ class HTML5RDPTransport(transports.Transport):
+ '_'
+ sanitize(user.name)
+ '/'
+ sql_datetime().strftime('%Y%m%d-%H%M')
+ sql_now().strftime('%Y%m%d-%H%M')
)
params['create-recording-path'] = 'true'

View File

@ -48,7 +48,7 @@ from uds.core.services.exceptions import (
)
from uds.core.util import html
from uds.core.util.config import GlobalConfig
from uds.core.util.model import sql_datetime
from uds.core.util.model import sql_now
from uds.models import MetaPool, Network, ServicePool, ServicePoolGroup, TicketStore, Transport
# Not imported at runtime, just for type checking
@ -129,7 +129,7 @@ def get_services_info_dict(
available_metapools = list(
MetaPool.metapools_for_groups(groups, request.user)
) # Pass in user to get "number_assigned" to optimize
now = sql_datetime()
now = sql_now()
# Information for administrators
nets = ''

View File

@ -36,6 +36,7 @@ import collections.abc
from django.http import HttpResponse
from django.views.decorators.cache import cache_page, never_cache
from uds.core import types
from uds.core.auths.auth import web_login_required, web_password
from uds.core.managers.userservice import UserServiceManager
from uds.core.types.requests import ExtendedHttpRequest
@ -199,11 +200,11 @@ def action(request: 'ExtendedHttpRequestWithUser', service_id: str, action_strin
rebuild = True
log.log(
userService.deployed_service,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
"Removing User Service {} as requested by {} from {}".format(
userService.friendly_name, request.user.pretty_name, request.ip
),
log.LogSource.WEB,
types.log.LogSource.WEB,
)
UserServiceManager().request_logoff(userService)
userService.release()
@ -215,11 +216,11 @@ def action(request: 'ExtendedHttpRequestWithUser', service_id: str, action_strin
rebuild = True
log.log(
userService.deployed_service,
log.LogLevel.INFO,
types.log.LogLevel.INFO,
"Reseting User Service {} as requested by {} from {}".format(
userService.friendly_name, request.user.pretty_name, request.ip
),
log.LogSource.WEB,
types.log.LogSource.WEB,
)
# UserServiceManager().requestLogoff(userService)
UserServiceManager().reset(userService)

View File

@ -32,7 +32,7 @@ import logging
from unittest import mock
from uds.core.util import log
from uds.core import types
from ...utils import rest
from ...fixtures import servers as servers_fixtures
@ -71,7 +71,7 @@ class ServerEventsLogTest(rest.test.RESTTestCase):
)
self.assertEqual(response.status_code, 200)
# First call shout have
the_log.assert_any_call(server, log.LogLevel.INFO, 'test message', log.LogSource.SERVER, None)
the_log.assert_any_call(server, types.log.LogLevel.INFO, 'test message', types.log.LogSource.SERVER, None)
# Now notify to an userService
response = self.client.rest_post(
@ -87,7 +87,7 @@ class ServerEventsLogTest(rest.test.RESTTestCase):
self.assertEqual(response.status_code, 200)
the_log.assert_any_call(
userService, log.LogLevel.INFO, 'test message userservice', log.LogSource.SERVER, None
userService, types.log.LogLevel.INFO, 'test message userservice', types.log.LogSource.SERVER, None
)
def test_event_log_fail(self) -> None:

View File

@ -62,7 +62,7 @@ class ServerRegisterTest(rest.test.RESTTestCase):
'subtype': crypto.CryptoManager.manager().random_string(10),
'os': '', # To be set on tests
'hostname': 'test',
'log_level': log.LogLevel.INFO.value,
'log_level': types.log.LogLevel.INFO.value,
'mac': random_mac(),
}
self.login(as_admin=False) # As staff

Some files were not shown because too many files have changed in this diff Show More