Mirror of https://github.com/dkmstr/openuds.git (synced 2025-01-03 01:17:56 +03:00)
adding __slots__ to optimize code a bit
This commit is contained in:
parent a6c6bca2fd
commit 552ba3796b
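As context for the change (not part of the diff itself): declaring __slots__ on a class replaces the per-instance __dict__ with fixed attribute storage, which lowers memory use per instance and slightly speeds up attribute access, at the cost of disallowing arbitrary new attributes. A minimal sketch, with hypothetical class names not taken from the repository:

    import sys

    class WithDict:
        def __init__(self) -> None:
            self.a = 1
            self.b = 2

    class WithSlots:
        __slots__ = ('a', 'b')

        def __init__(self) -> None:
            self.a = 1
            self.b = 2

    print(sys.getsizeof(WithDict().__dict__))  # a dict is allocated for every instance
    print(hasattr(WithSlots(), '__dict__'))    # False: slotted instances carry no __dict__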
@@ -47,6 +47,7 @@ class Environment:
     not stored with main module data.
     The environment is composed of a "cache" and a "storage". First are volatile data, while second are persistent data.
     """
+    __slots__ = ['_key', '_cache', '_storage', '_idGenerators']

     _key: str
     _cache: Cache
@@ -173,6 +174,9 @@ class Environmentable:
     """
     This is a base class provided for all objects that have an environment associated. These are mainly modules
     """
+    __slots__ = ['_env']
+
+    _env: Environment

     def __init__(self, environment: Environment):
         """
@@ -42,6 +42,7 @@ class DelayedTask(Environmentable):
     This class represents a single delayed task object.
     This is an object that represents an execution to be done "later"
     """
+    __slots__ = ()

     def __init__(self):
         """
@@ -44,6 +44,7 @@ from django.db.models import Q
 from uds.models import DelayedTask as DBDelayedTask
 from uds.models import getSqlDatetime
 from uds.core.environment import Environment
+from uds.core.util import singleton

 from .delayed_task import DelayedTask

@@ -54,6 +55,7 @@ class DelayedTaskThread(threading.Thread):
     """
     Class responsible of executing a delayed task in its own thread
     """
+    __slots__ = ('_taskInstance',)

     _taskInstance: DelayedTask

@@ -70,29 +72,27 @@ class DelayedTaskThread(threading.Thread):
         connections['default'].close()


-class DelayedTaskRunner:
+class DelayedTaskRunner(metaclass=singleton.Singleton):
     """
     Delayed task runner class
     """
+    __slots__ = ()

-    granularity: int = 2  # we check for delayed tasks every "granularity" seconds
-
-    # to keep singleton DelayedTaskRunner
-    _runner: typing.ClassVar[typing.Optional['DelayedTaskRunner']] = None
-    _hostname: str
-    _keepRunning: bool
+    granularity: typing.ClassVar[int] = 2  # we check for delayed tasks every "granularity" seconds
+    _hostname: typing.ClassVar[str]  # "Our" hostname
+    _keepRunning: typing.ClassVar[bool]  # If we should keep it running

     def __init__(self):
-        self._hostname = gethostname()
-        self._keepRunning = True
         logger.debug("Initializing delayed task runner for host %s", self._hostname)
+        DelayedTaskRunner._hostname = gethostname()
+        DelayedTaskRunner._keepRunning = True

     def notifyTermination(self) -> None:
         """
         Invoke this whenever you want to terminate the delayed task runner thread
         It will mark the thread to "stop" ASAP
         """
-        self._keepRunning = False
+        DelayedTaskRunner._keepRunning = False

     @staticmethod
     def runner() -> 'DelayedTaskRunner':
@@ -101,9 +101,7 @@ class DelayedTaskRunner:
         There is only one instance of DelayedTaksRunner, but its "run" method is executed on
         many thread (depending on configuration). They all share common Instance data
         """
-        if DelayedTaskRunner._runner is None:
-            DelayedTaskRunner._runner = DelayedTaskRunner()
-        return DelayedTaskRunner._runner
+        return DelayedTaskRunner()

     def executeOneDelayedTask(self) -> None:
         now = getSqlDatetime()
@@ -142,7 +140,7 @@ class DelayedTaskRunner:
             taskInstance.env = Environment.getEnvForType(taskInstance.__class__)
             DelayedTaskThread(taskInstance).start()

-    def __insert(self, instance: DelayedTask, delay: int, tag: str) -> None:
+    def _insert(self, instance: DelayedTask, delay: int, tag: str) -> None:
         now = getSqlDatetime()
         exec_time = now + timedelta(seconds=delay)
         cls = instance.__class__
@@ -170,7 +168,7 @@ class DelayedTaskRunner:
         while retries > 0:
             retries -= 1
             try:
-                self.__insert(instance, delay, tag)
+                self._insert(instance, delay, tag)
                 break
             except Exception as e:
                 logger.info('Exception inserting a delayed task %s: %s', e.__class__, e)
@@ -210,7 +208,7 @@ class DelayedTaskRunner:

     def run(self) -> None:
         logger.debug("At loop")
-        while self._keepRunning:
+        while DelayedTaskRunner._keepRunning:
             try:
                 time.sleep(self.granularity)
                 self.executeOneDelayedTask()
@@ -39,15 +39,16 @@ logger = logging.getLogger(__name__)


 class Job(Environmentable):
+    __slots__ = ('frequency',)
     # Default frecuency, once a day. Remenber that precision will be based on "granurality" of Scheduler
     # If a job is used for delayed execution, this attribute is in fact ignored
-    frecuency: int = (
+    frecuency: typing.ClassVar[int] = (
         24 * 3600 + 3
     )  # Defaults to a big one, and i know frecuency is written as frequency, but this is an "historical mistake" :)
-    frecuency_cfg: typing.Optional[
-        Config.Value
+    frecuency_cfg: typing.ClassVar[
+        typing.Optional[Config.Value]
     ] = None  # If we use a configuration variable from DB, we need to update the frecuency asap, but not before app is ready
-    friendly_name = 'Unknown'
+    friendly_name: typing.ClassVar[str] = 'Unknown'

     @classmethod
     def setup(cls: typing.Type['Job']) -> None:
@@ -68,10 +68,15 @@ class DelayedTaskThread(BaseThread):


 class TaskManager(metaclass=singleton.Singleton):
-    keepRunning: bool = True
-    threads: typing.List[BaseThread] = []
+
+    __slots__ = ('threads', 'keepRunning')
+
+    keepRunning: bool
+    threads: typing.List[BaseThread]

     def __init__(self):
+        self.keepRunning = True
+        self.threads = []
         pass

     @staticmethod
@@ -97,6 +97,7 @@ class Module(UserInterface, Environmentable, Serializable):
     Environmentable is a base class that provides utility method to access a separate Environment for every single
     module.
     """
+    __slots__ = ['_uuid']
     # Import variable indicating this module is a base class not a real module
     # Note that Module is not a real module, but a base class for all modules so isBase is not used on this class
     isBase: typing.ClassVar[bool] = False
@@ -43,6 +43,7 @@ class Serializable:
        - Initialize the object with default values
        - Read values from seralized data
     """
+    __slots__ = ()

     def __init__(self):
         pass
@@ -53,14 +53,16 @@ ONE_DAY = 3600 * 24


 class CalendarChecker:
+    __slots__ = ('calendar',)
+
     calendar: Calendar

     # For performance checking
-    updates: int = 0
-    cache_hit: int = 0
-    hits: int = 0
+    updates: typing.ClassVar[int] = 0
+    cache_hit: typing.ClassVar[int] = 0
+    hits: typing.ClassVar[int] = 0

-    cache = Cache('calChecker')
+    cache: typing.ClassVar[Cache] = Cache('calChecker')

     def __init__(self, calendar: Calendar) -> None:
         self.calendar = calendar
@@ -44,6 +44,7 @@ class RedirectMiddleware:

     Some paths will not be redirected, to avoid problems, but they are advised to use SSL (this is for backwards compat)
     """
+    __slots__ = ('get_response',)

     NO_REDIRECT: typing.ClassVar[typing.List[str]] = [
         'rest',
@@ -51,6 +51,8 @@ CHECK_SECONDS = 3600 * 24  # Once a day is more than enough


 class GlobalRequestMiddleware:
+    __slots__ = ('_get_response',)
+
     lastCheck: typing.ClassVar[datetime.datetime] = datetime.datetime.now()

     def __init__(self, get_response: typing.Callable[[HttpRequest], HttpResponse]):
@@ -46,6 +46,7 @@ class UDSSecurityMiddleware:
     '''
     This class contains all the security checks done by UDS in order to add some extra protection.
     '''
+    __slots__ = ('get_response',)

     get_response: typing.Any  # typing.Callable[['HttpRequest'], 'HttpResponse']

@@ -40,6 +40,7 @@ class XUACompatibleMiddleware:
     This header tells to Internet Explorer to render page with latest
     possible version or to use chrome frame if it is installed.
     """
+    __slots__ = ('get_response',)

     def __init__(self, get_response):
         self.get_response = get_response
@@ -8,14 +8,14 @@ class Singleton(type):
         class MyClass(metaclass=Singleton):
             ...
     '''
-    __instance: typing.Optional[typing.Any]
+    _instance: typing.Optional[typing.Any]

     # We use __init__ so we customise the created class from this metaclass
     def __init__(self, *args, **kwargs) -> None:
-        self.__instance = None
+        self._instance = None
         super().__init__(*args, **kwargs)

     def __call__(self, *args, **kwargs) -> typing.Any:
-        if self.__instance is None:
-            self.__instance = super().__call__(*args, **kwargs)
-        return self.__instance
+        if self._instance is None:
+            self._instance = super().__call__(*args, **kwargs)
+        return self._instance
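For reference (not part of the patch): with the Singleton metaclass above, the first call to the class constructs and caches the instance on the class object, and every later call returns that same cached instance; that is what lets DelayedTaskRunner.runner() in the earlier hunk simply return DelayedTaskRunner(). A small usage sketch, using a hypothetical Runner class in place of DelayedTaskRunner:

    import typing

    class Singleton(type):
        _instance: typing.Optional[typing.Any]

        def __init__(self, *args, **kwargs) -> None:
            self._instance = None
            super().__init__(*args, **kwargs)

        def __call__(self, *args, **kwargs) -> typing.Any:
            # Construct on the first call, then always hand back the cached instance
            if self._instance is None:
                self._instance = super().__call__(*args, **kwargs)
            return self._instance

    class Runner(metaclass=Singleton):  # hypothetical stand-in for DelayedTaskRunner
        pass

    assert Runner() is Runner()  # both calls yield the one cached instance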
@@ -38,6 +38,8 @@ logger = logging.getLogger(__name__)


 class UniqueGIDGenerator(UniqueIDGenerator):
+    __slots__ = ()
+
     def __init__(self, owner, baseName=None):
         super().__init__('id', owner, baseName)

@@ -53,7 +53,11 @@ class CreateNewIdException(Exception):


 class UniqueIDGenerator:
+    __slots__ = ('_owner', '_baseName')
+
+    # owner is the owner of the UniqueID
     _owner: str
+    # base name for filtering unique ids. (I.e. "mac", "ip", "ipv6" ....)
     _baseName: str

     def __init__(
@@ -39,6 +39,8 @@ logger = logging.getLogger(__name__)


 class UniqueMacGenerator(UniqueIDGenerator):
+    __slots__ = ('_macRange',)
+
     def __init__(self, owner: str) -> None:
         super().__init__('mac', owner, '\tmac')

@@ -39,6 +39,8 @@ logger = logging.getLogger(__name__)

 # noinspection PyMethodOverriding
 class UniqueNameGenerator(UniqueIDGenerator):
+    __slots__ = ()
+
     def __init__(self, owner):
         super().__init__('name', owner)

@@ -53,7 +55,7 @@ class UniqueNameGenerator(UniqueIDGenerator):
         maxVal = 10 ** length - 1
         return self.__toName(super().get(minVal, maxVal), length)

-    def transfer(self, baseName: str, name: str, toUNGen: 'UniqueNameGenerator'):  # type: ignore  # pylint: disable=arguments-differ
+    def transfer(self, baseName: str, name: str, toUNGen: 'UniqueNameGenerator') -> None:  # type: ignore
         self.setBaseName(baseName)
         super().transfer(int(name[len(self._baseName) :]), toUNGen)

@@ -170,3 +170,21 @@ def validateMacRange(macRange: str) -> str:
         )

     return macRange
+
+def validateEmail(email: str) -> str:
+    """
+    Validates that an email is valid
+    :param email: email to validate
+    :return: Raises Module.Validation exception if is invalid, else return the value "fixed"
+    """
+    if len(email) > 254:
+        raise Module.ValidationException(
+            _('Email address is too long')
+        )
+
+    if not re.match(r"[^@]+@[^@]+\.[^@]+", email):
+        raise Module.ValidationException(
+            _('Email address is not valid')
+        )
+
+    return email