mirror of https://github.com/dkmstr/openuds.git synced 2025-01-08 21:18:00 +03:00

fixes for python 3.7

Adolfo Gómez García 2019-07-29 13:13:49 +02:00
parent a948188187
commit 82ef65a26c
7 changed files with 58 additions and 80 deletions

View File

@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012 Virtual Cable S.L.
# Copyright (c) 2012-2019 Virtual Cable S.L.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
@@ -32,7 +32,8 @@
@author: Adolfo Gómez, dkmaster at dkmon dot com
"""
from __future__ import unicode_literals
import logging
import dns
from django.utils.translation import ugettext_noop as _
from uds.core.auths import Authenticator
@@ -41,10 +42,7 @@ from uds.core.ui import gui
from uds.core.managers import cryptoManager
from uds.core.util.State import State
from uds.core.util.request import getRequest
import dns
import logging
__updated__ = '2016-04-20'
logger = logging.getLogger(__name__)
@@ -102,7 +100,7 @@ class InternalDBAuth(Authenticator):
return username
def authenticate(self, username, credentials, groupsManager):
logger.debug('Username: {0}, Password: {1}'.format(username, credentials))
logger.debug('Username: %s, Password: %s', username, credentials)
auth = self.dbAuthenticator()
try:
try:
@@ -125,6 +123,16 @@ class InternalDBAuth(Authenticator):
def createUser(self, usrData):
pass
def getGroups(self, username, groupsManager):
auth = self.dbAuthenticator()
try:
usr = auth.users.get(name=username, state=State.ACTIVE)
except Exception:
return
groupsManager.validate([g.name for g in usr.groups.all()])
@staticmethod
def test(env, data):
return [True, _("Internal structures seems ok")]
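A minimal sketch of the logging change applied throughout this commit (eager str.format replaced by lazy %-style arguments); the message and value are illustrative, not taken from the diff:

import logging

logger = logging.getLogger(__name__)

username = 'admin'
# Eager: the string is built even when DEBUG logging is disabled
logger.debug('Username: {0}'.format(username))
# Lazy: formatting is deferred until the record is actually emitted
logger.debug('Username: %s', username)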

View File

@@ -434,38 +434,16 @@ class Authenticator(Module): # pylint: disable=too-many-public-methods
"""
raise NotImplementedError
def getJavascript(self, request: HttpRequest):
def getJavascript(self, request: HttpRequest) -> typing.Optional[str]:
"""
If you override this method, and returns something different of None,
UDS will consider your authenticator as "Owner draw", that is, that it
will not use the standard form for user authentication.
Args:
Request is the DJango request received for generating this html,
Request is the DJango request received for generating this javascript,
with included user ip at request.ip.
We have here a few things that we should know for creating our own
html for authenticator:
* The id of the username input field is **id_user**
* The id of the password input field is **id_password**
* The id of the login form is **loginform**
* The id of the "back to login" link is **backToLogin**
This is what happens when an authenticator that has getJavascript method is
selected in the front end (from the combo shown):
* The div with id **login** is hidden.
* The div with id **nonStandard** is shown
* Using Ajax, the html provided by this method is requested for
the authenticator
* The returned html is rendered inside **nonStandardLogin** div.
* The **nonStandard** div is shown.
**nonStandard** div has two inner divs, **nonStandardLogin** and
**divBackToLogin**. If there is no standard auths, divBackToLogin is
erased.
With this, and :py:meth:.authCallback method, we can add SSO engines
to UDS with no much problems.
"""

View File

@@ -69,7 +69,7 @@ class DelayedTask(Environmentable):
raise NotImplementedError
def register(self, suggestedTime: int, tag: str = '', check: bool = True):
def register(self, suggestedTime: int, tag: str = '', check: bool = True) -> None:
"""
Utility method that allows to register a Delayedtask
"""

View File

@@ -110,7 +110,7 @@ class DelayedTaskRunner:
task = DBDelayedTask.objects.select_for_update().filter(filt).order_by('execution_time')[0] # @UndefinedVariable
if task.insert_date > now + timedelta(seconds=30):
logger.warning('EXecuted %s due to insert_date being in the future!', task.type)
taskInstanceDump = encoders.decode(task.instance, 'base64')
taskInstanceDump = typing.cast(bytes, encoders.decode(task.instance, 'base64'))
task.delete()
taskInstance = pickle.loads(taskInstanceDump)
except IndexError:
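The typing.cast above is a no-op at runtime; it only tells the type checker that the base64-decoded payload is bytes before it reaches pickle. A self-contained sketch of the same pattern, using the stdlib codecs module as a stand-in for uds's encoders helper:

import codecs
import pickle
import typing

stored = codecs.encode(pickle.dumps({'answer': 42}), 'base64')  # what the DB column would hold
raw = typing.cast(bytes, codecs.decode(stored, 'base64'))       # runtime no-op, guides the checker
print(pickle.loads(raw))                                        # {'answer': 42}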

View File

@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012 Virtual Cable S.L.
# Copyright (c) 2012-2019 Virtual Cable S.L.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
@@ -30,18 +30,13 @@
"""
@author: Adolfo Gómez, dkmaster at dkmon dot com
"""
from __future__ import unicode_literals
import six
import datetime
import logging
__updated__ = '2014-11-26'
logger = logging.getLogger(__name__)
class JobsFactory(object):
class JobsFactory:
_factory = None
def __init__(self):
@@ -57,11 +52,11 @@ class JobsFactory(object):
return self._jobs
def insert(self, name, type_):
logger.debug('Inserting job {0} of type_ {1}'.format(name, type_))
logger.debug('Inserting job %s of type_ %s', name, type_)
try:
self._jobs[name] = type_
except Exception as e:
logger.debug('Exception at insert in JobsFactory: {0}, {1}'.format(e.__class__, e))
logger.debug('Exception at insert in JobsFactory: %s, %s', e.__class__, e)
def ensureJobsInDatabase(self):
from uds.models import Scheduler, getSqlDatetime
@@ -71,7 +66,7 @@ class JobsFactory(object):
try:
logger.debug('Ensuring that jobs are registered inside database')
workers.initialize()
for name, type_ in six.iteritems(self._jobs):
for name, type_ in self._jobs.items():
try:
type_.setup()
# We use database server datetime
@@ -79,14 +74,14 @@ class JobsFactory(object):
next_ = now
job = Scheduler.objects.create(name=name, frecuency=type_.frecuency, last_execution=now, next_execution=next_, state=State.FOR_EXECUTE)
except Exception: # already exists
logger.debug('Already added {0}'.format(name))
logger.debug('Already added %s', name)
job = Scheduler.objects.get(name=name)
job.frecuency = type_.frecuency
if job.next_execution > job.last_execution + datetime.timedelta(seconds=type_.frecuency):
job.next_execution = job.last_execution + datetime.timedelta(seconds=type_.frecuency)
job.save()
except Exception as e:
logger.debug('Exception at ensureJobsInDatabase in JobsFactory: {0}, {1}'.format(e.__class__, e))
logger.debug('Exception at ensureJobsInDatabase in JobsFactory: %s, %s', e.__class__, e)
def lookup(self, typeName):
try:

View File

@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012 Virtual Cable S.L.
# Copyright (c) 2012-2019 Virtual Cable S.L.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
@@ -30,20 +30,17 @@
"""
@author: Adolfo Gómez, dkmaster at dkmon dot com
"""
from __future__ import unicode_literals
import platform
import threading
import time
import logging
from datetime import timedelta
from django.db.models import Q
from django.db import transaction, DatabaseError, connection
from uds.models import Scheduler as dbScheduler, getSqlDatetime
from uds.core.util.State import State
from uds.core.jobs.JobsFactory import JobsFactory
from datetime import timedelta
import platform
import threading
import time
import logging
__updated__ = '2018-03-02'
logger = logging.getLogger(__name__)
@@ -64,7 +61,7 @@ class JobThread(threading.Thread):
try:
self._jobInstance.execute()
except Exception:
logger.warning("Exception executing job {0}".format(self._dbJobId))
logger.warning("Exception executing job %s", self._dbJobId)
finally:
self.jobDone()
@@ -82,7 +79,7 @@ class JobThread(threading.Thread):
try:
connection.close()
except Exception as e:
logger.error('On job executor, closing db connection: {}'.format(e))
logger.error('On job executor, closing db connection: %s', e)
# logger.info('Database access failed... Retrying')
time.sleep(1)
@@ -114,7 +111,7 @@ class Scheduler(object):
def __init__(self):
self._hostname = platform.node()
self._keepRunning = True
logger.info('Initialized scheduler for host "{}"'.format(self._hostname))
logger.info('Initialized scheduler for host "%s"', self._hostname)
@staticmethod
def scheduler():
@@ -144,7 +141,7 @@ class Scheduler(object):
# This params are all set inside fltr (look at __init__)
job = dbScheduler.objects.select_for_update().filter(fltr).order_by('next_execution')[0] # @UndefinedVariable
if job.last_execution > now:
logger.warning('EXecuted {} due to last_execution being in the future!'.format(job.name))
logger.warning('EXecuted %s due to last_execution being in the future!', job.name)
job.state = State.RUNNING
job.owner_server = self._hostname
job.last_execution = now
@@ -153,10 +150,10 @@ class Scheduler(object):
jobInstance = job.getInstance()
if jobInstance is None:
logger.error('Job instance can\'t be resolved for {0}, removing it'.format(job))
logger.error('Job instance can\'t be resolved for %s, removing it', job)
job.delete()
return
logger.debug('Executing job:>{0}<'.format(job.name))
logger.debug('Executing job:>%s<', job.name)
JobThread(jobInstance, job).start() # Do not instatiate thread, just run it
except IndexError:
# Do nothing, there is no jobs for execution
@@ -197,7 +194,7 @@ class Scheduler(object):
# This can happen often on sqlite, and this is not problem at all as we recover it.
# The log is removed so we do not get increased workers.log file size with no information at all
if not isinstance(e, DatabaseError):
logger.error('Unexpected exception at run loop {0}: {1}'.format(e.__class__, e))
logger.error('Unexpected exception at run loop %s: %s', e.__class__, e)
try:
connection.close()
except Exception:

View File

@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012-2018 Virtual Cable S.L.
# Copyright (c) 2012-2019 Virtual Cable S.L.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
@@ -30,23 +30,24 @@
"""
@author: Adolfo Gómez, dkmaster at dkmon dot com
"""
from __future__ import unicode_literals
from functools import wraps
import typing
import logging
import inspect
from uds.core.util.html import checkBrowser
from uds.web.util import errors
from functools import wraps
import logging
__updated__ = '2018-10-07'
logger = logging.getLogger(__name__)
# Decorator that protects pages that needs at least a browser version
# Default is to deny IE < 9
def denyBrowsers(browsers=None, errorResponse=lambda request: errors.errorView(request, errors.BROWSER_NOT_SUPPORTED)):
def denyBrowsers(
browsers: typing.Optional[typing.List[str]] = None,
errorResponse: typing.Callable = lambda request: errors.errorView(request, errors.BROWSER_NOT_SUPPORTED)
):
"""
Decorator to set protection to access page
Look for samples at uds.core.web.views
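A hedged usage sketch for denyBrowsers on a Django view; the view name and the 'ie<9' rule string are assumptions about checkBrowser's rule format, not taken from this diff:

from django.http import HttpResponse

@denyBrowsers(browsers=['ie<9'])  # assumed rule syntax; the default already denies IE < 9
def contentView(request):
    return HttpResponse('reachable only from supported browsers')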
@@ -77,19 +78,14 @@ def deprecated(func):
"""This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used."""
import inspect
@wraps(func)
def new_func(*args, **kwargs):
try:
caller = inspect.stack()[1]
logger.warning(
"Call to deprecated function {0} from {1}:{2}.".format(func.__name__,
caller[1], caller[2]
)
)
logger.warning('Call to deprecated function %s from %s:%s.', func.__name__, caller[1], caller[2])
except Exception:
logger.info('No stack info on deprecated function call {0}'.format(func.__name__))
logger.info('No stack info on deprecated function call %s', func.__name__)
return func(*args, **kwargs)
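A minimal usage sketch for the deprecated decorator; oldHelper is an illustrative name:

@deprecated
def oldHelper(value):
    return value * 2

oldHelper(3)  # logs a warning naming oldHelper plus the caller's file and line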
@@ -100,7 +96,12 @@ def deprecated(func):
#
# Decorator for caching
# Decorator that tries to get from cache before executing
def allowCache(cachePrefix, cacheTimeout, cachingArgs=None, cachingKeyFnc=None):
def allowCache(
cachePrefix: str,
cacheTimeout: int,
cachingArgs: typing.Optional[typing.Union[typing.List[int], int]] = None,
cachingKeyFnc: typing.Optional[typing.Callable] = None
):
"""Decorator that give us a "quick& clean" caching feature on service providers.
Note: This decorator is intended ONLY for service providers
@@ -111,10 +112,9 @@ def allowCache(cachePrefix, cacheTimeout, cachingArgs=None, cachingKeyFnc=None):
First arg (self) is 0, so normally cachingArgs are 1, or [1,2,..]
"""
if not cachingKeyFnc:
cachingKeyFnc = lambda x:''
def allowCacheDecorator(fnc):
cachingKeyFnc = lambda x: ''
def allowCacheDecorator(fnc: typing.Callable):
@wraps(fnc)
def wrapper(*args, **kwargs):
if cachingArgs is not None:
@@ -140,7 +140,7 @@ def allowCache(cachePrefix, cacheTimeout, cachingArgs=None, cachingKeyFnc=None):
# Maybe returned data is not serializable. In that case, cache will fail but no harm is done with this
args[0].cache.put(cacheKey, data, cacheTimeout)
except Exception as e:
logger.debug('Data for {} is not serializable, not cached. {} ({})'.format(cacheKey, data, e))
logger.debug('Data for %s is not serializable on call to %s, not cached. %s (%s)', cacheKey, fnc.__name__, data, e)
return data
return wrapper
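A hedged usage sketch for allowCache; MyProvider and apiListMachines are hypothetical, the only visible requirement in the wrapper above being that self (args[0]) exposes the provider's cache:

class MyProvider:  # stands in for a service provider; UDS wires .cache on it elsewhere
    @allowCache('machines', 60, cachingArgs=1)  # the datacenter argument (index 1) becomes part of the key
    def listMachines(self, datacenter: str):
        return self.apiListMachines(datacenter)  # hypothetical expensive call, reached only on a cache miss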