mirror of https://github.com/dkmstr/openuds.git synced 2025-01-11 05:17:55 +03:00

BIG work on PEP 8 adaptation (easier to read, easier to maintain, etc.)

Adolfo Gómez 2014-02-25 03:12:00 +00:00
parent 89addaf585
commit 06ff8e32be
68 changed files with 2889 additions and 2829 deletions
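Condensed into one illustrative snippet (invented names, not lines taken from the UDS sources), the conventions this commit applies throughout the tree are: one import per line, no spaces around '=' in keyword and default arguments, tightened dict literals, membership tests and .get() instead of the deprecated has_key(), and two blank lines between top-level definitions:

    import logging                                        # was: "import logging, os.path, sys" style
    import os.path

    logger = logging.getLogger(__name__)


    def register(name, mime='application/octet-stream'):  # was: mime = 'application/octet-stream'
        registry = {}
        registry[name] = {'mime': mime, 'path': os.path.join('/tmp', name)}  # was: { 'mime' : mime, ... }
        logger.debug('Registered {0}'.format(name))
        return registry.get(name, None)                   # was: has_key() check plus indexing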

View File

@ -32,7 +32,7 @@
'''
from __future__ import unicode_literals
#from django.utils import simplejson as json
# from django.utils import simplejson as json
import ujson as json
import datetime
import time
@ -47,6 +47,7 @@ logger = logging.getLogger(__name__)
class ParametersException(Exception):
pass
class ContentProcessor(object):
mime_type = None
extensions = None
@ -60,12 +61,11 @@ class ContentProcessor(object):
return self._request.GET.copy()
def processParameters(self):
return ''
def getResponse(self, obj):
return http.HttpResponse(content = self.render(obj), content_type=self.mime_type + "; charset=utf-8")
return http.HttpResponse(content=self.render(obj), content_type=self.mime_type + "; charset=utf-8")
def render(self, obj):
return unicode(obj)
@ -93,6 +93,7 @@ class ContentProcessor(object):
return int(time.mktime(obj.timetuple()))
return unicode(obj)
# ---------------
# Json Processor
# ---------------
@ -111,13 +112,13 @@ class JsonProcessor(ContentProcessor):
logger.error('parsing json: {0}'.format(e))
raise ParametersException(unicode(e))
def render(self, obj):
return json.dumps( ContentProcessor.procesForRender(obj))
#return json.dumps(obj)
return json.dumps(ContentProcessor.procesForRender(obj))
# return json.dumps(obj)
# ---------------
# Json Processor
# XML Processor
# ---------------
class XMLProcessor(ContentProcessor):
mime_type = 'application/xml'
@ -126,7 +127,7 @@ class XMLProcessor(ContentProcessor):
def processParameters(self):
return ''
processors_list = (JsonProcessor,XMLProcessor)
processors_list = (JsonProcessor, XMLProcessor)
default_processor = JsonProcessor
available_processors_mime_dict = dict((cls.mime_type, cls) for cls in processors_list)
available_processors_ext_dict = {}
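The mime-type registry built in the last lines above lends itself to a lookup with a fallback to default_processor. A self-contained sketch of that idea; the stripped-down classes and the processorForMime() helper are illustrative, and the selection logic itself is not part of this hunk:

    class JsonProcessor(object):          # stand-in for the processor classes above
        mime_type = 'application/json'


    class XMLProcessor(object):
        mime_type = 'application/xml'


    processors_list = (JsonProcessor, XMLProcessor)
    default_processor = JsonProcessor
    available_processors_mime_dict = dict((cls.mime_type, cls) for cls in processors_list)


    def processorForMime(mime):
        # Unknown mime types fall back to the default (JSON) processor
        return available_processors_mime_dict.get(mime, default_processor)


    assert processorForMime('application/xml') is XMLProcessor
    assert processorForMime('text/html') is JsonProcessor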

View File

@ -36,10 +36,14 @@ from django.utils.translation import ugettext as _
from uds.core.ui.UserInterface import UserInterface
from uds.core import Environmentable
from uds.core import Serializable
import base64, os.path, sys, logging
import base64
import os.path
import sys
import logging
logger = logging.getLogger(__name__)
class Module(UserInterface, Environmentable, Serializable):
'''
Base class for all modules used by UDS.
@ -90,17 +94,17 @@ class Module(UserInterface, Environmentable, Serializable):
Environmentable is a base class that provides utility method to access a separate Environment for every single
module.
'''
#: Which coded to use to encode module by default.
#: This overrides the Environmentable and Serializable Attribute, but in all cases we are using 'base64'
# : Which coded to use to encode module by default.
# : This overrides the Environmentable and Serializable Attribute, but in all cases we are using 'base64'
CODEC = 'base64' # Can be zip, hez, bzip, base64, uuencoded
#: Basic name used to provide the administrator an "huma readable" form for the module
# : Basic name used to provide the administrator an "huma readable" form for the module
typeName = 'Base Module'
#: Internal type name, used by system to locate this module
# : Internal type name, used by system to locate this module
typeType = 'BaseModule'
#: Description of this module, used at admin level
# : Description of this module, used at admin level
typeDescription = 'Base Module'
#: Icon file, relative to module folders
# : Icon file, relative to module folders
iconFile = 'base.png' # This is expected to be png, use this format always
class ValidationException(Exception):
@ -150,9 +154,8 @@ class Module(UserInterface, Environmentable, Serializable):
'''
return _(cls.typeDescription)
@classmethod
def icon(cls, inBase64 = True):
def icon(cls, inBase64=True):
'''
Reads the file specified by iconFile at module folder, and returns it content.
This is used to obtain an icon so administration can represent it.
@ -167,7 +170,7 @@ class Module(UserInterface, Environmentable, Serializable):
'iconFile' class attribute
'''
logger.debug('Loading icon for class {0} ({1})'.format(cls, cls.iconFile))
file_ = open( os.path.dirname(sys.modules[cls.__module__].__file__) + '/' + cls.iconFile, 'rb')
file_ = open(os.path.dirname(sys.modules[cls.__module__].__file__) + '/' + cls.iconFile, 'rb')
data = file_.read()
file_.close()
if inBase64 == True:
@ -196,7 +199,7 @@ class Module(UserInterface, Environmentable, Serializable):
'''
return [True, _("No connection checking method is implemented.")]
def __init__(self, environment, values = None):
def __init__(self, environment, values=None):
'''
Do not forget to invoke this in your derived class using
"super(self.__class__, self).__init__(environment, values)".
@ -276,4 +279,3 @@ class Module(UserInterface, Environmentable, Serializable):
Nothing
'''
pass
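The icon() classmethod in this hunk resolves iconFile relative to the module that defines the class and returns its bytes, base64-encoded by default. A paraphrase as a free function; icon_for() is an invented name and a sketch, not the UDS API:

    import base64
    import os.path
    import sys


    def icon_for(cls, inBase64=True):
        path = os.path.join(os.path.dirname(sys.modules[cls.__module__].__file__), cls.iconFile)
        with open(path, 'rb') as f:        # the original uses open()/read()/close() without a context manager
            data = f.read()
        return base64.b64encode(data) if inBase64 else data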

View File

@ -35,6 +35,7 @@ from __future__ import unicode_literals
TEMP_ENV = 'temporary'
GLOBAL_ENV = 'global'
class Environment(object):
'''
Class to manipulate the associated environment with "environmentable" classes (mainly modules).
@ -43,7 +44,7 @@ class Environment(object):
The environment is composed of a "cache" and a "storage". First are volatile data, while second are persistent data.
'''
def __init__(self, uniqueKey, idGenerators = {}):
def __init__(self, uniqueKey, idGenerators={}):
'''
Initialized the Environment for the specified id
@param uniqueId: Key for this environment
@ -80,9 +81,7 @@ class Environment(object):
@param generatorId: Id of the generator to obtain
@return: Generator for that id, or None if no generator for that id is found
'''
if self._idGenerators.has_key(generatorId):
return self._idGenerators[generatorId]
return None
return self._idGenerators.get(generatorId, None)
def key(self):
'''
@ -102,7 +101,7 @@ class Environment(object):
v.release()
@staticmethod
def getEnvForTableElement(tblName, id_, idGeneratorsTypes = {}):
def getEnvForTableElement(tblName, id_, idGeneratorsTypes={}):
'''
From a table name, and a id, tries to load the associated environment or creates a new
one if no environment exists at database. The table name and the id are used to obtain the key
@ -114,7 +113,7 @@ class Environment(object):
'''
name = 't-' + tblName + '-' + str(id_)
idGenerators = {}
for k,v in idGeneratorsTypes.iteritems():
for k, v in idGeneratorsTypes.iteritems():
idGenerators[k] = v(name)
return Environment(name, idGenerators)
@ -125,7 +124,7 @@ class Environment(object):
@param type_: Type
@return Associated Environment
'''
return Environment('type-'+str(type_))
return Environment('type-' + str(type_))
@staticmethod
def getTempEnv():
@ -142,6 +141,7 @@ class Environment(object):
'''
return Environment(GLOBAL_ENV) # This environment is a global environment for general utility.
class Environmentable(object):
'''
This is a base class provided for all objects that have an environment associated. These are mainly modules
@ -203,4 +203,3 @@ class Environmentable(object):
Generator for the object and the id specified
'''
return self._env.idGenerators(generatorId)
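getEnvForTableElement() above keys each environment to a table name plus row id and instantiates one id generator per requested type. A small stand-alone illustration of that composition; CounterGenerator and the table name are invented:

    class CounterGenerator(object):        # hypothetical generator type; real ones come via idGeneratorsTypes
        def __init__(self, basename):
            self.basename = basename


    tblName, id_ = 'uds_userservice', 42
    name = 't-' + tblName + '-' + str(id_)                # -> 't-uds_userservice-42'
    idGenerators = dict((k, v(name)) for k, v in {'counter': CounterGenerator}.items())
    # Environment(name, idGenerators) then hands generators out via idGenerators.get(generatorId, None)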

View File

@ -93,4 +93,3 @@ class Serializable(object):
overwrite this attribute to set another codec
'''
return self.unmarshal(str_.decode(self.CODEC))

View File

@ -32,6 +32,7 @@ Core of UDS.
This package contains all core-related code for UDS
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
# Core needs tasks manager to register scheduled jobs, so we ensure of that here
from Environment import Environmentable
@ -40,4 +41,3 @@ from BaseModule import Module
import services
import auths
import transports

View File

@ -56,7 +56,7 @@ class JobsFactory(object):
return self._jobs
def insert(self, name, type_):
logger.debug('Inserting job {0} of type {1}'.format(name, type_))
logger.debug('Inserting job {0} of type_ {1}'.format(name, type_))
try:
self._jobs[name] = type_
except Exception, e:

View File

@ -36,7 +36,8 @@ from django.conf import settings
from Crypto.PublicKey import RSA
from OpenSSL import crypto
from Crypto.Random import atfork
import hashlib, array
import hashlib
import array
import logging
@ -48,6 +49,7 @@ logger = logging.getLogger(__name__)
# import os
# RSA.generate(1024, os.urandom).exportKey()
class CryptoManager(object):
CODEC = 'base64'
@ -76,7 +78,7 @@ class CryptoManager(object):
def xor(self, s1, s2):
s1, s2 = s1.encode('utf-8'), s2.encode('utf-8')
mult = (len(s1)/len(s2)) + 1
mult = (len(s1) / len(s2)) + 1
s1 = array.array(b'B', s1)
s2 = array.array(b'B', s2 * mult)
return array.array(b'B', (s1[i] ^ s2[i] for i in range(len(s1)))).tostring()
@ -88,7 +90,7 @@ class CryptoManager(object):
raise e
return pk
def loadCertificate(self,certificate):
def loadCertificate(self, certificate):
try:
cert = crypto.load_certificate(crypto.FILETYPE_PEM, certificate)
except crypto.Error as e:
@ -98,7 +100,6 @@ class CryptoManager(object):
def certificateString(self, certificate):
return certificate.replace('-----BEGIN CERTIFICATE-----', '').replace('-----END CERTIFICATE-----', '').replace('\n', '')
def hash(self, string):
if string is '' or string is None:
return ''
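The xor() method in this hunk repeats the second string until it covers the first and XORs the byte pairs. A version-neutral sketch of the same idea; the original relies on Python 2 integer division and array.tostring():

    def xor(s1, s2):
        b1 = bytearray(s1.encode('utf-8'))
        b2 = s2.encode('utf-8')
        b2 = bytearray((b2 * (len(b1) // len(b2) + 1))[:len(b1)])   # repeat the key, then trim to len(b1)
        return bytes(bytearray(x ^ y for x, y in zip(b1, b2)))


    masked = xor('some secret', 'key')     # same length as the UTF-8 input; XOR with the key again restores it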

View File

@ -30,14 +30,17 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
import os, tempfile, zipfile, uuid
import os
import uuid
from django.http import HttpResponse, Http404
from django.core.servers.basehttp import FileWrapper
import logging
logger = logging.getLogger(__name__)
class DownloadsManager(object):
'''
Manager so connectors can register their own downloadables
@ -61,8 +64,7 @@ class DownloadsManager(object):
DownloadsManager._manager = DownloadsManager()
return DownloadsManager._manager
def registerDownloadable(self, name, comment, path, mime = 'application/octet-stream'):
def registerDownloadable(self, name, comment, path, mime='application/octet-stream'):
'''
Registers a downloadable file.
@param name: name shown
@ -70,16 +72,15 @@ class DownloadsManager(object):
@params zip: If download as zip
'''
_id = str(uuid.uuid5(self._namespace, name))
self._downloadables[_id] = { 'name': name, 'comment' : comment, 'path' : path, 'mime' : mime }
self._downloadables[_id] = {'name': name, 'comment': comment, 'path': path, 'mime': mime}
def getDownloadables(self):
return self._downloadables
def send(self, request, _id):
if self._downloadables.has_key(_id) is False:
if _id not in self._downloadables:
return Http404()
return self.__send_file(request, self._downloadables[_id]['name'], self._downloadables[_id]['path'], self._downloadables[_id]['mime']);
return self.__send_file(request, self._downloadables[_id]['name'], self._downloadables[_id]['path'], self._downloadables[_id]['mime'])
def __send_file(self, request, name, filename, mime):
"""
@ -92,4 +93,3 @@ class DownloadsManager(object):
response['Content-Length'] = os.path.getsize(filename)
response['Content-Disposition'] = 'attachment; filename=' + name
return response
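registerDownloadable() above derives a stable id with uuid.uuid5 over a namespace and the download name, and send() looks that id up with a plain membership test (the replacement for has_key()). A compressed stand-alone sketch; the namespace is generated on the fly here and the file name is made up, while the real manager keeps one fixed namespace:

    import uuid

    _namespace = uuid.uuid4()              # illustrative; the real manager reuses a fixed namespace
    _downloadables = {}


    def registerDownloadable(name, comment, path, mime='application/octet-stream'):
        _id = str(uuid.uuid5(_namespace, name))
        _downloadables[_id] = {'name': name, 'comment': comment, 'path': path, 'mime': mime}
        return _id


    _id = registerDownloadable('sample-client.deb', 'Sample client package', '/tmp/sample-client.deb')
    assert _id in _downloadables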

View File

@ -29,7 +29,7 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from uds.models import UserService
from uds.models import DeployedServicePublication
@ -52,16 +52,17 @@ OT_USERSERVICE, OT_PUBLICATION, OT_DEPLOYED_SERVICE, OT_SERVICE, OT_PROVIDER, OT
# Dict for translations
transDict = {
UserService : OT_USERSERVICE,
DeployedServicePublication : OT_PUBLICATION,
DeployedService : OT_DEPLOYED_SERVICE,
Service : OT_SERVICE,
Provider : OT_PROVIDER,
User : OT_USER,
Group : OT_GROUP,
Authenticator : OT_AUTHENTICATOR
UserService: OT_USERSERVICE,
DeployedServicePublication: OT_PUBLICATION,
DeployedService: OT_DEPLOYED_SERVICE,
Service: OT_SERVICE,
Provider: OT_PROVIDER,
User: OT_USER,
Group: OT_GROUP,
Authenticator: OT_AUTHENTICATOR
}
class LogManager(object):
'''
Manager for logging (at database) events
@ -84,15 +85,15 @@ class LogManager(object):
from uds.models import getSqlDatetime
from uds.models import Log
qs = Log.objects.filter(owner_id = owner_id, owner_type = owner_type)
qs = Log.objects.filter(owner_id=owner_id, owner_type=owner_type)
# First, ensure we do not have more than requested logs, and we can put one more log item
if qs.count() >= GlobalConfig.MAX_LOGS_PER_ELEMENT.getInt():
for i in qs.order_by('-created',)[GlobalConfig.MAX_LOGS_PER_ELEMENT.getInt()-1:]:
for i in qs.order_by('-created',)[GlobalConfig.MAX_LOGS_PER_ELEMENT.getInt() - 1:]:
i.delete()
if avoidDuplicates is True:
try:
lg = Log.objects.filter(owner_id = owner_id, owner_type = owner_type, level = level, source = source).order_by('-created', '-id')[0]
lg = Log.objects.filter(owner_id=owner_id, owner_type=owner_type, level=level, source=source).order_by('-created', '-id')[0]
if lg.message == message:
# Do not log again, already logged
return
@ -101,19 +102,18 @@ class LogManager(object):
# now, we add new log
try:
Log.objects.create(owner_type = owner_type, owner_id = owner_id, created = getSqlDatetime(), source = source, level = level, data = message)
Log.objects.create(owner_type=owner_type, owner_id=owner_id, created=getSqlDatetime(), source=source, level=level, data=message)
except:
# Some objects will not get logged, such as System administrator objects
pass
def __getLogs(self, owner_type, owner_id, limit):
'''
Get all logs associated with an user service, ordered by date
'''
from uds.models import Log
qs = Log.objects.filter(owner_id = owner_id, owner_type = owner_type)
qs = Log.objects.filter(owner_id=owner_id, owner_type=owner_type)
return [{'date': x.created, 'level': x.level, 'source': x.source, 'message': x.data} for x in reversed(qs.order_by('-created', '-id')[:limit])]
def __clearLogs(self, owner_type, owner_id):
@ -122,11 +122,9 @@ class LogManager(object):
'''
from uds.models import Log
Log.objects.filter(owner_id = owner_id, owner_type = owner_type).delete()
Log.objects.filter(owner_id=owner_id, owner_type=owner_type).delete()
def doLog(self, wichObject, level, message, source, avoidDuplicates = True):
def doLog(self, wichObject, level, message, source, avoidDuplicates=True):
'''
Do the logging for the requested object.
@ -141,7 +139,6 @@ class LogManager(object):
else:
logger.debug('Requested doLog for a type of object not covered: {0}'.format(wichObject))
def getLogs(self, wichObject, limit):
'''
Get the logs associated with "wichObject", limiting to "limit" (default is GlobalConfig.MAX_LOGS_PER_ELEMENT)
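The trimming slice in the hunk above (order_by('-created')[MAX_LOGS_PER_ELEMENT - 1:]) keeps only the newest MAX_LOGS_PER_ELEMENT - 1 records, leaving room for the entry about to be written. A plain-list sketch of that rule; the constant value is invented, the real one comes from GlobalConfig:

    MAX_LOGS_PER_ELEMENT = 100             # illustrative; GlobalConfig.MAX_LOGS_PER_ELEMENT in UDS


    def prune(logs):
        '''logs is ordered oldest to newest, like Log rows ordered by created.'''
        if len(logs) >= MAX_LOGS_PER_ELEMENT:
            newest_first = list(reversed(logs))
            del newest_first[MAX_LOGS_PER_ELEMENT - 1:]   # mirrors the qs.order_by('-created')[MAX - 1:] deletion
            logs = list(reversed(newest_first))
        return logs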

View File

@ -44,16 +44,17 @@ logger = logging.getLogger(__name__)
PUBTAG = 'pm-'
class PublicationOldMachinesCleaner(DelayedTask):
def __init__(self, publicationId):
super(PublicationOldMachinesCleaner,self).__init__()
super(PublicationOldMachinesCleaner, self).__init__()
self._id = publicationId
@transaction.atomic
def run(self):
try:
dsp = DeployedServicePublication.objects.get(pk=self._id)
if (dsp.state!=State.REMOVABLE):
if (dsp.state != State.REMOVABLE):
logger.info('Already removed')
now = getSqlDatetime()
@ -65,9 +66,10 @@ class PublicationOldMachinesCleaner(DelayedTask):
# Removed provider, no problem at all, no update is done
pass
class PublicationLauncher(DelayedTask):
def __init__(self, publish):
super(PublicationLauncher,self).__init__()
super(PublicationLauncher, self).__init__()
self._publishId = publish.id
def run(self):
@ -85,7 +87,7 @@ class PublicationLauncher(DelayedTask):
deployedService.current_pub_revision += 1
deployedService.save()
PublicationFinishChecker.checkAndUpdateState(dsp, pi, state)
except Exception as e:
except Exception:
logger.exception("Exception launching publication")
dsp.state = State.ERROR
dsp.save()
@ -94,7 +96,7 @@ class PublicationLauncher(DelayedTask):
# Delayed Task that checks if a publication is done
class PublicationFinishChecker(DelayedTask):
def __init__(self, publish):
super(PublicationFinishChecker,self).__init__()
super(PublicationFinishChecker, self).__init__()
self._publishId = publish.id
self._state = publish.state
@ -111,10 +113,10 @@ class PublicationFinishChecker(DelayedTask):
# Now we mark, if it exists, the previous usable publication as "Removable"
if State.isPreparing(prevState):
for old in dsp.deployed_service.publications.filter(state=State.USABLE):
old.state=State.REMOVABLE
old.state = State.REMOVABLE
old.save()
pc = PublicationOldMachinesCleaner(old.id)
pc.register(GlobalConfig.SESSION_EXPIRE_TIME.getInt(True)*3600, 'pclean-'+str(old.id), True)
pc.register(GlobalConfig.SESSION_EXPIRE_TIME.getInt(True) * 3600, 'pclean-' + str(old.id), True)
dsp.setState(State.USABLE)
dsp.deployed_service.markOldUserServicesAsRemovables(dsp)
@ -152,7 +154,7 @@ class PublicationFinishChecker(DelayedTask):
@transaction.atomic
def run(self):
logger.debug('Checking publication finished {0}'.format(self._publishId))
try :
try:
dsp = DeployedServicePublication.objects.select_for_update().get(pk=self._publishId)
if dsp.state != self._state:
logger.debug('Task overrided by another task (state of item changed)')
@ -164,6 +166,7 @@ class PublicationFinishChecker(DelayedTask):
except Exception, e:
logger.debug('Deployed service not found (erased from database) {0} : {1}'.format(e.__class__, e))
class PublicationManager(object):
_manager = None
@ -176,21 +179,20 @@ class PublicationManager(object):
PublicationManager._manager = PublicationManager()
return PublicationManager._manager
def publish(self, deployedService):
with transaction.atomic():
if deployedService.publications.select_for_update().filter(state__in=State.PUBLISH_STATES).count() > 0:
raise PublishException(_('Already publishing. Wait for previous publication to finish and try again'))
try:
now = getSqlDatetime()
dsp = deployedService.publications.create(state = State.LAUNCHING, state_date = now, publish_date = now, revision = deployedService.current_pub_revision)
dsp = deployedService.publications.create(state=State.LAUNCHING, state_date=now, publish_date=now, revision=deployedService.current_pub_revision)
DelayedTaskRunner.runner().insert(PublicationLauncher(dsp), 4, PUBTAG + str(dsp.id))
except Exception as e:
logger.debug('Caught exception at publish: {0}'.format(e))
raise PublishException(str(e))
@transaction.atomic
def cancel(self,dsp):
def cancel(self, dsp):
dsp = DeployedServicePublication.objects.select_for_update().get(id=dsp.id)
if dsp.state not in State.PUBLISH_STATES:
raise PublishException(_('Can\'t cancel non running publication'))
@ -223,4 +225,3 @@ class PublicationManager(object):
PublicationFinishChecker.checkAndUpdateState(dsp, pi, state)
except Exception, e:
raise PublishException(str(e))

View File

@ -29,6 +29,7 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from uds.core.util.Config import GlobalConfig
@ -56,17 +57,16 @@ class StatsManager(object):
StatsManager._manager = StatsManager()
return StatsManager._manager
def __doCleanup(self, dbTable):
from uds.models import getSqlDatetime, optimizeTable
from django.db import connection, transaction
from django.db import connection
import datetime
import time
minTime = time.mktime( (getSqlDatetime() - datetime.timedelta(days = GlobalConfig.STATS_DURATION.getInt())).timetuple() )
minTime = time.mktime((getSqlDatetime() - datetime.timedelta(days=GlobalConfig.STATS_DURATION.getInt())).timetuple())
# Don't like how django executes this (recovers all IDS and lauches "DELETE .. WHERE id IN ...)
#StatsCounters.objects.filter(stamp__lt=minTime).delete()
# StatsCounters.objects.filter(stamp__lt=minTime).delete()
# Used dict, cause environment says _meta is not known :-)
query = 'DELETE FROM {0} where STAMP < {1}'.format(dbTable, minTime)
cursor = connection.cursor()
@ -75,9 +75,8 @@ class StatsManager(object):
# This will ensure table is in "good" shape (testing right now, will see at future)
optimizeTable(dbTable)
# Counter stats
def addCounter(self, owner_type, owner_id, counterType, counterValue, stamp = None):
def addCounter(self, owner_type, owner_id, counterType, counterValue, stamp=None):
'''
Adds a new counter stats to database.
@ -110,8 +109,7 @@ class StatsManager(object):
logger.error('Exception handling counter stats saving (maybe database is full?)')
return False
def getCounters(self, ownerType, counterType, ownerIds, since, to, limit, use_max = False):
def getCounters(self, ownerType, counterType, ownerIds, since, to, limit, use_max=False):
'''
Retrieves counters from item
@ -134,8 +132,7 @@ class StatsManager(object):
since = int(time.mktime(since.timetuple()))
to = int(time.mktime(to.timetuple()))
return StatsCounters.get_grouped(ownerType, counterType, owner_id = ownerIds, since = since, to = to, limit = limit, use_max = use_max)
return StatsCounters.get_grouped(ownerType, counterType, owner_id=ownerIds, since=since, to=to, limit=limit, use_max=use_max)
def cleanupCounters(self):
'''
@ -147,7 +144,7 @@ class StatsManager(object):
# Event stats
# Counter stats
def addEvent(self, owner_type, owner_id, eventType, stamp = None):
def addEvent(self, owner_type, owner_id, eventType, stamp=None):
'''
Adds a new event stat to database.
@ -201,5 +198,3 @@ class StatsManager(object):
from uds.models import StatsEvents
self.__doCleanup(StatsEvents.__dict__['_meta'].db_table)
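__doCleanup() above computes a Unix-timestamp cutoff from STATS_DURATION days and issues a raw DELETE instead of letting the ORM expand all row ids. The cutoff computation in isolation; duration and table name are illustrative:

    import datetime
    import time

    STATS_DURATION = 365                   # days; GlobalConfig.STATS_DURATION in UDS

    now = datetime.datetime.now()          # the real code uses getSqlDatetime()
    minTime = time.mktime((now - datetime.timedelta(days=STATS_DURATION)).timetuple())
    query = 'DELETE FROM {0} where STAMP < {1}'.format('some_stats_table', minTime)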

View File

@ -30,15 +30,20 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from uds.core.jobs.Scheduler import Scheduler
from uds.core.jobs.DelayedTaskRunner import DelayedTaskRunner
from uds.core import jobs
from uds.core.util.Config import GlobalConfig
import threading, time, signal
import threading
import time
import signal
import logging
logger = logging.getLogger(__name__)
class SchedulerThread(threading.Thread):
def run(self):
Scheduler.scheduler().run()
@ -46,6 +51,7 @@ class SchedulerThread(threading.Thread):
def notifyTermination(self):
Scheduler.scheduler().notifyTermination()
class DelayedTaskThread(threading.Thread):
def run(self):
DelayedTaskRunner.runner().run()
@ -75,13 +81,12 @@ class TaskManager(object):
jobName = jobType.friendly_name
jobs.factory().insert(jobName, jobType)
@staticmethod
def registerScheduledTasks():
logger.info("Registering sheduled tasks")
from uds.core import workers
# Simply import this to make workers "auto import"
from uds.core import workers # @UnusedImport
@staticmethod
def run():
@ -96,13 +101,13 @@ class TaskManager(object):
logger.info('Starting {0} schedulers and {1} task executors'.format(noSchedulers, noDelayedTasks))
threads = []
for n in range(noSchedulers):
for _ in range(noSchedulers):
thread = SchedulerThread()
thread.start()
threads.append(thread)
time.sleep(0.5)
for n in range(noDelayedTasks):
for _ in range(noDelayedTasks):
thread = DelayedTaskThread()
thread.start()
threads.append(thread)
@ -110,18 +115,16 @@ class TaskManager(object):
signal.signal(signal.SIGTERM, TaskManager.sigTerm)
# Debugging stuff
#import guppy
#from guppy.heapy import Remote
#Remote.on()
# import guppy
# from guppy.heapy import Remote
# Remote.on()
#gc.set_debug(gc.DEBUG_LEAK)
while( TaskManager.keepRunning ):
# gc.set_debug(gc.DEBUG_LEAK)
while(TaskManager.keepRunning):
time.sleep(1)
for thread in threads:
thread.notifyTermination()
# The join of threads will happen before termination, so its fine to just return here
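TaskManager.run() above starts a configurable number of scheduler and delayed-task threads, installs a SIGTERM handler, and sleeps until keepRunning is cleared. A stripped-down, stand-alone version of that start-up pattern; worker bodies and counts are invented:

    import signal
    import threading
    import time

    keepRunning = True


    def sigTerm(signum, frame):
        global keepRunning
        keepRunning = False


    def worker():                          # stands in for SchedulerThread / DelayedTaskThread
        while keepRunning:
            time.sleep(1)


    def run(noWorkers=2):
        threads = [threading.Thread(target=worker) for _ in range(noWorkers)]
        for thread in threads:
            thread.start()
        signal.signal(signal.SIGTERM, sigTerm)
        while keepRunning:
            time.sleep(1)
        # as in the original, workers are then notified and the function simply returns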

View File

@ -30,6 +30,7 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
@ -39,6 +40,7 @@ import logging
logger = logging.getLogger(__name__)
class UserPrefsManager(object):
_manager = None
@ -58,7 +60,7 @@ class UserPrefsManager(object):
'''
Register an array of preferences for a module
'''
self._prefs[modName] = { 'friendlyName' : friendlyModName, 'prefs' : prefs }
self._prefs[modName] = {'friendlyName': friendlyModName, 'prefs': prefs}
def getPreferencesForUser(self, modName, user):
'''
@ -68,7 +70,7 @@ class UserPrefsManager(object):
for up in user.preferences.filter(module=modName):
prefs[up.name] = up.value
for p in self._prefs[modName]['prefs']:
if prefs.has_key(p.getName()) is False:
if p.getName() not in prefs:
prefs[p.getName()] = p.getDefValue()
return prefs
@ -82,8 +84,8 @@ class UserPrefsManager(object):
form = forms.Form()
for p in v['prefs']:
name = self.__nameFor(mod, p.getName())
val = data[name] if data.has_key(name) else p.getDefValue()
form.fields[ name ] = p.formField(val)
val = data[name] if name in data else p.getDefValue()
form.fields[name] = p.formField(val)
res += '<fieldset class="prefset"><legend>' + v['friendlyName'] + '</legend>' + form.as_p() + '</fieldset>'
return res
@ -97,12 +99,11 @@ class UserPrefsManager(object):
grp = []
for p in v['prefs']:
name = self.__nameFor(mod, p.getName())
val = data[name] if data.has_key(name) else p.getDefValue()
grp.append( { 'name' : name, 'gui' : p.guiField(val).guiDescription(), 'value' : val } )
res.append( {'moduleLabel': v['friendlyName'], 'prefs': grp} )
val = data[name] if name in data else p.getDefValue()
grp.append({'name': name, 'gui': p.guiField(val).guiDescription(), 'value': val})
res.append({'moduleLabel': v['friendlyName'], 'prefs': grp})
return res
def processRequestForUserPreferences(self, user, data):
'''
Returns a list of errors in case of error, else return None
@ -122,7 +123,7 @@ class UserPrefsManager(object):
for p in v['prefs']:
name = self.__nameFor(mod, p.getName())
logger.debug(name)
prefs.append({ 'module': mod, 'name': p.getName(), 'value': form.cleaned_data[name] } )
prefs.append({'module': mod, 'name': p.getName(), 'value': form.cleaned_data[name]})
user.preferences.all().delete()
for p in prefs:
user.preferences.create(module=p['module'], name=p['name'], value=p['value'])
@ -137,20 +138,20 @@ class UserPrefsManager(object):
logger.debug(mod)
for p in v['prefs']:
name = self.__nameFor(mod, p.getName())
if data.has_key(name):
prefs.append( { 'module': mod, 'name': p.getName(), 'value': data[name] } )
if name in data:
prefs.append({'module': mod, 'name': p.getName(), 'value': data[name]})
user.preferences.all().delete()
for p in prefs:
user.preferences.create(module=p['module'], name=p['name'], value=p['value'])
class UserPreference(object):
TYPE = 'abstract'
def __init__(self, **kwargs):
self._name = kwargs['name']
self._label = kwargs['label']
self._defValue = kwargs['defvalue'] if kwargs.has_key('defvalue') else None
self._defValue = kwargs.get('defvalue', None)
self._css = 'form-control'
def getName(self):
@ -173,48 +174,54 @@ class UserPreference(object):
class UserTextPreference(UserPreference):
TYPE = 'text'
def __init__(self, **kwargs):
super(self.__class__,self).__init__(**kwargs)
self._length = kwargs['length'] if kwargs.has_key('length') else None
super(self.__class__, self).__init__(**kwargs)
self._length = kwargs.get('length', None)
def formField(self, value):
return forms.CharField(label = _(self._label), initial = value, attrs = {'class': self._css})
return forms.CharField(label=_(self._label), initial=value, attrs={'class': self._css})
class UserNumericPreference(UserPreference):
TYPE = 'numeric'
def __init__(self, **kwargs):
super(self.__class__,self).__init__(**kwargs)
self._min = kwargs['minvalue'] if kwargs.has_key('minvalue') else None
self._max = kwargs['maxvalue'] if kwargs.has_key('maxvalue') else None
super(self.__class__, self).__init__(**kwargs)
self._min = kwargs.get('minvalue', None)
self._max = kwargs.get('maxvalue', None)
def formField(self, value):
return forms.IntegerField(label = _(self._label), initial = value, min_value = self._min, max_value = self._max,
widget = forms.TextInput(attrs = {'class': self._css}))
return forms.IntegerField(label=_(self._label), initial=value, min_value=self._min, max_value=self._max,
widget=forms.TextInput(attrs={'class': self._css}))
class UserChoicePreference(UserPreference):
TYPE = 'choice'
def __init__(self, **kwargs):
super(self.__class__,self).__init__(**kwargs)
super(self.__class__, self).__init__(**kwargs)
'''
Values are a tuple of
'''
self._values = kwargs['values']
def formField(self, value):
return forms.ChoiceField(label = _(self._label), initial = value, choices = self._values,
widget = forms.Select(attrs = {'class': self._css}))
return forms.ChoiceField(label=_(self._label), initial=value, choices=self._values,
widget=forms.Select(attrs={'class': self._css}))
def guiField(self, value):
vals = []
for v in self._values:
vals.append( { 'id': v[0], 'text': _(v[1]) } )
return gui.ChoiceField(label = _(self._label), rdonly = False, values = vals, defvalue=value, tooltip = _(self._label))
vals.append({'id': v[0], 'text': _(v[1])})
return gui.ChoiceField(label=_(self._label), rdonly=False, values=vals, defvalue=value, tooltip=_(self._label))
class UserCheckboxPreference(UserPreference):
TYPE = 'checkbox'
def __init__(self, **kwargs):
super(self.__class__,self).__init__(**kwargs)
super(self.__class__, self).__init__(**kwargs)
class CommonPrefs(object):
@ -235,10 +242,11 @@ class CommonPrefs(object):
'''
Get width based on screenSizePref value
'''
return { CommonPrefs.SZ_640x480 : (640, 480),
CommonPrefs.SZ_800x600 : (800, 600),
CommonPrefs.SZ_1024x768 : (1024, 768),
CommonPrefs.SZ_FULLSCREEN : (-1, -1)
return {
CommonPrefs.SZ_640x480: (640, 480),
CommonPrefs.SZ_800x600: (800, 600),
CommonPrefs.SZ_1024x768: (1024, 768),
CommonPrefs.SZ_FULLSCREEN: (-1, -1)
}[prefsDict[CommonPrefs.SZ_PREF]]
@staticmethod
@ -246,26 +254,27 @@ class CommonPrefs(object):
'''
Get depth based on depthPref value
'''
return { CommonPrefs.DEPTH_8 : 8,
CommonPrefs.DEPTH_16 : 16,
CommonPrefs.DEPTH_24 : 24,
CommonPrefs.DEPTH_32 : 32 }[ prefsDict[CommonPrefs.DEPTH_PREF] ]
return {
CommonPrefs.DEPTH_8: 8,
CommonPrefs.DEPTH_16: 16,
CommonPrefs.DEPTH_24: 24,
CommonPrefs.DEPTH_32: 32
}[prefsDict[CommonPrefs.DEPTH_PREF]]
screenSizePref = UserChoicePreference(name = SZ_PREF, label = ugettext_lazy('Screen Size'), defvalue = SZ_FULLSCREEN, values = (
screenSizePref = UserChoicePreference(name=SZ_PREF,
label=ugettext_lazy('Screen Size'),
defvalue=SZ_FULLSCREEN,
values=(
(SZ_640x480, '640x480'),
(SZ_800x600, '800x600'),
(SZ_1024x768, '1024x768'),
(SZ_FULLSCREEN, ugettext_lazy('Full Screen'))
)
)
depthPref = UserChoicePreference(name = DEPTH_PREF, label = ugettext_lazy('Screen colors'), defvalue = DEPTH_24, values = (
depthPref = UserChoicePreference(name=DEPTH_PREF, label=ugettext_lazy('Screen colors'), defvalue=DEPTH_24, values=(
(DEPTH_8, ugettext_lazy('8 bits')),
(DEPTH_16, ugettext_lazy('16 bits')),
(DEPTH_24, ugettext_lazy('24 bits')),
(DEPTH_32, ugettext_lazy('32 bits')),
)
)

View File

@ -30,6 +30,7 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
from django.db.models import Q
@ -50,9 +51,10 @@ logger = logging.getLogger(__name__)
USERSERVICE_TAG = 'cm-'
class UserServiceOpChecker(DelayedTask):
def __init__(self, service):
super(UserServiceOpChecker,self).__init__()
super(UserServiceOpChecker, self).__init__()
self._svrId = service.id
self._state = service.state
@ -118,7 +120,7 @@ class UserServiceOpChecker(DelayedTask):
if checkLater:
UserServiceOpChecker.checkLater(userService, userServiceInstance)
except Exception as e:
logger.exception('Checkin service state')
logger.exception('Checking service state')
log.doLog(userService, log.ERROR, 'Exception: {0}'.format(e), log.INTERNAL)
userService.setState(State.ERROR)
userService.save()
@ -194,11 +196,9 @@ class UserServiceManager(object):
numberOfServices = deployedService.userServices.select_for_update().filter(
state__in=[State.PREPARING, State.USABLE]).count()
if serviceInstance.maxDeployed <= numberOfServices:
raise MaxServicesReachedException(
'Max number of allowed deployments for service reached'
)
if serviceInstance.maxDeployed <= numberOfServices:
raise MaxServicesReachedException('Max number of allowed deployments for service reached')
def __createCacheAtDb(self, deployedServicePublication, cacheLevel):
'''
@ -207,9 +207,9 @@ class UserServiceManager(object):
# Checks if maxDeployed has been reached and if so, raises an exception
self.__checkMaxDeployedReached(deployedServicePublication.deployed_service)
now = getSqlDatetime()
return deployedServicePublication.userServices.create(cache_level = cacheLevel, state = State.PREPARING, os_state = State.PREPARING,
state_date=now, creation_date=now, data = '', deployed_service = deployedServicePublication.deployed_service,
user = None, in_use = False )
return deployedServicePublication.userServices.create(cache_level=cacheLevel, state=State.PREPARING, os_state=State.PREPARING,
state_date=now, creation_date=now, data='', deployed_service=deployedServicePublication.deployed_service,
user=None, in_use=False)
def __createAssignedAtDb(self, deployedServicePublication, user):
'''
@ -231,7 +231,6 @@ class UserServiceManager(object):
return deployedService.userServices.create(cache_level=0, state=State.PREPARING, os_state=State.PREPARING,
state_date=now, creation_date=now, data='', publication=None, user=user, in_use=False)
def createCacheFor(self, deployedServicePublication, cacheLevel):
'''
Creates a new cache for the deployed service publication at level indicated
@ -278,8 +277,6 @@ class UserServiceManager(object):
logger.debug("Assignable: {0}".format(assignable))
return assignable
def moveToLevel(self, cache, cacheLevel):
'''
Moves a cache element from one level to another
@ -315,7 +312,6 @@ class UserServiceManager(object):
UserServiceOpChecker.makeUnique(uService, ui, state)
return uService
def remove(self, uService):
'''
Removes a uService element
@ -342,22 +338,21 @@ class UserServiceManager(object):
def removeInfoItems(self, dsp):
dsp.cachedDeployedService.select_for_update().filter(state__in=State.INFO_STATES).delete()
def getAssignationForUser(self, ds, user):
# First, we try to locate an already assigned service
existing = ds.assignedUserServices().filter(user=user,state__in=State.VALID_STATES)
existing = ds.assignedUserServices().filter(user=user, state__in=State.VALID_STATES)
lenExisting = existing.count()
if lenExisting > 0: # Already has 1 assigned
logger.debug('Found assigned service from {0} to user {1}'.format(ds, user.name))
return existing[0]
#if existing[0].state == State.ERROR:
# if existing[0].state == State.ERROR:
# if lenExisting > 1:
# return existing[1]
#else:
# else:
# return existing[0]
# Now try to locate 1 from cache already "ready" (must be usable and at level 1)
cache = ds.cachedUserServices().select_for_update().filter(cache_level = services.UserDeployment.L1_CACHE, state = State.USABLE)[:1]
cache = ds.cachedUserServices().select_for_update().filter(cache_level=services.UserDeployment.L1_CACHE, state=State.USABLE)[:1]
if len(cache) > 0:
cache = cache[0] # Database object
cache.assignToUser(user)
@ -369,7 +364,7 @@ class UserServiceManager(object):
cache.save()
return cache
# Now find if there is a preparing one
cache = ds.cachedUserServices().select_for_update().filter(cache_level = services.UserDeployment.L1_CACHE, state = State.PREPARING)[:1]
cache = ds.cachedUserServices().select_for_update().filter(cache_level=services.UserDeployment.L1_CACHE, state=State.PREPARING)[:1]
if len(cache) > 0:
cache = cache[0]
cache.assignToUser(user)
@ -436,14 +431,13 @@ class UserServiceManager(object):
This method is used by UserService when a request for setInUse(False) is made
This checks that the service can continue existing or not
'''
#uService = UserService.objects.select_for_update().get(id=uService.id)
# uService = UserService.objects.select_for_update().get(id=uService.id)
if uService.publication == None:
return
if uService.publication.id != uService.deployed_service.activePublication().id:
logger.debug('Old revision of user service, marking as removable: {0}'.format(uService))
uService.remove()
def notifyReadyFromOsManager(self, uService, data):
ui = uService.getInstance()
logger.debug('Notifying user service ready state')
@ -455,4 +449,3 @@ class UserServiceManager(object):
elif uService.state in (State.USABLE, State.PREPARING): # We don't want to get active deleting or deleted machines...
uService.setState(State.PREPARING)
UserServiceOpChecker.makeUnique(uService, ui, state)
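The __checkMaxDeployedReached() guard shown earlier in this file's diff raises once PREPARING plus USABLE instances reach the service's maxDeployed. A stand-alone sketch of that check; the UNLIMITED short-circuit is an assumption borrowed from the Service.UNLIMITED constant elsewhere in this commit, since the hunk only shows the comparison itself:

    UNLIMITED = -1


    class MaxServicesReachedException(Exception):
        pass


    def checkMaxDeployedReached(maxDeployed, numberOfServices):
        if maxDeployed == UNLIMITED:       # assumed guard; without it, -1 <= n would always raise
            return
        if maxDeployed <= numberOfServices:
            raise MaxServicesReachedException('Max number of allowed deployments for service reached')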

View File

@ -32,6 +32,10 @@ UDS managers (downloads, users preferences, publications, ...)
.. moduleauthor:: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
__updated__ = '2014-02-19'
def cryptoManager():
from CryptoManager import CryptoManager
@ -47,10 +51,12 @@ def downloadsManager():
from DownloadsManager import DownloadsManager
return DownloadsManager.manager()
def logManager():
from LogManager import LogManager
return LogManager.manager()
def statsManager():
from StatsManager import StatsManager
return StatsManager.manager()

View File

@ -30,6 +30,7 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from django.utils.translation import ugettext_noop as _
from uds.core.util.State import State
@ -37,6 +38,7 @@ from uds.core import Module
STORAGE_KEY = 'osmk'
class OSManager(Module):
'''
An OS Manager is responsible for communication the service the different actions to take (i.e. adding a windows machine to a domain)
@ -55,7 +57,7 @@ class OSManager(Module):
# Time is defined as a global config
processUnusedMachines = False
def __init__(self,environment, values):
def __init__(self, environment, values):
super(OSManager, self).__init__(environment, values)
self.initialize(values)
@ -85,7 +87,7 @@ class OSManager(Module):
pass
# These methods must be overriden
def process(self,service, message, data):
def process(self, service, message, data):
'''
This method must be overriden so your so manager can manage requests and responses from agent.
@param service: Service that sends the request (virtual machine or whatever)
@ -94,7 +96,7 @@ class OSManager(Module):
'''
pass
def checkState(self,service):
def checkState(self, service):
'''
This method must be overriden so your os manager can respond to requests from system to the current state of the service
This method will be invoked when:
@ -108,7 +110,6 @@ class OSManager(Module):
'''
return State.FINISHED
def processUnused(self, userService):
'''
This will be invoked for every assigned and unused user service that has been in this state at least 1/2 of Globalconfig.CHECK_UNUSED_TIME
@ -149,4 +150,3 @@ class OSManager(Module):
def __str__(self):
return "Base OS Manager"

View File

@ -36,6 +36,7 @@ import logging
logger = logging.getLogger(__name__)
class OSManagersFactory(object):
_factory = None

View File

@ -32,12 +32,13 @@ UDS os managers related interfaces and classes
.. moduleauthor:: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from BaseOsManager import OSManager
def factory():
'''
Returns factory for register/access to authenticators
'''
from OSManagersFactory import OSManagersFactory
return OSManagersFactory.factory()

View File

@ -30,10 +30,13 @@
'''
.. moduleauthor:: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from uds.core import Environmentable
from uds.core import Serializable
from uds.core.util.State import State
class UserDeployment(Environmentable, Serializable):
'''
Interface for deployed services.
@ -97,16 +100,16 @@ class UserDeployment(Environmentable, Serializable):
method reasonOfError can be called multiple times, including
serializations in middle, so remember to include reason of error in serializations
'''
L1_CACHE = 1 #: Constant for Cache of level 1
L2_CACHE = 2 #: Constant for Cache of level 2
L1_CACHE = 1 # : Constant for Cache of level 1
L2_CACHE = 2 # : Constant for Cache of level 2
#: Suggested time for deployment finishing, in seconds
#: This allows the manager to, if deployment is no done in 1 step, re-check
#: the deployment once this time has passed, i.e. KVM COW deployment takes
#: low time, so we suggest to check at short intervals, but full copys takes
#: a bit more so we can use longer interval checks
#: This attribute is accessed always through an instance object,
#: so u can modify it at your own implementation.
# : Suggested time for deployment finishing, in seconds
# : This allows the manager to, if deployment is no done in 1 step, re-check
# : the deployment once this time has passed, i.e. KVM COW deployment takes
# : low time, so we suggest to check at short intervals, but full copys takes
# : a bit more so we can use longer interval checks
# : This attribute is accessed always through an instance object,
# : so u can modify it at your own implementation.
suggestedTime = 10
def __init__(self, environment, **kwargs):
@ -130,18 +133,9 @@ class UserDeployment(Environmentable, Serializable):
Environmentable.__init__(self, environment)
Serializable.__init__(self)
self._service = kwargs['service'] # Raises an exception if service is not included. Parent
if kwargs.has_key('publication'):
self._publication = kwargs['publication']
else:
self._publication = None
if kwargs.has_key('osmanager'):
self._osmanager = kwargs['osmanager']
else:
self._osmanager = None
if kwargs.has_key('dbservice'): # Reference to database service, will be there most time :-)
self._dbService = kwargs['dbservice']
else:
self._dbService = None
self._publication = kwargs.get('publication', None)
self._osmanager = kwargs.get('osmanager', None)
self._dbService = kwargs.get('dbservice', None)
self.initialize()
@ -155,7 +149,6 @@ class UserDeployment(Environmentable, Serializable):
'''
pass
def getName(self):
'''
Override this to return a name to display under some circustances
@ -399,7 +392,6 @@ class UserDeployment(Environmentable, Serializable):
'''
raise Exception('Base deploy for user invoked! for class {0}'.format(self.__class__.__name__))
def checkState(self):
'''
This is a task method. As that, the expected return values are

View File

@ -30,9 +30,12 @@
'''
.. moduleauthor:: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from uds.core import Environmentable
from uds.core import Serializable
class Publication(Environmentable, Serializable):
'''
This class is in fact an interface, and defines the logic of a publication
@ -63,13 +66,13 @@ class Publication(Environmentable, Serializable):
# Description of the publication
#:Suggested time for publication finishing, in seconds
#: This allows the manager to, if publication is no done in 1 step,
#: re-check the publication once this time has passed, i.e. KVM COW publication
#: takes low time, so we suggest to check at short intervals,
#: but full clone takes a lot, so we suggest that checks are done more steady.
#: This attribute is always accessed using an instance object, so you can
#: change suggestedTime in your implementation.
# :Suggested time for publication finishing, in seconds
# : This allows the manager to, if publication is no done in 1 step,
# : re-check the publication once this time has passed, i.e. KVM COW publication
# : takes low time, so we suggest to check at short intervals,
# : but full clone takes a lot, so we suggest that checks are done more steady.
# : This attribute is always accessed using an instance object, so you can
# : change suggestedTime in your implementation.
suggestedTime = 10
def __init__(self, environment, **kwargs):
@ -266,4 +269,3 @@ class Publication(Environmentable, Serializable):
String method, mainly used for debugging purposes
'''
return "Base Publication"

View File

@ -35,6 +35,7 @@ from __future__ import unicode_literals
from django.utils.translation import ugettext_noop as _
from uds.core import Module
class Service(Module):
'''
This class is in fact an interface, and represents a service, that is the
@ -73,87 +74,87 @@ class Service(Module):
'''
#: Constant for indicating that max elements this service can deploy is unlimited.
# : Constant for indicating that max elements this service can deploy is unlimited.
UNLIMITED = -1
#: Name of type, used at administration interface to identify this
#: service (i.e. Xen server, oVirt Server, ...)
#: This string will be translated when provided to admin interface
#: using ugettext, so you can mark it as "_" at derived classes (using ugettext_noop)
#: if you want so it can be translated.
# : Name of type, used at administration interface to identify this
# : service (i.e. Xen server, oVirt Server, ...)
# : This string will be translated when provided to admin interface
# : using ugettext, so you can mark it as "_" at derived classes (using ugettext_noop)
# : if you want so it can be translated.
typeName = _('Base Service')
#: Name of type used by Managers to identify this type of service
#: We could have used here the Class name, but we decided that the
#: module implementator will be the one that will provide a name that
#: will relation the class (type) and that name.
# : Name of type used by Managers to identify this type of service
# : We could have used here the Class name, but we decided that the
# : module implementator will be the one that will provide a name that
# : will relation the class (type) and that name.
typeType = 'BaseService'
#: Description shown at administration level for this service.
#: This string will be translated when provided to admin interface
#: using ugettext, so you can mark it as "_" at derived classes (using ugettext_noop)
#: if you want so it can be translated.
# : Description shown at administration level for this service.
# : This string will be translated when provided to admin interface
# : using ugettext, so you can mark it as "_" at derived classes (using ugettext_noop)
# : if you want so it can be translated.
typeDescription = _('Base Service')
#: Icon file, used to represent this service at administration interface
#: This file should be at same folder as this class is, except if you provide
#: your own :py:meth:uds.core.BaseModule.BaseModule.icon method.
# : Icon file, used to represent this service at administration interface
# : This file should be at same folder as this class is, except if you provide
# : your own :py:meth:uds.core.BaseModule.BaseModule.icon method.
iconFile = 'service.png'
# Functional related data
#: Normally set to UNLIMITED. This attribute indicates if the service has some "limitation"
#: for providing deployed services to users. This attribute can be set here or
#: modified at instance level, core will access always to it using an instance object.
maxDeployed = UNLIMITED #: If the service provides more than 1 "provided service" (-1 = no limit, 0 = ???? (do not use it!!!), N = max number to deploy
# : Normally set to UNLIMITED. This attribute indicates if the service has some "limitation"
# : for providing deployed services to users. This attribute can be set here or
# : modified at instance level, core will access always to it using an instance object.
maxDeployed = UNLIMITED # : If the service provides more than 1 "provided service" (-1 = no limit, 0 = ???? (do not use it!!!), N = max number to deploy
#: If this class uses cache or not. If uses cache is true, means that the
#: service can "prepare" some user deployments to allow quicker user access
#: to services if he already do not have one.
#: If you set this to True, please, provide a _ :py:attr:.cacheToolTip
# : If this class uses cache or not. If uses cache is true, means that the
# : service can "prepare" some user deployments to allow quicker user access
# : to services if he already do not have one.
# : If you set this to True, please, provide a _ :py:attr:.cacheToolTip
usesCache = False
#: Tooltip to be used if services uses cache at administration interface, indicated by :py:attr:.usesCache
cacheTooltip = _('None') #: Tooltip shown to user when this item is pointed at admin interface
# : Tooltip to be used if services uses cache at administration interface, indicated by :py:attr:.usesCache
cacheTooltip = _('None') # : Tooltip shown to user when this item is pointed at admin interface
#: If user deployments can be cached (see :py:attr:.usesCache), may he also can provide a secondary cache,
#: that is no more that user deployments that are "almost ready" to be used, but preperably consumes less
#: resources than L1 cache. This can give a boost to cache L1 recovering in case of peaks
#: in demand. If you set this to True, please, provide also a _ :py:attr:.cacheTooltip_L2
usesCache_L2 = False #: If we need to generate a "Level 2" cache for this service (i.e., L1 could be running machines and L2 suspended machines)
# : If user deployments can be cached (see :py:attr:.usesCache), may he also can provide a secondary cache,
# : that is no more that user deployments that are "almost ready" to be used, but preperably consumes less
# : resources than L1 cache. This can give a boost to cache L1 recovering in case of peaks
# : in demand. If you set this to True, please, provide also a _ :py:attr:.cacheTooltip_L2
usesCache_L2 = False # : If we need to generate a "Level 2" cache for this service (i.e., L1 could be running machines and L2 suspended machines)
#: Tooltip to be used if services uses L2 cache at administration interface, indicated by :py:attr:.usesCache_L2
cacheTooltip_L2 = _('None') #: Tooltip shown to user when this item is pointed at admin interface
# : Tooltip to be used if services uses L2 cache at administration interface, indicated by :py:attr:.usesCache_L2
cacheTooltip_L2 = _('None') # : Tooltip shown to user when this item is pointed at admin interface
#: If the service needs a o.s. manager (see os managers section)
# : If the service needs a o.s. manager (see os managers section)
needsManager = False
#: If the service can be autoassigned or needs to be assigned by administrator
#: Not all services are for assigning it. Thing, i.e., a Service that manages
#: a number of Server. The desired behavior will be to let administrator
#: the service to a user in the administration interface, an not the system
#: to assign the service automatically. If this is true, the core will not
#: assign the service automatically, so if the user do not have a consumable
#: assigned, the user will never get one (of this kind, of course)
# : If the service can be autoassigned or needs to be assigned by administrator
# : Not all services are for assigning it. Thing, i.e., a Service that manages
# : a number of Server. The desired behavior will be to let administrator
# : the service to a user in the administration interface, an not the system
# : to assign the service automatically. If this is true, the core will not
# : assign the service automatically, so if the user do not have a consumable
# : assigned, the user will never get one (of this kind, of course)
mustAssignManually = False
#: Types of publications (preparated data for deploys)
#: If you provide this, UDS will assume that the service needs a preparation.
#: If not provided (it is None), UDS will assume that service do not needs
#: preparation. Take care, if you mark a service as it uses cache, you MUST
#: provide a publication type
#: This refers to class that provides the logic for publication, you can see
#: :py:class:uds.core.services.Publication
# : Types of publications (preparated data for deploys)
# : If you provide this, UDS will assume that the service needs a preparation.
# : If not provided (it is None), UDS will assume that service do not needs
# : preparation. Take care, if you mark a service as it uses cache, you MUST
# : provide a publication type
# : This refers to class that provides the logic for publication, you can see
# : :py:class:uds.core.services.Publication
publicationType = None
#: Types of deploys (services in cache and/or assigned to users)
#: This is ALWAYS a MUST. You mast indicate the class responsible
#: for managing the user deployments (user consumable services generated
#: from this one). If this attribute is not set, the service will never work
#: (core will not know how to handle the user deployments)
# : Types of deploys (services in cache and/or assigned to users)
# : This is ALWAYS a MUST. You mast indicate the class responsible
# : for managing the user deployments (user consumable services generated
# : from this one). If this attribute is not set, the service will never work
# : (core will not know how to handle the user deployments)
deployedType = None
def __init__(self, environment, parent, values = None):
def __init__(self, environment, parent, values=None):
'''
Do not forget to invoke this in your derived class using "super(self.__class__, self).__init__(environment, parent, values)".
We want to use the env, parent methods outside class. If not called, you must implement your own methods
@ -216,10 +217,8 @@ class Service(Module):
'''
return self.idGenerators('name')
def __str__(self):
'''
String method, mainly used for debugging purposes
'''
return "Base Service Provider"
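The attributes re-commented throughout this file are what a concrete service overrides. A hypothetical subclass showing typical values; Service here is a local stand-in so the sketch runs on its own, while in UDS it would derive from uds.core.services.Service:

    class Service(object):                 # stand-in base class, for illustration only
        UNLIMITED = -1


    class SampleService(Service):
        typeName = 'Sample Service'
        typeType = 'SampleService'
        typeDescription = 'Illustrative service for this sketch'
        iconFile = 'service.png'
        maxDeployed = Service.UNLIMITED
        usesCache = True
        cacheTooltip = 'Number of machines kept ready'
        usesCache_L2 = False
        mustAssignManually = False
        publicationType = None             # would point at a Publication subclass if preparation were needed
        deployedType = None                # must point at the UserDeployment subclass that handles instances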

View File

@ -72,33 +72,33 @@ class ServiceProvider(Module):
not needed.
'''
#: Services that we offers. Here is a list of service types (python types) that
#: this class will provide. This types are the python clases, derived from
#: Service, that are childs of this provider
# : Services that we offers. Here is a list of service types (python types) that
# : this class will provide. This types are the python clases, derived from
# : Service, that are childs of this provider
offers = []
#: Name of type, used at administration interface to identify this
#: provider (i.e. Xen server, oVirt Server, ...)
#: This string will be translated when provided to admin interface
#: using ugettext, so you can mark it as "translatable" at derived classes (using ugettext_noop)
#: if you want so it can be translated.
# : Name of type, used at administration interface to identify this
# : provider (i.e. Xen server, oVirt Server, ...)
# : This string will be translated when provided to admin interface
# : using ugettext, so you can mark it as "translatable" at derived classes (using ugettext_noop)
# : if you want so it can be translated.
typeName = 'Base Provider'
#: Name of type used by Managers to identify this tipe of service
#: We could have used here the Class name, but we decided that the
#: module implementator will be the one that will provide a name that
#: will relation the class (type) and that name.
# : Name of type used by Managers to identify this tipe of service
# : We could have used here the Class name, but we decided that the
# : module implementator will be the one that will provide a name that
# : will relation the class (type) and that name.
typeType = 'BaseServiceProvider'
#: Description shown at administration level for this provider.
#: This string will be translated when provided to admin interface
#: using ugettext, so you can mark it as "translatable" at derived classes (using ugettext_noop)
#: if you want so it can be translated.
# : Description shown at administration level for this provider.
# : This string will be translated when provided to admin interface
# : using ugettext, so you can mark it as "translatable" at derived classes (using ugettext_noop)
# : if you want so it can be translated.
typeDescription = 'Base Service Provider'
#: Icon file, used to represent this provider at administration interface
#: This file should be at same folder as this class is, except if you provide
#: your own py:meth:`uds.core.BaseModule.BaseModule.icon` method.
# : Icon file, used to represent this provider at administration interface
# : This file should be at same folder as this class is, except if you provide
# : your own py:meth:`uds.core.BaseModule.BaseModule.icon` method.
iconFile = 'provider.png'
@classmethod
@ -125,8 +125,7 @@ class ServiceProvider(Module):
break
return res
def __init__(self, environment, values = None):
def __init__(self, environment, values=None):
'''
Do not forget to invoke this in your derived class using "super(self.__class__, self).__init__(environment, values)"
if you override this method. Better is to provide an "__initialize__" method, that will be invoked
@ -137,7 +136,6 @@ class ServiceProvider(Module):
super(ServiceProvider, self).__init__(environment, values)
self.initialize(values)
def initialize(self, values):
'''
This method will be invoked from __init__ constructor.
@ -161,4 +159,3 @@ class ServiceProvider(Module):
at user or admin interfaces.
'''
return "Base Service Provider"

View File

@ -30,8 +30,10 @@
'''
.. moduleauthor:: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from BasePublication import Publication
class ClusteredPublication(Publication):
def __str__(self):
@ -46,4 +48,3 @@ class ClusteredPublication(Publication):
Returns on wich node this publication has been deployed
'''
raise Exception('getNode method of ClusteredPublication must be overriden!')

View File

@ -35,6 +35,7 @@ from __future__ import unicode_literals
from django.utils.translation import ugettext_noop as _
from BaseService import Service
class ClusteredService(Service):
typeName = _('Base Clustered Service')
typeType = 'BaseClusteredService'

View File

@ -31,7 +31,6 @@
.. moduleauthor:: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
#from __future__ import with_statement
from BaseServiceProvider import ServiceProvider
from uds.core.util.Config import GlobalConfig
@ -42,6 +41,7 @@ logger = logging.getLogger(__name__)
HEIGHT_OF_CPU = 5
class ClusteredServiceProvider(ServiceProvider):
'''
This class represents a Clustered Service Provider, that is, a Service provider that forms a Cluster and needs
@ -58,7 +58,6 @@ class ClusteredServiceProvider(ServiceProvider):
allowInUseMigration = False # If True, means that we can migrate a service while it is being used
canRegisterServiceOnNodeFailure = False # If it can register a service on another node without accessing the original node
# These methods do not need to be overridden
def clusterStats(self):
stats = self.storage().getPickle('ClusterStats')
@ -66,7 +65,6 @@ class ClusteredServiceProvider(ServiceProvider):
stats = {}
return stats
# This method does not need to be overridden, but can be if needed (taking care, of course :-) )
def getClusterOverloadedNodes(self):
'''
@ -95,7 +93,7 @@ class ClusteredServiceProvider(ServiceProvider):
# Helper to sort array
def getNodeStatsKey(name):
val = 0
if nodesStats[name]['cpuLoad']>maxCpuLoad:
if nodesStats[name]['cpuLoad'] > maxCpuLoad:
val += HEIGHT_OF_CPU + nodesStats[name]['cpuLoad']
val += 100 - (nodeStats['freeMemory'] * 100) / nodeStats['totalMemory']
return val
@ -103,7 +101,6 @@ class ClusteredServiceProvider(ServiceProvider):
# Here we sort nodes so most overloaded servers are migrated first
return sorted(overloadedNodes, key=getNodeStatsKey)
# Same as before, this method does not need to be overridden.
def getClusterUnderloadedNodes(self):
'''
@ -153,7 +150,7 @@ class ClusteredServiceProvider(ServiceProvider):
return 0 # We will put it last if we do not know anything about a node
memUsePercent = (ns['freeMemory'] * 100) / ns['totalMemory']
val = (100-ns['cpuLoad']) * HEIGHT_OF_CPU + (100-memUsePercent)
val = (100 - ns['cpuLoad']) * HEIGHT_OF_CPU + (100 - memUsePercent)
return -val
return sorted(nodes, key=getNodeStatsKey)
@ -228,7 +225,6 @@ class ClusteredServiceProvider(ServiceProvider):
'''
return []
def getClusterNodeLoad(self, nodeId):
'''
This method must be overridden
@ -245,4 +241,3 @@ class ClusteredServiceProvider(ServiceProvider):
'''
return {'cpuLoad': None, 'freeMemory': None, 'totalMemory': None} # We could have used return {}, but I prefer this "sample template"
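A minimal override sketch for a concrete provider. The backend query helper used below is hypothetical and only illustrates the expected return shape:

class MyClusteredProvider(ClusteredServiceProvider):
    def getClusterNodeLoad(self, nodeId):
        stats = self._queryNodeStats(nodeId)  # hypothetical call to the hypervisor/cluster API
        return {
            'cpuLoad': stats['cpu'],          # 0-100
            'freeMemory': stats['memFree'],   # same unit as totalMemory
            'totalMemory': stats['memTotal'],
        }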

View File

@ -35,6 +35,7 @@ from __future__ import unicode_literals
from BaseDeployed import UserDeployment
from uds.core.util.State import State
class ClusteredUserDeployment(UserDeployment):
def startMigration(self, dstNode):

View File

@ -31,41 +31,48 @@
.. moduleauthor:: Adolfo Gómez, dkmaster at dkmon dot com
'''
class UnsupportedException(Exception):
'''
Reflects that a requested operation is not supported, e.g. canceling a publication with snapshots
'''
pass
class OperationException(Exception):
'''
Reflects that the requested operation can't be accomplished, e.g. removing a snapshot without a snapshot reference, canceling a non-running operation, etc.
'''
pass
class PublishException(Exception):
'''
Reflects that the publication can't be done for causes we don't know in advance
'''
pass
class DeploymentException(Exception):
'''
Reflects that a deployment of a service (at cache, or assigned to user) can't be done for causes we don't know in advance
'''
pass
class CancelException(Exception):
'''
Reflects that a "cancel" operation can't be done for some reason
'''
class InvalidServiceException(Exception):
'''
Invalid service specified. The service is not ready
'''
pass
class MaxServicesReachedException(Exception):
'''
The maximum number of services has been reached, and no more services

View File

@ -30,10 +30,13 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
import logging
logger = logging.getLogger(__name__)
class ServiceProviderFactory(object):
'''
This class holds the register of all known service provider modules

View File

@ -45,6 +45,7 @@ from ClusteredUserDeployment import ClusteredUserDeployment
import Exceptions
def factory():
'''
Returns factory for register/access to service providers

View File

@ -30,6 +30,7 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
from uds.core.util import OsDetector
@ -65,7 +66,7 @@ class Transport(Module):
webTransport = False
tcTransport = False
def __init__(self,environment, values):
def __init__(self, environment, values):
super(Transport, self).__init__(environment, values)
self.initialize(values)
@ -152,7 +153,7 @@ class Transport(Module):
'''
return _('Transport empty')
def getHtmlComponent(self, id, os, componentId):
def getHtmlComponent(self, id, os, componentId): # @ReservedAssignment
'''
This is a method to let the transport add own components (images, applets, or whatever) to the rendered html
The reference to object will be the access to the uds.web.views.transcomp, with parameters transportId = ourTransportId and
@ -163,4 +164,3 @@ class Transport(Module):
def __str__(self):
return "Base OS Manager"

View File

@ -30,10 +30,13 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
import logging
logger = logging.getLogger(__name__)
class TransportsFactory(object):
_factory = None
@ -49,9 +52,9 @@ class TransportsFactory(object):
def providers(self):
return self._jobs
def insert(self, type):
logger.debug('Adding transport {0} as {1}'.format(type.type(), type))
self._jobs[type.type()] = type
def insert(self, type_):
logger.debug('Adding transport {0} as {1}'.format(type_.type(), type_))
self._jobs[type_.type()] = type_
def lookup(self, typeName):
try:

View File

@ -32,8 +32,11 @@ UDS Service modules interfaces and classes.
.. moduleauthor:: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from BaseTransport import Transport
def factory():
'''
Returns factory for register/access to service providers

View File

@ -30,6 +30,7 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
NONE = ''
RDP = 'rdp'

View File

@ -38,6 +38,7 @@ import logging
logger = logging.getLogger(__name__)
class gui(object):
'''
This class contains the representations of fields needed by UDS modules and
@ -73,12 +74,12 @@ class gui(object):
create new instances of this module.
'''
#: True string value
# : True string value
TRUE = 'true'
#: False string value
# : False string value
FALSE = 'false'
#: Static Callbacks simple registry
# : Static Callbacks simple registry
callbacks = {}
# Helpers
@ -91,12 +92,12 @@ class gui(object):
'''
res = []
for v in vals:
res.append( { 'id' : v, 'text' : '' } )
res.append({'id': v, 'text': ''})
return res
@staticmethod
def convertToList(vals):
return [ unicode(v) for v in vals ]
return [unicode(v) for v in vals]
@staticmethod
def choiceItem(id_, text):
@ -115,7 +116,7 @@ class gui(object):
:note: Text can be anything, the method converts it first to text before
assigning to dictionary
'''
return { 'id' : str(id_), 'text' : str(text) }
return {'id': str(id_), 'text': str(text)}
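# Illustrative example (hedged): gui.choiceItem(1, 'One') would return
#     {'id': '1', 'text': 'One'}
# which is the element shape used in the "values" lists of choice fields.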
@staticmethod
def strToBool(str_):
@ -200,19 +201,19 @@ class gui(object):
EDITABLE_LIST = 'editlist'
CHECKBOX_TYPE = 'checkbox'
DEFAULT_LENTGH = 32 #: If the length of a field is not specified, this value is used as default
DEFAULT_LENTGH = 32 # : If the length of a field is not specified, this value is used as default
def __init__(self, **options):
self._data = {
'length' : options.get('length', gui.InputField.DEFAULT_LENTGH),
'required' : options['required'] if options.has_key('required') else False,
'label': options['label'] if options.has_key('label') else '',
'defvalue' : str(options['defvalue']) if options.has_key('defvalue') else '',
'rdonly' : options['rdonly'] if options.has_key('rdonly') else False, # This property only affects in "modify" operations
'order' : options['order'] if options.has_key('order') else 0,
'tooltip' : options['tooltip'] if options.has_key('tooltip') else '',
'type' : gui.InputField.TEXT_TYPE,
'value' : options['value'] if options.has_key('value') else '',
'length': options.get('length', gui.InputField.DEFAULT_LENTGH),
'required': options.get('required', False),
'label': options.get('label', ''),
'defvalue': unicode(options.get('defvalue', '')),
'rdonly': options.get('rdonly', False), # This property only affects in "modify" operations
'order': options.get('order', 0),
'tooltip': options.get('tooltip', ''),
'type': gui.InputField.TEXT_TYPE,
'value': options.get('value', ''),
}
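# A hedged example of how a module typically declares a field taking these
# options (the field name and values below are illustrative, not real UDS fields):
#
#     serverName = gui.TextField(label=_('Server'), order=1, required=True,
#                                tooltip=_('Host name of the server'), defvalue='localhost')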
def _type(self, type_):
@ -250,7 +251,6 @@ class gui(object):
'''
self._data['value'] = value
def guiDescription(self):
'''
Returns the dictionary with the description of this item.
@ -287,7 +287,6 @@ class gui(object):
def label(self):
return self._data['label']
class TextField(InputField):
'''
This represents a text field.
@ -327,7 +326,6 @@ class gui(object):
multiline = 8
self._data['multiline'] = multiline
class NumericField(InputField):
'''
This represents a numeric field. It apears with an spin up/down button.
@ -422,7 +420,6 @@ class gui(object):
def isSerializable(self):
return self._isSerializable
class CheckBoxField(InputField):
'''
This represents a check box field, with values "true" and "false"
@ -547,8 +544,8 @@ class gui(object):
'''
def __init__(self, **options):
super(self.__class__, self).__init__(**options)
self._data['values'] = options['values'] if options.has_key('values') else []
if options.has_key('fills'):
self._data['values'] = options.get('values', [])
if 'fills' in options:
# Save fnc to register as callback
fills = options['fills']
fnc = fills['function']
@ -598,8 +595,8 @@ class gui(object):
'''
def __init__(self, **options):
super(self.__class__, self).__init__(**options)
self._data['values'] = options['values'] if options.has_key('values') else []
self._data['rows'] = options['rows'] if options.has_key('rows') else -1
self._data['values'] = options.get('values', [])
self._data['rows'] = options.get('rows', -1)
self._type(gui.InputField.MULTI_CHOICE_TYPE)
def setValues(self, values):
@ -635,12 +632,12 @@ class gui(object):
'''
#: Constant for separating values at "value" method
# : Constant for separating values at "value" method
SEPARATOR = '\001'
def __init__(self, **options):
super(self.__class__, self).__init__(**options)
self._data['values'] = gui.convertToList(options['values']) if options.has_key('values') else []
self._data['values'] = gui.convertToList(options.get('values', []))
self._type(gui.InputField.EDITABLE_LIST)
def _setValue(self, values):
@ -651,14 +648,13 @@ class gui(object):
self._data['values'] = gui.convertToList(values)
class UserInterfaceType(type):
'''
Metaclass definition for moving the user interface descriptions to a more
usable place
'''
def __new__(cls, classname, bases, classDict):
newClassDict = { }
newClassDict = {}
_gui = {}
# We will keep a reference to gui elements also at _gui so we can access them easily
for attrName, attr in classDict.items():
@ -668,6 +664,7 @@ class UserInterfaceType(type):
newClassDict['_gui'] = _gui
return type.__new__(cls, classname, bases, newClassDict)
class UserInterface(object):
'''
This class provides the management for gui descriptions (user forms)
@ -681,9 +678,9 @@ class UserInterface(object):
'''
__metaclass__ = UserInterfaceType
def __init__(self, values = None):
def __init__(self, values=None):
import copy
#: If there is an array of elements to initialize, simply try to store values on form fields
# : If there is an array of elements to initialize, simply try to store values on form fields
# Generate a deep copy of the inherited Gui, so each User Interface instance has its own "field" set and does not share the "fieldset" with others, which can be really dangerous
# Until now, nothing bad happened because they were always used "serialized", but it does not have to stay this way
self._gui = copy.deepcopy(self._gui) # Ensure "gui" is our own instance, deep copied from base
@ -692,11 +689,9 @@ class UserInterface(object):
if values is not None:
for k, v in self._gui.iteritems():
if values.has_key(k):
if k in values:
v.value = values[k]
def initGui(self):
'''
This method gives the opportunity to initialize gui fields before they
@ -756,7 +751,6 @@ class UserInterface(object):
logger.debug('Dict: {0}'.format(dic))
return dic
def serializeForm(self):
'''
All values stored at form fields are serialized and returned as a single
@ -799,7 +793,7 @@ class UserInterface(object):
# Set all values to default ones
for k in self._gui.iterkeys():
if self._gui[k].isType(gui.InputField.HIDDEN_TYPE) and self._gui[k].isSerializable() is False:
#logger.debug('Field {0} is not unserializable'.format(k))
# logger.debug('Field {0} is not unserializable'.format(k))
continue
self._gui[k].value = self._gui[k].defValue
@ -809,7 +803,7 @@ class UserInterface(object):
for txt in values.split('\002'):
k, v = txt.split('\003')
if self._gui.has_key(k):
if k in self._gui:
try:
if v[0] == '\001':
val = cPickle.loads(v[1:].encode('utf-8'))
@ -818,13 +812,13 @@ class UserInterface(object):
except:
val = ''
self._gui[k].value = val
#logger.debug('Value for {0}:{1}'.format(k, val))
# logger.debug('Value for {0}:{1}'.format(k, val))
except:
# Values can contain invalid characters, so we log every single char
logger.info('Invalid serialization data on {0} {1}'.format(self, values.encode('hex')))
@classmethod
def guiDescription(cls, obj = None):
def guiDescription(cls, obj=None):
'''
This simple method generates the gui description needed by the
administration client, so it can
@ -843,7 +837,7 @@ class UserInterface(object):
res = []
for key, val in gui._gui.iteritems():
logger.debug('{0} ### {1}'.format(key, val))
res.append( { 'name' : key, 'gui' : val.guiDescription(), 'value' : '' }, )
res.append({'name': key, 'gui': val.guiDescription(), 'value': ''})
logger.debug('>>>>>>>>>>>> Gui Description: {0} -- {1}'.format(obj, res))
return res

View File

@ -34,6 +34,7 @@ from __future__ import unicode_literals
from uds.core.util.Config import GlobalConfig
def template(template_name):
theme_path = GlobalConfig.UDS_THEME.get(True)
if theme_path == 'default':

View File

@ -30,14 +30,16 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from uds.core.Serializable import Serializable
import cPickle
import timeit
class Attribute(object):
def __init__(self, theType, value = None):
def __init__(self, theType, value=None):
self._type = theType
self.setValue(value)
@ -56,6 +58,7 @@ class Attribute(object):
else:
self._value = self._type(value)
class AutoAttributes(Serializable):
'''
Easy creation of attributes to marshal & unmarshal at modules
@ -65,9 +68,9 @@ class AutoAttributes(Serializable):
Access attrs as "self._attr1, self._attr2"
'''
#: This codec is not intended to override Serializable codec
#: Serializable codec is for encoding marshaled data,
#: while this codec is for encoding pickled data from autoattributes
# : This codec is not intended to override Serializable codec
# : Serializable codec is for encoding marshaled data,
# : while this codec is for encoding pickled data from autoattributes
ACODEC = 'zip'
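# Hedged usage sketch, assuming __init__ simply forwards its keyword arguments
# to declare() (the class and attribute names below are illustrative):
#
#     class MyData(AutoAttributes):
#         def __init__(self):
#             AutoAttributes.__init__(self, name=unicode, count=int)
#
#     d = MyData()
#     d._name = 'example'   # attributes are accessed as self._<name>
#     d._count = 3
#     serialized = d.marshal()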
def __init__(self, **kwargs):
@ -86,12 +89,12 @@ class AutoAttributes(Serializable):
def declare(self, **kwargs):
d = {}
for key,typ in kwargs.iteritems():
for key, typ in kwargs.iteritems():
d[key] = Attribute(typ)
self.dict = d
def marshal(self):
return '\2'.join( [ '%s\1%s' % (k, cPickle.dumps(v)) for k, v in self.dict.iteritems() ] ).encode(AutoAttributes.ACODEC)
return '\2'.join(['%s\1%s' % (k, cPickle.dumps(v)) for k, v in self.dict.iteritems()]).encode(AutoAttributes.ACODEC)
def unmarshal(self, data):
if data == '': # Can be empty
@ -102,7 +105,7 @@ class AutoAttributes(Serializable):
self.dict[k] = cPickle.loads(v)
def __str__(self):
str = '<AutoAttribute '
str_ = '<AutoAttribute '
for k, v in self.dict.iteritems():
str += "%s (%s) = %s" % (k, v.getType(), v.getStrValue())
return str + '>'
str_ += "%s (%s) = %s" % (k, v.getType(), v.getStrValue())
return str_ + '>'

View File

@ -40,6 +40,7 @@ import cPickle
logger = logging.getLogger(__name__)
class Cache(object):
DEFAULT_VALIDITY = 60
CODEC = 'base64' # Can be zip, hez, bzip, base64, uuencoded
@ -47,20 +48,18 @@ class Cache(object):
def __init__(self, owner):
self._owner = owner
def __getKey(self, key):
import os
h = hashlib.md5()
h.update(self._owner + key)
return h.hexdigest()
def get(self,skey, defValue = None):
def get(self, skey, defValue=None):
now = getSqlDatetime()
#logger.debug('Requesting key "%s" for cache "%s"' % (skey, self._owner,))
# logger.debug('Requesting key "%s" for cache "%s"' % (skey, self._owner,))
try:
key = self.__getKey(skey)
c = dbCache.objects.get(pk=key)
expired = now > c.created + timedelta(seconds = c.validity)
expired = now > c.created + timedelta(seconds=c.validity)
if expired:
return defValue
val = cPickle.loads(c.value.decode(Cache.CODEC))
@ -69,8 +68,8 @@ class Cache(object):
logger.debug('key not found')
return defValue
def remove(self,skey):
#logger.debug('Removing key "%s" for uService "%s"' % (skey, self._owner))
def remove(self, skey):
# logger.debug('Removing key "%s" for uService "%s"' % (skey, self._owner))
try:
key = self.__getKey(skey)
dbCache.objects.get(pk=key).delete()
@ -80,15 +79,15 @@ class Cache(object):
def clean(self):
Cache.delete(self._owner)
def put(self, skey, value, validity = None):
#logger.debug('Saving key "%s" for cache "%s"' % (skey, self._owner,))
def put(self, skey, value, validity=None):
# logger.debug('Saving key "%s" for cache "%s"' % (skey, self._owner,))
if validity == None:
validity = Cache.DEFAULT_VALIDITY
key = self.__getKey(skey)
value = cPickle.dumps(value).encode(Cache.CODEC)
now = getSqlDatetime()
try:
dbCache.objects.create( owner = self._owner, key = key, value = value, created = now, validity = validity )
dbCache.objects.create(owner=self._owner, key=key, value=value, created=now, validity=validity)
except Exception:
# Already exists, modify it
c = dbCache.objects.get(pk=key)
@ -100,7 +99,7 @@ class Cache(object):
c.save()
def refresh(self, skey):
#logger.debug('Refreshing key "%s" for cache "%s"' % (skey, self._owner,))
# logger.debug('Refreshing key "%s" for cache "%s"' % (skey, self._owner,))
try:
key = self.__getKey(skey)
c = dbCache.objects.get(pk=key)
@ -119,11 +118,10 @@ class Cache(object):
dbCache.cleanUp()
@staticmethod
def delete(owner = None):
#logger.info("Deleting cache items")
def delete(owner=None):
# logger.info("Deleting cache items")
if owner == None:
objects = dbCache.objects.all()
else:
objects = dbCache.objects.filter(owner=owner)
objects.delete()
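A hedged usage sketch of this cache; the import path is assumed and the owner/key names are illustrative:

from uds.core.util.Cache import Cache

cache = Cache('myOwner')                     # the owner string namespaces the entries
cache.put('lastCheck', {'ok': True}, 300)    # validity in seconds (defaults to DEFAULT_VALIDITY)
value = cache.get('lastCheck', defValue={})  # returns defValue once the entry has expired
cache.remove('lastCheck')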

View File

@ -43,13 +43,14 @@ GLOBAL_SECTION = 'UDS'
SECURITY_SECTION = 'Security'
CLUSTER_SECTION = 'Cluster'
class Config(object):
'''
Keeps persistent configuration data
'''
class _Value(object):
def __init__(self, section, key, default = '', crypt = False, longText = False):
def __init__(self, section, key, default='', crypt=False, longText=False):
self._section = section
self._key = key
self._crypt = crypt
@ -60,10 +61,10 @@ class Config(object):
self._default = CryptoManager.manager().encrypt(default)
self._data = None
def get(self, force = False):
def get(self, force=False):
try:
if force or self._data is None:
#logger.debug('Accessing db config {0}.{1}'.format(self._section.name(), self._key))
# logger.debug('Accessing db config {0}.{1}'.format(self._section.name(), self._key))
readed = dbConfig.objects.filter(section=self._section.name(), key=self._key)[0]
self._data = readed.value
self._crypt = [self._crypt, True][readed.crypt] # True has "higher" precedence than False
@ -71,7 +72,7 @@ class Config(object):
except Exception:
# Not found
if self._default != '' and self._crypt:
self.set( CryptoManager.manager().decrypt(self._default) )
self.set(CryptoManager.manager().decrypt(self._default))
elif not self._crypt:
self.set(self._default)
self._data = self._default
@ -80,7 +81,7 @@ class Config(object):
else:
return self._data
def getInt(self, force = False):
def getInt(self, force=False):
try:
return int(self.get(force))
except Exception:
@ -91,7 +92,7 @@ class Config(object):
logger.error('Default value for {0}.{1} is also invalid (integer expected)'.format(self._section, self._key))
return -1
def getBool(self, force = False):
def getBool(self, force=False):
if self.get(force) == '0':
return False
return True
@ -129,27 +130,25 @@ class Config(object):
def __init__(self, sectionName):
self._sectionName = sectionName
def value(self, key, default = ''):
def value(self, key, default=''):
return Config._Value(self, key, default)
def valueCrypt(self, key, default = ''):
def valueCrypt(self, key, default=''):
return Config._Value(self, key, default, True)
def valueLong(self, key, default = ''):
def valueLong(self, key, default=''):
return Config._Value(self, key, default, False, True)
def name(self):
return self._sectionName
@staticmethod
def section(sectionName):
return Config._Section(sectionName)
@staticmethod
def enumerate():
for cfg in dbConfig.objects.all():
for cfg in dbConfig.objects.all().order_by('key'):
logger.debug('{0}.{1}:{2}'.format(cfg.section, cfg.key, cfg.value))
if cfg.crypt is True:
val = Config.section(cfg.section).valueCrypt(cfg.key)
@ -170,6 +169,7 @@ class Config(object):
except Exception:
pass
class GlobalConfig(object):
'''
Simple helper to keep track of global configuration
@ -273,9 +273,10 @@ class GlobalConfig(object):
except:
logger.debug('Config table does not exist!!!, maybe we are installing? :-)')
# Context processor
def context_processor(request):
return { 'css_path' : GlobalConfig.CSS.get() }
return {'css_path': GlobalConfig.CSS.get()}
# Initialization of global configurations
GlobalConfig.initialize()
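A hedged sketch of defining and reading a configuration value with this helper; the key name and default are illustrative:

from uds.core.util.Config import Config

# Values are read lazily from the database the first time get() is called
MY_TIMEOUT = Config.section('UDS').value('myTimeout', '30')   # 'UDS' is GLOBAL_SECTION above

timeout = MY_TIMEOUT.getInt()     # integer access, with fallback handling for non-numeric values
raw = MY_TIMEOUT.get(force=True)  # force=True re-reads the value from the database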

View File

@ -39,8 +39,9 @@ from uds.web import errors
from time import sleep
from functools import wraps
# Have to test these decorators before using them
def retryOnException(retries=3, delay = 0):
def retryOnException(retries=3, delay=0):
'''
Decorator to retry
'''
@ -62,7 +63,7 @@ def retryOnException(retries=3, delay = 0):
# Decorator that protects pages that needs at least a browser version
# Default is to deny IE < 9
def denyBrowsers(browsers=['ie<9'], errorResponse=lambda request:errors.errorView(request,errors.BROWSER_NOT_SUPPORTED)):
def denyBrowsers(browsers=['ie<9'], errorResponse=lambda request: errors.errorView(request, errors.BROWSER_NOT_SUPPORTED)):
'''
Decorator to set protection to access page
Look for samples at uds.core.web.views
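A hedged usage sketch (the view name is illustrative):

    @denyBrowsers(browsers=['ie<9'])
    def serviceList(request):
        ...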

View File

@ -42,17 +42,18 @@ Android = 'Android'
iPad = 'iPad'
iPhone = 'iPhone'
knownOss = [ WindowsPhone, Android, Linux, Windows, Macintosh, iPad, iPhone ] # Android is Linux also, so it is checked first
knownOss = [WindowsPhone, Android, Linux, Windows, Macintosh, iPad, iPhone] # Android is Linux also, so it is checked first
allOss = list(knownOss)
desktopOss = [Linux, Windows, Macintosh]
mobilesODD = list(set(allOss)-set(desktopOss))
mobilesODD = list(set(allOss) - set(desktopOss))
def getOsFromUA(ua):
'''
Basic OS Client detector (very basic indeed :-))
'''
res = {'OS' : 'Unknown', 'Version' : 'unused' }
res = {'OS': 'Unknown', 'Version': 'unused'}
for os in knownOss:
try:
ua.index(os)
@ -63,4 +64,3 @@ def getOsFromUA(ua):
logger.debug('User-Agent: {0}'.format(ua))
logger.debug('Detected OS: {0}'.format(res))
return res
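# Illustrative (hedged) example: a user agent such as
#     'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; rv:11.0) like Gecko'
# contains no WindowsPhone/Android/Linux token, so 'Windows' is the first match
# and the result would be {'OS': 'Windows', 'Version': 'unused'}.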

View File

@ -30,10 +30,11 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from django.utils.translation import ugettext_noop as _, ugettext
# States for different objects. Not all objects support all states
class State(object):
'''
@ -57,17 +58,30 @@ class State(object):
RUNNING = 'W'
FOR_EXECUTE = 'X'
string = { ACTIVE: _('Active'), INACTIVE: _('Inactive'), BLOCKED: _('Blocked'), LAUNCHING: _('Waiting publication'),
PREPARING: _('In preparation'), USABLE: _('Valid'),
REMOVABLE: _('Waiting for removal'), REMOVING: _('Removing'), REMOVED: _('Removed'), CANCELED: _('Canceled'),
CANCELING: _('Canceling'), ERROR: _('Error'), RUNNING: _('Running'), FINISHED: _('Finished'), FOR_EXECUTE: _('Waiting execution'),
BALANCING: _('Balancing') }
string = {
ACTIVE: _('Active'),
INACTIVE: _('Inactive'),
BLOCKED: _('Blocked'),
LAUNCHING: _('Waiting publication'),
PREPARING: _('In preparation'),
USABLE: _('Valid'),
REMOVABLE: _('Waiting for removal'),
REMOVING: _('Removing'),
REMOVED: _('Removed'),
CANCELED: _('Canceled'),
CANCELING: _('Canceling'),
ERROR: _('Error'),
RUNNING: _('Running'),
FINISHED: _('Finished'),
FOR_EXECUTE: _('Waiting execution'),
BALANCING: _('Balancing')
}
# States that are merely for "information" to the user. They don't contain any usable instance
INFO_STATES = [REMOVED, CANCELED, ERROR]
# States that indicates that the service is "Valid" for a user
VALID_STATES = [USABLE,PREPARING]
VALID_STATES = [USABLE, PREPARING]
# Publication States
PUBLISH_STATES = [LAUNCHING, PREPARING]
@ -144,4 +158,3 @@ class State(object):
for k, v in State.string.iteritems():
res[k] = ugettext(v)
return res

View File

@ -30,6 +30,8 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
class StateQueue(object):
@ -37,7 +39,7 @@ class StateQueue(object):
self.reset()
def __str__(self):
res = '<StateQueue Current: %s, Queue: (%s)>' % (self._current , ','.join( state for state in self._queue ))
res = '<StateQueue Current: %s, Queue: (%s)>' % (self._current, ','.join(state for state in self._queue))
return res
def clearQueue(self):
@ -55,7 +57,7 @@ class StateQueue(object):
return self._current
def contains(self, state):
#if self._queue.co
# if self._queue.co
for s in self._queue:
if s == state:
return True

View File

@ -30,6 +30,7 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from django.db import transaction
from uds.models import Storage as dbStorage
@ -39,6 +40,7 @@ import cPickle
logger = logging.getLogger(__name__)
class Storage(object):
CODEC = 'base64' # Can be zip, hez, bzip, base64, uuencoded
@ -51,16 +53,16 @@ class Storage(object):
h.update(str(key))
return h.hexdigest()
def saveData(self, skey, data, attr1 = None):
def saveData(self, skey, data, attr1=None):
key = self.__getKey(skey)
data = data.encode(Storage.CODEC)
attr1 = '' if attr1 == None else attr1
try:
with transaction.atomic():
dbStorage.objects.create(owner = self._owner, key = key, data = data, attr1 = attr1 )
dbStorage.objects.create(owner=self._owner, key=key, data=data, attr1=attr1)
except Exception:
with transaction.atomic():
dbStorage.objects.filter(key=key).update(owner = self._owner, data = data, attr1 = attr1)
dbStorage.objects.filter(key=key).update(owner=self._owner, data=data, attr1=attr1)
logger.debug('Key saved')
def put(self, skey, data):
@ -69,7 +71,7 @@ class Storage(object):
def putPickle(self, skey, data):
return self.saveData(skey, cPickle.dumps(data))
def updateData(self, skey, data, attr1 = None):
def updateData(self, skey, data, attr1=None):
self.saveData(skey, data, attr1)
def readData(self, skey):
@ -108,7 +110,7 @@ class Storage(object):
dbStorage.objects.unlock()
@staticmethod
def delete(owner = None):
def delete(owner=None):
logger.info("Deleting storage items")
if owner == None:
objects = dbStorage.objects.all()
@ -118,6 +120,6 @@ class Storage(object):
def locateByAttr1(self, attr1):
res = []
for v in dbStorage.objects.filter( attr1 = attr1 ):
res.append( v.data.decode(Storage.CODEC) )
for v in dbStorage.objects.filter(attr1=attr1):
res.append(v.data.decode(Storage.CODEC))
return res
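A hedged usage sketch; the import path and key names are assumptions for illustration:

from uds.core.util.Storage import Storage

storage = Storage('myModuleOwner')                   # the owner string namespaces the keys
storage.putPickle('state', {'step': 1})              # pickles, encodes and stores the value
state = storage.getPickle('state')                   # counterpart used elsewhere in UDS (e.g. cluster stats)
storage.saveData('token', 'abc', attr1='session')    # attr1 enables lookups via locateByAttr1()
sessionTokens = storage.locateByAttr1('session')     # list of decoded data values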

View File

@ -36,7 +36,8 @@ import logging
logger = logging.getLogger(__name__)
DEFAULT_QUEUE_SIZE=32
DEFAULT_QUEUE_SIZE = 32
class Worker(Thread):
def __init__(self, tasks):
@ -62,6 +63,7 @@ class Worker(Thread):
self._tasks.task_done()
class ThreadPool:
def __init__(self, num_threads, queueSize=DEFAULT_QUEUE_SIZE):
self._tasks = Queue.Queue(queueSize)

View File

@ -40,9 +40,10 @@ logger = logging.getLogger(__name__)
MAX_SEQ = 1000000000000000
class UniqueIDGenerator(object):
def __init__(self, typeName, owner, baseName = 'uds'):
def __init__(self, typeName, owner, baseName='uds'):
self._owner = owner + typeName
self._baseName = baseName
@ -50,7 +51,7 @@ class UniqueIDGenerator(object):
self._baseName = newBaseName
def __filter(self, rangeStart, rangeEnd=MAX_SEQ):
return dbUniqueId.objects.filter( basename = self._baseName, seq__gte=rangeStart, seq__lte=rangeEnd )
return dbUniqueId.objects.filter(basename=self._baseName, seq__gte=rangeStart, seq__lte=rangeEnd)
def get(self, rangeStart=0, rangeEnd=MAX_SEQ):
'''
@ -64,18 +65,18 @@ class UniqueIDGenerator(object):
flt = self.__filter(rangeStart, rangeEnd)
try:
item = flt.filter(assigned=False).order_by('seq')[0]
dbUniqueId.objects.filter(id=item.id).update( owner = self._owner, assigned = True, stamp = stamp )
dbUniqueId.objects.filter(id=item.id).update(owner=self._owner, assigned=True, stamp=stamp)
seq = item.seq
except Exception, e: # No free element found
except Exception: # No free element found
try:
last = flt.filter(assigned = True)[0] # DB Returns correct order so the 0 item is the last
last = flt.filter(assigned=True)[0] # DB Returns correct order so the 0 item is the last
seq = last.seq + 1
except Exception: # If there is no assigned at database
seq = rangeStart
logger.debug('Found seq {0}'.format(seq))
if seq > rangeEnd:
return -1 # No ids free in range
dbUniqueId.objects.create( owner = self._owner, basename = self._baseName, seq = seq, assigned = True, stamp = stamp)
dbUniqueId.objects.create(owner=self._owner, basename=self._baseName, seq=seq, assigned=True, stamp=stamp)
return seq
except Exception:
logger.exception('Generating unique id sequence')
@ -87,7 +88,7 @@ class UniqueIDGenerator(object):
try:
dbUniqueId.objects.lock()
obj = dbUniqueId.objects.get( owner=self._owner, seq=seq)
obj = dbUniqueId.objects.get(owner=self._owner, seq=seq)
obj.owner = toUidGen._owner
obj.basename = toUidGen._baseName
obj.stamp = getSqlDatetime(True)
@ -100,31 +101,28 @@ class UniqueIDGenerator(object):
finally:
dbUniqueId.objects.unlock()
def free(self, seq):
try:
logger.debug('Freeing seq {0} from {1} ({2})'.format(seq, self._owner, self._baseName))
dbUniqueId.objects.lock()
flt = self.__filter(0).filter(owner = self._owner, seq=seq).update(owner='', assigned=False, stamp = getSqlDatetime(True))
flt = self.__filter(0).filter(owner=self._owner, seq=seq).update(owner='', assigned=False, stamp=getSqlDatetime(True))
if flt > 0:
self.__purge()
finally:
dbUniqueId.objects.unlock()
def __purge(self):
try:
last = self.__filter(0).filter(assigned=True)[0]
seq = last.seq+1
seq = last.seq + 1
except:
seq = 0
self.__filter(seq).delete() # Clean ups all unassigned after last assigned in this range
def release(self):
try:
dbUniqueId.objects.lock()
dbUniqueId.objects.filter(owner=self._owner).update(assigned=False, owner='', stamp = getSqlDatetime(True))
dbUniqueId.objects.filter(owner=self._owner).update(assigned=False, owner='', stamp=getSqlDatetime(True))
self.__purge()
finally:
dbUniqueId.objects.unlock()
@ -133,8 +131,7 @@ class UniqueIDGenerator(object):
stamp = getSqlDatetime(True)
try:
dbUniqueId.objects.lock()
dbUniqueId.objects.filter(owner=self._owner, stamp__lt=stamp).update(assigned=False, owner='', stamp = stamp)
dbUniqueId.objects.filter(owner=self._owner, stamp__lt=stamp).update(assigned=False, owner='', stamp=stamp)
self.__purge()
finally:
dbUniqueId.objects.unlock()
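A hedged usage sketch of the generator; the type, owner and range values are illustrative:

from uds.core.util.UniqueIDGenerator import UniqueIDGenerator  # assumed import path

gen = UniqueIDGenerator('sample', 'myOwner')   # typeName, owner (baseName defaults to 'uds')
seq = gen.get(rangeStart=1, rangeEnd=1000)     # returns -1 when the range is exhausted
if seq != -1:
    # ... use the id ...
    gen.free(seq)                              # give it back to the pool when no longer needed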

View File

@ -32,12 +32,13 @@
'''
from __future__ import unicode_literals
from UniqueIDGenerator import UniqueIDGenerator
import logging, re
import logging
import re
logger = logging.getLogger(__name__)
class UniqueMacGenerator(UniqueIDGenerator):
def __init__(self, owner):
@ -56,9 +57,9 @@ class UniqueMacGenerator(UniqueIDGenerator):
return self.__toMac(super(UniqueMacGenerator, self).get(firstMac, lastMac))
def transfer(self, mac, toUMgen):
super(UniqueMacGenerator, self).transfer( self.__toInt(mac), toUMgen )
super(UniqueMacGenerator, self).transfer(self.__toInt(mac), toUMgen)
def free(self, mac):
super(UniqueMacGenerator, self).free( self.__toInt(mac) )
super(UniqueMacGenerator, self).free(self.__toInt(mac))
# Release is inherited, no mod needed

View File

@ -30,16 +30,18 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
from UniqueIDGenerator import UniqueIDGenerator
import logging
logger = logging.getLogger(__name__)
class UniqueNameGenerator(UniqueIDGenerator):
def __init__(self, owner):
super(UniqueNameGenerator, self).__init__('name', owner, )
super(UniqueNameGenerator, self).__init__('name', owner,)
def __toName(self, seq, length):
if seq == -1:
@ -49,7 +51,7 @@ class UniqueNameGenerator(UniqueIDGenerator):
def get(self, baseName, length=5):
self.setBaseName(baseName)
minVal = 0
maxVal = 10**length - 1
maxVal = 10 ** length - 1
return self.__toName(super(UniqueNameGenerator, self).get(minVal, maxVal), length)
def transfer(self, baseName, name, toUNGen):

View File

@ -37,7 +37,8 @@ import socket
logger = logging.getLogger(__name__)
def testServer(host, port, timeOut = 4):
def testServer(host, port, timeOut=4):
try:
logger.debug('Checking connection to {0}:{1} with {2} seconds timeout'.format(host, port, timeOut))
sock = socket.create_connection((host, int(port)), timeOut)
@ -46,5 +47,3 @@ def testServer(host, port, timeOut = 4):
logger.debug('Exception checking {0}:{1} with {2} timeout: {3}'.format(host, port, timeOut, e))
return False
return True
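For example (hedged; host and port are illustrative):

reachable = testServer('10.0.0.5', 3389, timeOut=2)  # True if the port answers within 2 seconds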

View File

@ -46,10 +46,11 @@ def parseDate(dateToParse):
if get_language() == 'fr':
date_format = '%d/%m/%Y'
else:
date_format = formats.get_format('SHORT_DATE_FORMAT').replace('Y', '%Y').replace('m','%m').replace('d','%d')
date_format = formats.get_format('SHORT_DATE_FORMAT').replace('Y', '%Y').replace('m', '%m').replace('d', '%d')
return datetime.datetime.strptime(dateToParse, date_format).date()
def dateToLiteral(date):
from django.utils.translation import get_language
from django.utils import formats
@ -62,12 +63,13 @@ def dateToLiteral(date):
return date
def extractKey(dictionary, key, **kwargs):
format_ = kwargs.get('format', '{0}')
default = kwargs.get('default', '')
if dictionary.has_key(key):
if key in dictionary:
value = format_.format(dictionary[key])
del dictionary[key]
else:
@ -88,13 +90,14 @@ _chrome = re.compile('Chrome/([0-9]+)\.([0-9]+)')
_webkit = re.compile('AppleWebKit/([0-9]+)\.([0-9]+)')
_browsers = {
'ie' : [_trident, _msie],
'ie': [_trident, _msie],
'opera': [_opera],
'firefox': [_firefox],
'chrome': [_chrome],
'webkit': [_webkit],
}
def checkBrowser(user_agent, browser):
'''
Known browsers right now:
@ -102,7 +105,6 @@ def checkBrowser(user_agent, browser):
ie<[version]
'''
# Split the browsers we look for
needs_browser = None
needs_version = 0
needs = ''

View File

@ -30,13 +30,14 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
import logging
logger = logging.getLogger(__name__)
useLogger = logging.getLogger('useLog')
# Logging levels
OTHER,DEBUG,INFO,WARN,ERROR,FATAL = (10000*(x+1) for x in xrange(6))
OTHER, DEBUG, INFO, WARN, ERROR, FATAL = (10000 * (x + 1) for x in xrange(6))
# Logging sources
INTERNAL, ACTOR, TRANSPORT, OSMANAGER, UNKNOWN, WEB, ADMIN, SERVICE = ('internal', 'actor', 'transport', 'osmanager', 'unknown', 'web', 'admin', 'service')
@ -54,7 +55,8 @@ __nameLevels = {
}
# Reverse dict of names
__valueLevels = dict((v,k) for k, v in __nameLevels.iteritems())
__valueLevels = dict((v, k) for k, v in __nameLevels.iteritems())
def logLevelFromStr(str_):
'''
@ -62,6 +64,7 @@ def logLevelFromStr(str_):
'''
return __nameLevels.get(str_.upper(), OTHER)
def logStrFromLevel(level):
return __valueLevels.get(level, 'OTHER')
@ -70,12 +73,12 @@ def useLog(type_, serviceUniqueId, serviceIp, username):
useLogger.info('|'.join([type_, serviceUniqueId, serviceIp, username]))
def doLog(wichObject, level, message, source = UNKNOWN, avoidDuplicates = True):
def doLog(wichObject, level, message, source=UNKNOWN, avoidDuplicates=True):
from uds.core.managers import logManager
logManager().doLog(wichObject, level, message, source, avoidDuplicates)
def getLogs(wichObject, limit = None):
def getLogs(wichObject, limit=None):
'''
Get the logs associated with "wichObject", limiting to "limit" (default is GlobalConfig.MAX_LOGS_PER_ELEMENT)
'''
@ -87,6 +90,7 @@ def getLogs(wichObject, limit = None):
return logManager().getLogs(wichObject, limit)
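# Hedged usage examples (the logged object and message are illustrative):
#
#     doLog(userService, ERROR, 'Machine seems to be hanged', SERVICE)
#     lastEntries = getLogs(userService, limit=10)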
def clearLogs(wichObject):
'''
Clears the logs associated with the object using the logManager

View File

@ -30,17 +30,20 @@
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
import os.path, pkgutil
import sys, imp
import os.path
import pkgutil
import sys
import imp
import logging
import uds.dispatchers
import uds.dispatchers # @UnusedImport
logger = logging.getLogger(__name__)
patterns = []
def loadModulesUrls():
logger.debug('Looking for dispatching modules')
global patterns
@ -54,10 +57,9 @@ def loadModulesUrls():
try:
patterns += mod.urlpatterns
except:
logger.info( 'Module {0} has no url patterns'.format(mod))
logger.info('Module {0} has no url patterns'.format(mod))
except Exception, e:
logger.debug(e)
pass
return patterns

View File

@ -44,6 +44,7 @@ re3Asterisk = re.compile('^([0-9]{1,3})\.\*\.?\*?\.?\*?$')
reRange = re.compile('^([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})-([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$')
reHost = re.compile('^([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$')
def ipToLong(ip):
'''
convert decimal dotted quad string to long integer
@ -54,6 +55,7 @@ def ipToLong(ip):
except:
return 0 # Invalid values will map to "0.0.0.0" --> 0
def longToIp(n):
'''
convert long int to dotted quad string
@ -62,16 +64,16 @@ def longToIp(n):
d = 256 * 256 * 256
q = []
while d > 0:
m,n = divmod(n,d)
m, n = divmod(n, d)
q.append(str(m))
d = d/256
d = d / 256
return '.'.join(q)
except:
return '0.0.0.0' # Invalid values will map to "0.0.0.0"
def networksFromString(strNets, allowMultipleNetworks = True):
def networksFromString(strNets, allowMultipleNetworks=True):
'''
Parses networks from strings in these forms:
- A.* (or A.*.* or A.*.*.*)
@ -93,17 +95,17 @@ def networksFromString(strNets, allowMultipleNetworks = True):
raise Exception()
def toNum(*args):
start = 256*256*256
start = 256 * 256 * 256
val = 0
for n in args:
val += start*int(n)
val += start * int(n)
start /= 256
return val
def maskFromBits(nBits):
v = 0
for n in xrange(nBits):
v |= 1<<(31-n)
v |= 1 << (31 - n)
return v
if allowMultipleNetworks is True:
@ -129,7 +131,7 @@ def networksFromString(strNets, allowMultipleNetworks = True):
val = toNum(*m.groups())
bits = maskFromBits(bits)
noBits = ~bits & 0xffffffff
return (val&bits, val|noBits)
return (val & bits, val | noBits)
m = reMask.match(strNets)
if m is not None:
@ -137,7 +139,7 @@ def networksFromString(strNets, allowMultipleNetworks = True):
val = toNum(*(m.groups()[0:4]))
bits = toNum(*(m.groups()[4:8]))
noBits = ~bits & 0xffffffff
return (val&bits, val|noBits)
return (val & bits, val | noBits)
m = reRange.match(strNets)
if m is not None:
@ -158,10 +160,10 @@ def networksFromString(strNets, allowMultipleNetworks = True):
m = v[0].match(strNets)
if m is not None:
check(*m.groups())
val = toNum(*(m.groups()[0:v[1]+1]))
bits = maskFromBits(v[1]*8)
val = toNum(*(m.groups()[0:v[1] + 1]))
bits = maskFromBits(v[1] * 8)
noBits = ~bits & 0xffffffff
return (val&bits, val|noBits)
return (val & bits, val | noBits)
# No pattern recognized, invalid network
raise Exception()
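# Hedged examples of accepted inputs (single-network mode returns a (start, end)
# pair of longs; with allowMultipleNetworks=True a list of such pairs is expected):
#
#     networksFromString('192.168.0.*', False)  ->  (3232235520, 3232235775)
#     networksFromString('10.0.0.0/8', False)   ->  (167772160, 184549375)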
@ -170,9 +172,9 @@ def networksFromString(strNets, allowMultipleNetworks = True):
def ipInNetwork(ip, network):
if isinstance(ip,unicode) or isinstance(ip,str):
if isinstance(ip, unicode) or isinstance(ip, str):
ip = ipToLong(ip)
if isinstance(network,unicode) or isinstance(network,str):
if isinstance(network, unicode) or isinstance(network, str):
network = networksFromString(network)
for net in network:

View File

@ -30,15 +30,17 @@
'''
.. moduleauthor:: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
import threading
_requests = {}
def getRequest():
return _requests[threading._get_ident()]
class GlobalRequestMiddleware(object):
def process_request(self, request):
_requests[threading._get_ident()] = request
return None

View File

@ -43,15 +43,16 @@ import logging
logger = logging.getLogger(__name__)
class AssignedAndUnused(Job):
frecuency = GlobalConfig.CHECK_UNUSED_TIME.getInt()
friendly_name = 'Unused services checker'
def __init__(self, environment):
super(AssignedAndUnused,self).__init__(environment)
super(AssignedAndUnused, self).__init__(environment)
def run(self):
since_state = getSqlDatetime() - timedelta( seconds = GlobalConfig.CHECK_UNUSED_TIME.getInt() )
since_state = getSqlDatetime() - timedelta(seconds=GlobalConfig.CHECK_UNUSED_TIME.getInt())
for ds in DeployedService.objects.all():
# If it does not need an os manager, this is
with transaction.atomic():
@ -59,11 +60,10 @@ class AssignedAndUnused(Job):
osm = ds.osmanager.getInstance()
if osm.processUnusedMachines is True:
logger.debug('Processing unused services for {0}'.format(osm))
for us in ds.assignedUserServices().select_for_update().filter(in_use=False,state_date__lt=since_state, state=State.USABLE, os_state=State.USABLE):
for us in ds.assignedUserServices().select_for_update().filter(in_use=False, state_date__lt=since_state, state=State.USABLE, os_state=State.USABLE):
logger.debug('Found unused assigned service {0}'.format(us))
osm.processUnused(us)
else: # No os manager, simply remove unused services in specified time
for us in ds.assignedUserServices().select_for_update().filter(in_use=False,state_date__lt=since_state, state=State.USABLE, os_state=State.USABLE):
for us in ds.assignedUserServices().select_for_update().filter(in_use=False, state_date__lt=since_state, state=State.USABLE, os_state=State.USABLE):
logger.debug('Found unused assigned service {0}'.format(us))
us.remove()

View File

@ -41,14 +41,13 @@ logger = logging.getLogger(__name__)
class CacheCleaner(Job):
frecuency = 3600*24 # Once a day
frecuency = 3600 * 24 # Once a day
friendly_name = 'Utility Cache Cleaner'
def __init__(self, environment):
super(CacheCleaner,self).__init__(environment)
super(CacheCleaner, self).__init__(environment)
def run(self):
logger.debug('Starting cache cleanup')
Cache.cleanUp()
logger.debug('Done cache cleanup')

View File

@ -47,9 +47,10 @@ GETCLUSTERSTATS_TAG = 'ClstrStats'
BALANCECLUSTER_TAG = 'ClstrBalance'
MIGRATETASK_TAG = 'ClstrMigrate'
# Utility to get all providers that are derived from
def getClusteredProvidersFromDB():
#services.ClusteredServiceProvider.
# services.ClusteredServiceProvider.
from uds.core import services
p = services.ClusteredServiceProvider
@ -59,9 +60,10 @@ def getClusteredProvidersFromDB():
if prov.isOfType(cls.typeType):
yield prov
class ClusterUpdateStatsTask(DelayedTask):
def __init__(self, providerId):
super(ClusterUpdateStatsTask,self).__init__()
super(ClusterUpdateStatsTask, self).__init__()
self._id = providerId
def run(self):
@ -73,8 +75,11 @@ class ClusterUpdateStatsTask(DelayedTask):
stats = {}
for node in nodes:
s = cluster.getClusterNodeLoad(node['id'])
stats[node['id']] = { 'cpuLoad': s.get('cpuLoad', None), 'freeMemory': s.get('freeMemory', None),
'totalMemory': s.get('totalMemory') }
stats[node['id']] = {
'cpuLoad': s.get('cpuLoad', None),
'freeMemory': s.get('freeMemory', None),
'totalMemory': s.get('totalMemory')
}
cluster.storage().putPickle('ClusterStats', stats)
except:
logger.exception('Update Stats Task')
@ -88,7 +93,7 @@ class ClusterUpdateStats(Job):
friendly_name = 'Clustered Providers Statistics Updater'
def __init__(self, environment):
super(ClusterUpdateStats,self).__init__(environment)
super(ClusterUpdateStats, self).__init__(environment)
def run(self):
logger.debug('Clustered Service manager started')
@ -97,6 +102,7 @@ class ClusterUpdateStats(Job):
ct = ClusterUpdateStatsTask(p.id)
ct.register(0, '{0}_{1}'.format(GETCLUSTERSTATS_TAG, p.id), True)
# Balancing nodes related
class ClusterMigrationTask(DelayedTask):
def __init__(self, service):
@ -146,8 +152,6 @@ class ClusterMigrationTask(DelayedTask):
return
DelayedTaskRunner.runner().insert(ClusterUpdateStats(userService), userServiceInstance.suggestedTime, ClusterUpdateStats + str(userService.id))
@transaction.atomic
def run(self):
logger.debug('Checking user service finished migrating {0}'.format(self._serviceId))
uService = None
@ -175,6 +179,7 @@ class ClusterMigrationTask(DelayedTask):
except Exception:
logger.error('Can\'t update state of uService object')
class ClusterBalancingTask(DelayedTask):
def __init__(self, providerId):
super(ClusterBalancingTask, self).__init__()
@ -241,7 +246,7 @@ class ClusterBalancingJob(Job):
friendly_name = 'Clustered Providers Balancing job'
def __init__(self, environment):
super(ClusterBalancingJob,self).__init__(environment)
super(ClusterBalancingJob, self).__init__(environment)
def run(self):
'''
@ -252,4 +257,3 @@ class ClusterBalancingJob(Job):
logger.debug('Checking balancing on {0}'.format(p.name))
cb = ClusterBalancingTask(p.id)
cb.register(0, '{0}_{1}'.format(BALANCECLUSTER_TAG, p.id), True)

View File

@ -42,15 +42,16 @@ import logging
logger = logging.getLogger(__name__)
class DeployedServiceInfoItemsCleaner(Job):
frecuency = GlobalConfig.CLEANUP_CHECK.getInt() # Request run cache "info" cleaner every configured seconds. If config value is changed, it will be used at next reload
friendly_name = 'Deployed Service Info Cleaner'
def __init__(self, environment):
super(DeployedServiceInfoItemsCleaner,self).__init__(environment)
super(DeployedServiceInfoItemsCleaner, self).__init__(environment)
def run(self):
removeFrom = getSqlDatetime() - timedelta(seconds = GlobalConfig.KEEP_INFO_TIME.getInt())
removeFrom = getSqlDatetime() - timedelta(seconds=GlobalConfig.KEEP_INFO_TIME.getInt())
DeployedService.objects.filter(state__in=State.INFO_STATES, state_date__lt=removeFrom).delete()
@ -59,7 +60,7 @@ class DeployedServiceRemover(Job):
friendly_name = 'Deployed Service Cleaner'
def __init__(self, environment):
super(DeployedServiceRemover,self).__init__(environment)
super(DeployedServiceRemover, self).__init__(environment)
@transaction.atomic
def startRemovalOf(self, ds):
@ -79,7 +80,6 @@ class DeployedServiceRemover(Job):
ds.name = ds.name + ' (removed)'
ds.save()
@transaction.atomic
def continueRemovalOf(self, ds):
# First, we remove all publications and user services in "info_state"
@ -100,7 +100,7 @@ class DeployedServiceRemover(Job):
ds.publications.filter(state__in=State.INFO_STATES).delete()
if ds.publications.count() is 0:
ds.removed() # Mark it as removed, clean later from database
except Exception as e:
except Exception:
logger.exception('Caught unexpected exception at continueRemovalOf: ')
def run(self):
@ -115,5 +115,3 @@ class DeployedServiceRemover(Job):
logger.debug('Found a deployed service in removing state, continuing removal of {0}'.format(rems))
for rem in rems:
self.continueRemovalOf(rem)

View File

@ -43,15 +43,16 @@ import logging
logger = logging.getLogger(__name__)
class HangedCleaner(Job):
frecuency = GlobalConfig.MAX_INITIALIZING_TIME.getInt()
friendly_name = 'Hanged services checker'
def __init__(self, environment):
super(HangedCleaner,self).__init__(environment)
super(HangedCleaner, self).__init__(environment)
def run(self):
since_state = getSqlDatetime() - timedelta( seconds = GlobalConfig.MAX_INITIALIZING_TIME.getInt() )
since_state = getSqlDatetime() - timedelta(seconds=GlobalConfig.MAX_INITIALIZING_TIME.getInt())
# Filter for locating machine not ready
flt = Q(state_date__lt=since_state, state=State.PREPARING) | Q(state_date__lt=since_state, state=State.USABLE, os_state=State.PREPARING)
@ -62,4 +63,3 @@ class HangedCleaner(Job):
log.doLog(us, log.ERROR, 'User Service seems to be hanged. Removing it.', log.INTERNAL)
log.doLog(ds, log.ERROR, 'Removing user service {0} because it seems to be hanged'.format(us.friendly_name))
us.removeOrCancel()

View File

@ -47,20 +47,21 @@ logger = logging.getLogger(__name__)
class PublicationInfoItemsCleaner(Job):
frecuency = GlobalConfig.CLEANUP_CHECK.getInt() # Request run cache "info" cleaner every configured seconds. If config value is changed, it will be used at next reload
friendly_name = 'Publications Info Cleaner'
now = getSqlDatetime()
def __init__(self, environment):
super(PublicationInfoItemsCleaner,self).__init__(environment)
super(PublicationInfoItemsCleaner, self).__init__(environment)
def run(self):
removeFrom = getSqlDatetime() - timedelta(seconds = GlobalConfig.KEEP_INFO_TIME.getInt(True))
removeFrom = getSqlDatetime() - timedelta(seconds=GlobalConfig.KEEP_INFO_TIME.getInt(True))
DeployedServicePublication.objects.filter(state__in=State.INFO_STATES, state_date__lt=removeFrom).delete()
class PublicationCleaner(Job):
frecuency = GlobalConfig.REMOVAL_CHECK.getInt() # Request run publication "removal" every configured seconds. If config value is changed, it will be used at next reload
friendly_name = 'Publication Cleaner'
def __init__(self, environment):
super(PublicationCleaner,self).__init__(environment)
super(PublicationCleaner, self).__init__(environment)
def run(self):
removables = DeployedServicePublication.objects.filter(state=State.REMOVABLE)

View File

@ -59,11 +59,11 @@ class ServiceCacheUpdater(Job):
friendly_name = 'Service Cache Updater'
def __init__(self, environment):
super(ServiceCacheUpdater,self).__init__(environment)
super(ServiceCacheUpdater, self).__init__(environment)
@staticmethod
def calcProportion(max, actual):
return actual * 10000 / max
def calcProportion(max_, actual):
return actual * 10000 / (max_ or 1)
@staticmethod
def __notifyRestrain(deployedService):
@ -76,13 +76,13 @@ class ServiceCacheUpdater(Job):
# First we get all deployed services that could need cache generation
DeployedService.objects.update()
# We start filtering out the deployed services that do not need caching at all.
whichNeedsCaching = DeployedService.objects.filter(Q(initial_srvs__gt=0) | Q(cache_l1_srvs__gt=0)).filter(max_srvs__gt=0,state=State.ACTIVE)
whichNeedsCaching = DeployedService.objects.filter(Q(initial_srvs__gt=0) | Q(cache_l1_srvs__gt=0)).filter(max_srvs__gt=0, state=State.ACTIVE)
# We will get the one that proportionally needs more cache
selected = None
cachedL1, cachedL2, assigned = 0,0,0
cachedL1, cachedL2, assigned = 0, 0, 0
toCacheL1 = False # Mark for preferring to update L1 cache before L2 cache
prop = ServiceCacheUpdater.calcProportion(1,1)
prop = ServiceCacheUpdater.calcProportion(1, 1)
for ds in whichNeedsCaching:
ds.userServices.update() # Cleans cached queries
# If this deployedService don't have a publication active and needs it, ignore it
@ -126,7 +126,7 @@ class ServiceCacheUpdater(Job):
logger.debug('We have more services in L2 cache than configured, decreasing it')
cachedL1, cachedL2, assigned = inCacheL1, inCacheL2, inAssigned
selected = ds
prop = ServiceCacheUpdater.calcProportion(1,0)
prop = ServiceCacheUpdater.calcProportion(1, 0)
# If this service don't allows more starting user services, continue
if UserServiceManager.manager().canInitiateServiceFromDeployedService(ds) is False:
@ -146,7 +146,7 @@ class ServiceCacheUpdater(Job):
# We skip it if already at max
if totalL1Assigned == ds.max_srvs:
continue;
continue
if totalL1Assigned < ds.initial_srvs:
p = ServiceCacheUpdater.calcProportion(ds.initial_srvs, totalL1Assigned)

View File

@ -51,12 +51,12 @@ class DeployedServiceStatsCollector(Job):
friendly_name = 'Deployed Service Stats'
def __init__(self, environment):
super(DeployedServiceStatsCollector,self).__init__(environment)
super(DeployedServiceStatsCollector, self).__init__(environment)
def run(self):
logger.debug('Starting Deployed service stats collector')
for ds in DeployedService.objects.filter(state = State.ACTIVE):
for ds in DeployedService.objects.filter(state=State.ACTIVE):
try:
fltr = ds.assignedUserServices().exclude(state__in=State.INFO_STATES)
assigned = fltr.count()
@ -66,7 +66,6 @@ class DeployedServiceStatsCollector(Job):
except:
logger.exception('Getting counters for deployed service {0}'.format(ds))
logger.debug('Done Deployed service stats collector')
@ -78,7 +77,7 @@ class StatsCleaner(Job):
* Optimize table
'''
frecuency = 3600*24*15 # Executed just once every 15 days
frecuency = 3600 * 24 * 15 # Executed just once every 15 days
friendly_name = 'Statistic housekeeping'
def run(self):

View File

@ -48,16 +48,15 @@ logger = logging.getLogger(__name__)
# Look for non-current cache items and mark them as removable.
class UserServiceInfoItemsCleaner(Job):
frecuency = GlobalConfig.CLEANUP_CHECK.getInt() # Request run cache "info" cleaner every configured seconds. If config value is changed, it will be used at next reload
friendly_name = 'User Service Info Cleaner'
def __init__(self, environment):
super(UserServiceInfoItemsCleaner,self).__init__(environment)
super(UserServiceInfoItemsCleaner, self).__init__(environment)
def run(self):
removeFrom = getSqlDatetime() - timedelta(seconds = GlobalConfig.KEEP_INFO_TIME.getInt(True))
removeFrom = getSqlDatetime() - timedelta(seconds=GlobalConfig.KEEP_INFO_TIME.getInt(True))
logger.debug('Removing information user services from {0}'.format(removeFrom))
UserService.objects.select_for_update().filter(state__in=State.INFO_STATES, state_date__lt=removeFrom).delete()
@ -69,7 +68,7 @@ class UserServiceRemover(Job):
removeAtOnce = GlobalConfig.USER_SERVICE_CLEAN_NUMBER.getInt() # Same, it will work at reload
def __init__(self, environment):
super(UserServiceRemover,self).__init__(environment)
super(UserServiceRemover, self).__init__(environment)
def run(self):
removeFrom = getSqlDatetime() - timedelta(seconds=10) # We keep the machine at least 30 seconds before removing it, so we avoid connection errors

View File

@ -32,12 +32,14 @@
'''
from __future__ import unicode_literals
def __init__():
'''
This imports all packages that are descendant of this package, and, after that,
it register all subclases of service provider as
'''
import os.path, pkgutil
import os.path
import pkgutil
import sys
from uds.core import jobs
from uds.core.managers.TaskManager import TaskManager

View File

@ -59,44 +59,44 @@ class TSNXTransport(Transport):
needsJava = True # If this transport needs java for rendering
supportedOss = ['Windows', 'Macintosh', 'Linux']
tunnelServer = gui.TextField(label=_('Tunnel server'), order = 1, tooltip = _('IP or Hostname of tunnel server sent to client device ("public" ip) and port. (use HOST:PORT format)'))
tunnelCheckServer = gui.TextField(label=_('Tunnel host check'), order = 2, tooltip = _('If not empty, this server will be used to check if service is running before assigning it to user. (use HOST:PORT format)'))
tunnelServer = gui.TextField(label=_('Tunnel server'), order=1, tooltip=_('IP or Hostname of tunnel server sent to client device ("public" ip) and port. (use HOST:PORT format)'))
tunnelCheckServer = gui.TextField(label=_('Tunnel host check'), order=2, tooltip=_('If not empty, this server will be used to check if service is running before assigning it to user. (use HOST:PORT format)'))
useEmptyCreds = gui.CheckBoxField(label = _('Empty creds'), order = 3, tooltip = _('If checked, the credentials used to connect will be empty'))
fixedName = gui.TextField(label=_('Username'), order = 4, tooltip = _('If not empty, this username will be always used as credential'))
fixedPassword = gui.PasswordField(label=_('Password'), order = 5, tooltip = _('If not empty, this password will be always used as credential'))
listenPort = gui.NumericField(label=_('Listen port'), length = 5, order = 6, tooltip = _('Listening port of NX (ssh) at client machine'), defvalue = '22')
connection = gui.ChoiceField(label=_('Connection'), order = 7, tooltip = _('Connection speed for this transport (quality)'), values = [
useEmptyCreds = gui.CheckBoxField(label=_('Empty creds'), order=3, tooltip=_('If checked, the credentials used to connect will be empty'))
fixedName = gui.TextField(label=_('Username'), order=4, tooltip=_('If not empty, this username will be always used as credential'))
fixedPassword = gui.PasswordField(label=_('Password'), order=5, tooltip=_('If not empty, this password will be always used as credential'))
listenPort = gui.NumericField(label=_('Listen port'), length=5, order=6, tooltip=_('Listening port of NX (ssh) at client machine'), defvalue='22')
connection = gui.ChoiceField(label=_('Connection'), order=7, tooltip=_('Connection speed for this transport (quality)'), values=[
{'id' : 'modem', 'text' : 'modem'},
{'id' : 'isdn', 'text' : 'isdn'},
{'id' : 'adsl', 'text' : 'adsl'},
{'id' : 'wan', 'text' : 'wan'},
{'id' : 'lan', 'text' : 'lan'},
] )
session = gui.ChoiceField(label=_('Session'), order = 8, tooltip = _('Desktop session'), values = [
])
session = gui.ChoiceField(label=_('Session'), order=8, tooltip=_('Desktop session'), values=[
{'id' : 'gnome', 'text' : 'gnome'},
{'id' : 'kde', 'text' : 'kde'},
{'id' : 'cde', 'text' : 'cde'},
] )
cacheDisk = gui.ChoiceField(label=_('Disk Cache'), order = 9, tooltip = _('Cache size en Mb stored at disk'), values = [
])
cacheDisk = gui.ChoiceField(label=_('Disk Cache'), order=9, tooltip=_('Cache size in MB stored on disk'), values=[
{'id' : '0', 'text' : '0 Mb'},
{'id' : '32', 'text' : '32 Mb'},
{'id' : '64', 'text' : '64 Mb'},
{'id' : '128', 'text' : '128 Mb'},
{'id' : '256', 'text' : '256 Mb'},
{'id' : '512', 'text' : '512 Mb'},
] )
cacheMem = gui.ChoiceField(label=_('Memory Cache'), order = 10, tooltip = _('Cache size en Mb keept at memory'), values = [
])
cacheMem = gui.ChoiceField(label=_('Memory Cache'), order=10, tooltip=_('Cache size in MB kept in memory'), values=[
{'id' : '4', 'text' : '4 Mb'},
{'id' : '8', 'text' : '8 Mb'},
{'id' : '16', 'text' : '16 Mb'},
{'id' : '32', 'text' : '32 Mb'},
{'id' : '64', 'text' : '64 Mb'},
{'id' : '128', 'text' : '128 Mb'},
] )
])
def __init__(self, environment, values = None):
def __init__(self, environment, values=None):
super(TSNXTransport, self).__init__(environment, values)
if values != None:
if values['tunnelServer'].find(':') == -1:
@ -127,8 +127,8 @@ class TSNXTransport(Transport):
'''
Serializes the transport data so we can store it in database
'''
return str.join( '\t', [ 'v1', gui.boolToStr(self._useEmptyCreds), self._fixedName, self._fixedPassword, self._listenPort,
self._connection, self._session, self._cacheDisk, self._cacheMem, self._tunnelServer, self._tunnelCheckServer ] )
return str.join('\t', [ 'v1', gui.boolToStr(self._useEmptyCreds), self._fixedName, self._fixedPassword, self._listenPort,
self._connection, self._session, self._cacheDisk, self._cacheMem, self._tunnelServer, self._tunnelCheckServer ])
def unmarshal(self, string):
data = string.split('\t')
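For context, the unmarshal() counterpart of this tab-separated, versioned format typically checks the version tag and then unpacks the fields in the same order marshal() wrote them. A minimal sketch under that assumption (gui.strToBool as the inverse of gui.boolToStr is assumed, not confirmed by this hunk):

def unmarshal(self, string):
    data = string.split('\t')
    if data[0] == 'v1':  # version tag written by marshal()
        self._useEmptyCreds = gui.strToBool(data[1])  # assumed inverse of gui.boolToStr
        (self._fixedName, self._fixedPassword, self._listenPort,
         self._connection, self._session, self._cacheDisk, self._cacheMem,
         self._tunnelServer, self._tunnelCheckServer) = data[2:11]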
@ -172,7 +172,7 @@ class TSNXTransport(Transport):
if self._fixedPassword != '':
password = self._fixedPassword
if self._useEmptyCreds is True:
username, password = '',''
username, password = '', ''
width, height = CommonPrefs.getWidthHeight(prefs)
cache = Cache('pam')
@ -180,7 +180,7 @@ class TSNXTransport(Transport):
tunuser = ''.join(random.choice(string.letters + string.digits) for i in xrange(12)) + ("%f" % time.time()).split('.')[1]
tunpass = ''.join(random.choice(string.letters + string.digits) for i in xrange(12))
cache.put(tunuser, tunpass, 60*10) # Credential valid for ten minutes, and for 1 use only
cache.put(tunuser, tunpass, 60 * 10) # Credential valid for ten minutes, and for 1 use only
sshHost, sshPort = self._tunnelServer.split(':')
@ -201,4 +201,3 @@ class TSNXTransport(Transport):
def getHtmlComponent(self, theId, os, componentId):
# We use helper to keep this clean
return getHtmlComponent(self.__module__, componentId)

View File

@ -32,6 +32,8 @@
'''
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from uds.core.util.Config import Config
from uds.core.util import OsDetector
@ -48,16 +50,15 @@ def simpleScrambler(data):
n = ord('M')
pos = 0
for c in data:
res.append( chr(ord(c) ^ n) )
res.append(chr(ord(c) ^ n))
n = n ^ pos
pos = pos + 1
return "".join(res).encode('hex')
def generateHtmlForNX(transport, idUserService, idTransport, os, user, password, extra):
isMac = os['OS'] == OsDetector.Macintosh
applet = reverse('uds.web.views.transcomp', kwargs = { 'idTransport' : idTransport, 'componentId' : '1' })
applet = reverse('uds.web.views.transcomp', kwargs={'idTransport': idTransport, 'componentId': '1'})
# Gets the codebase, simply remove last char from applet
codebase = applet[:-1]
# We generate the "data" parameter
@ -74,15 +75,15 @@ def generateHtmlForNX(transport, idUserService, idTransport, os, user, password,
'tun:' + extra['tun'],
'is:' + idUserService
]
data = simpleScrambler( '\t'.join(data))
data = simpleScrambler('\t'.join(data))
if isMac is True:
msg = '<p>' + _('In order to use this transport, you need to first install the OpenNX client for Mac') + '</p>'
msg += '<p>' + _('You can obtain it from ') + '<a href="{0}">'.format(Config.section('NX').value('downloadUrlMACOS').get()) + _('OpenNX website') + '</a></p>'
else:
msg = '<p>' + _('In order to use this transport, you need to first install NoMachine NX Client version 3.5.x') + '</p>'
msg +='<p>' + _('you can obtain it for your platform from') + '<a href="{0}">'.format(Config.section('NX').value('downloadUrl').get()) + _('nochamine web site') + '</a></p>'
msg += '<p>' + _('You can obtain it for your platform from ') + '<a href="{0}">'.format(Config.section('NX').value('downloadUrl').get()) + _('NoMachine web site') + '</a></p>'
res = '<div idTransport="applet"><applet code="NxTunTransportApplet.class" codebase="%s" archive="%s" width="165" height="22"><param name="data" value="%s"/><param name="permissions" value="all-permissions"/></applet></div>' % (codebase, '1', data )
res = '<div idTransport="applet"><applet code="NxTunTransportApplet.class" codebase="%s" archive="%s" width="165" height="22"><param name="data" value="%s"/><param name="permissions" value="all-permissions"/></applet></div>' % (codebase, '1', data)
res += '<div>' + msg + '</div>'
return res
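For reference, the 'data' applet parameter is built as a tab-joined list of 'key:value' tokens, scrambled and hex-encoded before being embedded in the HTML. Only the two tokens visible in this hunk are shown in the fragment below; the rest are deliberately elided:

# Illustrative fragment only -- the full token list is longer than shown here.
tokens = [
    # ... other tokens elided ...
    'tun:' + extra['tun'],     # tunnel credentials/endpoint handed to the applet
    'is:' + idUserService,     # id of the user service being connected
]
data = simpleScrambler('\t'.join(tokens))  # ends up in <param name="data" value="..."/>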

View File

@ -31,6 +31,7 @@
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from django.utils.translation import ugettext as _
from uds.models import DeployedService, Service, OSManager, Transport, State, Group
from ..auths.AdminAuth import needs_credentials
from django.db import IntegrityError
@ -105,12 +106,12 @@ def getDeployedServices(credentials, all_):
@needs_credentials
def getDeployedService(credentials, id):
def getDeployedService(credentials, id_):
'''
Returns the deployed service with the given id
'''
logger.debug('Returning deployed service')
ds = DeployedService.objects.get(pk=id)
ds = DeployedService.objects.get(pk=id_)
if ds.state == State.ACTIVE:
return dictFromDeployedService(ds)
raise InsertException(_('Deployed Service does not exist'))
@ -134,9 +135,9 @@ def createDeployedService(credentials, deployedService):
osManager = None
if serviceInstance.needsManager:
osManager = OSManager.objects.get(pk=deployedService['idOsManager'])
dps = DeployedService.objects.create(name = deployedService['name'], comments = deployedService['comments'], service = service,
osmanager = osManager, state = State.ACTIVE, initial_srvs = initialServices, cache_l1_srvs = cacheL1,
cache_l2_srvs = cacheL2, max_srvs = maxServices, current_pub_revision = 1)
dps = DeployedService.objects.create(name=deployedService['name'], comments=deployedService['comments'], service=service,
osmanager=osManager, state=State.ACTIVE, initial_srvs=initialServices, cache_l1_srvs=cacheL1,
cache_l2_srvs=cacheL2, max_srvs=maxServices, current_pub_revision=1)
# Now we add transports
addTransportsToDeployedService(dps, deployedService['transports'])
except IntegrityError as e:
@ -147,6 +148,7 @@ def createDeployedService(credentials, deployedService):
raise InsertException(str(e))
return str(dps.id)
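For orientation, the deployedService argument is a plain dict arriving over XML-RPC. The hypothetical call below only uses the keys actually read in this hunk; values are placeholders, and the keys for initial/cache/max services are read in the elided part of the function:

deployedService = {
    'name': 'Accounting desktops',
    'comments': 'Windows pool for the accounting group',
    'idOsManager': '3',         # only read when the service instance needsManager
    'transports': ['1', '4'],   # transport ids attached after creation
    # ... other keys (initial/cache/max services, service id) elided ...
}
newId = createDeployedService(credentials, deployedService)  # returns str(dps.id)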
@needs_credentials
def modifyDeployedService(credentials, deployedService):
'''
@ -183,6 +185,7 @@ def modifyDeployedService(credentials, deployedService):
raise InsertException(str(e))
return True
@needs_credentials
def getGroupsAssignedToDeployedService(credentials, deployedServiceId):
'''
@ -199,6 +202,7 @@ def getGroupsAssignedToDeployedService(credentials, deployedServiceId):
raise InsertException(_('Deployed Service does not exist'))
return grps
@needs_credentials
def assignGroupToDeployedService(credentials, deployedServiceId, groupId):
'''
@ -215,6 +219,7 @@ def assignGroupToDeployedService(credentials, deployedServiceId, groupId):
raise InsertException(_('Deployed Service does not exist'))
return True
@needs_credentials
def removeGroupsFromDeployedService(credentials, deployedServiceId, groupIds):
'''
@ -229,6 +234,7 @@ def removeGroupsFromDeployedService(credentials, deployedServiceId, groupIds):
raise InsertException(_('Deployed Service does not exist'))
return True
@needs_credentials
def getTransportsAssignedToDeployedService(credentias, idDS):
'''
@ -236,13 +242,14 @@ def getTransportsAssignedToDeployedService(credentias, idDS):
'''
try:
ds = DeployedService.objects.get(id=idDS)
return [ dictFromTransport(t) for t in ds.transports.all() ]
return [dictFromTransport(t) for t in ds.transports.all()]
except DeployedService.DoesNotExist:
raise FindException(_('Can\'t find deployed service'))
except Exception as e:
logger.exception("getTransportsForDeployedService: ")
raise FindException(str(e))
@needs_credentials
def assignTransportToDeployedService(credentials, deployedServiceId, transportId):
logger.debug('Assigning transport {0} to service {1}'.format(transportId, deployedServiceId))
@ -257,6 +264,7 @@ def assignTransportToDeployedService(credentials, deployedServiceId, transportId
return True
@needs_credentials
def removeTransportFromDeployedService(credentials, deployedServiceId, transportIds):
'''
@ -285,6 +293,7 @@ def removeDeployedService(credentials, deployedServiceId):
raise InsertException(_('Deployed service does not exist'))
return True
# Registers XML RPC Methods
def registerDeployedServicesFunctions(dispatcher):
dispatcher.register_function(getDeployedServices, 'getDeployedServices')
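For reference, a registration function like this is normally handed an XML-RPC dispatcher by whatever entry point wires all the modules together. A minimal sketch using the Python 2 stdlib dispatcher (the wiring is an assumption, not the project's actual entry point):

from SimpleXMLRPCServer import SimpleXMLRPCDispatcher  # Python 2 stdlib

dispatcher = SimpleXMLRPCDispatcher(allow_none=True, encoding='utf-8')
registerDeployedServicesFunctions(dispatcher)  # exposes getDeployedServices, getDeployedService, ...

# A web view would then feed the raw request body to the dispatcher and
# return the marshalled XML it produces, e.g.:
# response_xml = dispatcher._marshaled_dispatch(request.body)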

View File

@ -43,6 +43,7 @@ import logging
logger = logging.getLogger(__name__)
def infoDictFromServiceInstance(service):
if service is not None:
needsPublication = service.publicationType is not None
@ -73,12 +74,14 @@ def infoDictFromServiceInstance(service):
'typeName' : typeName
}
def dictFromService(serv):
service = serv.getInstance()
return { 'idParent' : str(serv.provider_id), 'id' : str(serv.id), 'name' : serv.name,
'comments' : serv.comments, 'type' : serv.data_type, 'typeName' : _(service.name()), 'info' : infoDictFromServiceInstance(service)
}
@needs_credentials
def getServices(credentials, idParent):
'''
@ -94,6 +97,7 @@ def getServices(credentials, idParent):
logger.debug(e)
return res
@needs_credentials
def getAllServices(credentials):
'''
@ -109,6 +113,7 @@ def getAllServices(credentials):
logger.exception('getAllServices')
return res
@needs_credentials
def getServiceGui(credentials, idParent, type):
'''
@ -118,12 +123,13 @@ def getServiceGui(credentials, idParent, type):
logger.debug('getServiceGui parameters: {0}, {1}'.format(idParent, type))
provider = Provider.objects.get(id=idParent).getInstance()
serviceType = provider.getServiceByType(type)
service = serviceType( Environment.getTempEnv(), provider) # Instantiate it so it has the opportunity to alter gui description based on parent
service = serviceType(Environment.getTempEnv(), provider) # Instantiate it so it has the opportunity to alter gui description based on parent
return service.guiDescription(service)
except:
logger.exception('Exception at getServiceGui')
raise
@needs_credentials
def getService(credentials, id):
'''
@ -144,6 +150,7 @@ def getService(credentials, id):
logger.debug('getService res: {0}'.format(res))
return res
@needs_credentials
def createService(credentials, idParent, type, data):
'''
@ -154,7 +161,7 @@ def createService(credentials, idParent, type, data):
provider = Provider.objects.get(id=idParent)
dic = dictFromData(data)
try:
srv = provider.services.create(name = dic['name'], comments = dic['comments'], data_type = type)
srv = provider.services.create(name=dic['name'], comments=dic['comments'], data_type=type)
# Invoke serialization with the correct environment
srv.data = srv.getInstance(dic).serialize()
srv.save()