Version 1.1 Tag

This commit is contained in:
Adolfo Gómez 2013-02-27 07:16:20 +00:00
commit 59b6847558
18 changed files with 136 additions and 66 deletions

View File

@@ -0,0 +1,5 @@
To build the installer, mschart.exe is needed (provided by Microsoft).
You can get it at:
http://www.microsoft.com/en-us/download/details.aspx?id=14422

View File

@@ -98,6 +98,7 @@ encoding//src/uds/core/workers/PublicationCleaner.py=utf-8
encoding//src/uds/core/workers/ServiceCacheUpdater.py=utf-8
encoding//src/uds/core/workers/StatsCollector.py=utf-8
encoding//src/uds/core/workers/UserServiceCleaner.py=utf-8
encoding//src/uds/core/workers/__init__.py=utf-8
encoding//src/uds/dispatchers/__init__.py=utf-8
encoding//src/uds/dispatchers/pam/urls.py=utf-8
encoding//src/uds/dispatchers/pam/views.py=utf-8

View File

@@ -41,8 +41,3 @@ import services
import auths
import transports
def __init__():
from managers.TaskManager import TaskManager
TaskManager.registerScheduledTasks()
__init__()

View File

@@ -31,7 +31,6 @@
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from uds.models import Scheduler
from uds.core import Environmentable
import logging
@@ -40,7 +39,8 @@ logger = logging.getLogger(__name__)
class Job(Environmentable):
# Default frequency, once a day. Remember that precision will be based on the "granularity" of the Scheduler
# If a job is used for delayed execution, this attribute is in fact ignored
frecuency = Scheduler.DAY
frecuency = 24*3600+3
friendly_name = 'Unknown'
def __init__(self, environment):
'''

View File

@@ -30,17 +30,13 @@
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from uds.models import Provider
from uds.models import Service
from uds.core.util.Config import GlobalConfig
import logging
logger = logging.getLogger(__name__)
class StatsManager(object):
'''
Manager for logging statistics, so we can provide useful info about platform usage
@@ -138,11 +134,27 @@ class StatsManager(object):
return StatsCounters.get_grouped(ownerType, counterType, owner_id = ownerIds, since = since, to = to, limit = limit, use_max = use_max)
def cleanupCounter(self):
def cleanupCounters(self):
'''
Removes all counters previous to configured max keep time for stat information from database.
'''
pass
from uds.models import StatsCounters, getSqlDatetime, optimizeTable
from django.db import connection, transaction
import datetime
import time
minTime = time.mktime( (getSqlDatetime() - datetime.timedelta(days = GlobalConfig.STATS_DURATION.getInt())).timetuple() )
dbTable = StatsCounters.__dict__['_meta'].db_table
# Don't like how django executes this (it retrieves all IDs and launches "DELETE .. WHERE id IN ...")
#StatsCounters.objects.filter(stamp__lt=minTime).delete()
# Used __dict__ because the IDE complains that _meta is not known :-)
query = 'DELETE FROM {0} where STAMP < {1}'.format(dbTable, minTime)
cursor = connection.cursor()
cursor.execute(query)
# This table will hold a big amount of data, and maybe we will erase a big number of records too.
# This will ensure the table is in "good" shape (testing right now, we will see in the future)
optimizeTable(dbTable)
# Event stats
# Counter stats
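
Side note: Django's cursor.execute() also accepts query parameters, so the same cleanup could pass the timestamp as a parameter instead of interpolating it into the SQL string. A minimal sketch under the same helpers (not part of this commit; the table name cannot be parameterized, so it still uses format()):

from django.db import connection

def cleanupBefore(dbTable, minStamp):
    # Same raw DELETE as above, but with the value passed as a query
    # parameter ('%s' is Django's placeholder on every backend).
    cursor = connection.cursor()
    cursor.execute('DELETE FROM {0} WHERE stamp < %s'.format(dbTable), [minStamp])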

View File

@@ -77,33 +77,23 @@ class TaskManager(object):
@staticmethod
def registerScheduledTasks():
from uds.core.workers.ServiceCacheUpdater import ServiceCacheUpdater
from uds.core.workers.UserServiceCleaner import UserServiceInfoItemsCleaner, UserServiceRemover
from uds.core.workers.PublicationCleaner import PublicationInfoItemsCleaner, PublicationCleaner
from uds.core.workers.CacheCleaner import CacheCleaner
from uds.core.workers.DeployedServiceCleaner import DeployedServiceInfoItemsCleaner, DeployedServiceRemover
from uds.core.workers.StatsCollector import DeployedServiceStatsCollector
logger.info("Registering sheduled tasks")
TaskManager.registerJob('Service Cache Updater', ServiceCacheUpdater)
TaskManager.registerJob('User Service Info Cleaner', UserServiceInfoItemsCleaner)
TaskManager.registerJob('User Service Cleaner', UserServiceRemover)
TaskManager.registerJob('Publications Info Cleaner', PublicationInfoItemsCleaner)
TaskManager.registerJob('Publication Cleaner', PublicationCleaner)
TaskManager.registerJob('Utility Cache Cleaner', CacheCleaner)
TaskManager.registerJob('Deployed Service Info Cleaner', DeployedServiceInfoItemsCleaner)
TaskManager.registerJob('Deployed Service Cleaner', DeployedServiceRemover)
TaskManager.registerJob('Deployed Service Stats', DeployedServiceStatsCollector)
from uds.core import workers
@staticmethod
def run():
TaskManager.keepRunning = True
# Runs Scheduler in a separate thread and DelayedTasks here
TaskManager.registerScheduledTasks()
noSchedulers = GlobalConfig.SCHEDULER_THREADS.getInt()
noDelayedTasks = GlobalConfig.DELAYED_TASKS_THREADS.getInt()
logger.info('Starting {0} schedulers and {1} task executors'.format(noSchedulers, noDelayedTasks))
threads = []
for n in range(noSchedulers):
thread = SchedulerThread()

View File

@@ -41,12 +41,12 @@ logger = logging.getLogger(__name__)
GLOBAL_SECTION = 'UDS'
class Config:
class Config(object):
'''
Keeps persistent configuration data
'''
class _Value:
class _Value(object):
def __init__(self, section, key, default = '', crypt = False, longText = False):
self._section = section
self._key = key
@@ -162,7 +162,7 @@ class Config:
except Exception:
pass
class GlobalConfig:
class GlobalConfig(object):
'''
Simple helper to keep track of global configuration
'''
@@ -170,9 +170,9 @@ class GlobalConfig:
# Delay between cache checks. Reducing this number will increase cache generation speed, but will also put more load on service providers
CACHE_CHECK_DELAY = Config.section(GLOBAL_SECTION).value('cacheCheckDelay', '19')
# Number of delayed-task threads PER SERVER. With a higher number of threads, delayed tasks will complete sooner, but the overall system load will be higher
DELAYED_TASKS_THREADS = Config.section(GLOBAL_SECTION).value('delayedTasksThreads', '2')
DELAYED_TASKS_THREADS = Config.section(GLOBAL_SECTION).value('delayedTasksThreads', '4')
# Number of scheduler threads running PER SERVER. With a higher number of threads, scheduled tasks will complete sooner, but the overall system load will be higher
SCHEDULER_THREADS = Config.section(GLOBAL_SECTION).value('schedulerThreads', '2')
SCHEDULER_THREADS = Config.section(GLOBAL_SECTION).value('schedulerThreads', '3')
# Waiting time before removing "errored" and "removed" publications, cache, and user assigned machines. Time is in seconds
CLEANUP_CHECK = Config.section(GLOBAL_SECTION).value('cleanupCheck', '3607')
# Time to maintain "info state" items before removing them, in seconds
@@ -223,37 +223,19 @@ class GlobalConfig:
# Time to restrain a deployed service in case it gives some error at some point
RESTRAINT_TIME = Config.section(GLOBAL_SECTION).value('restrainTime', '600')
# Statistics duration, in days
STATS_DURATION = Config.section(GLOBAL_SECTION).value('statsDuration', '365')
initDone = False
@staticmethod
def initialize():
try:
# All configurations are upper case
# Tries to initialize database data for global config so it is stored asap and gets cached for use
GlobalConfig.SESSION_EXPIRE_TIME.get()
GlobalConfig.CACHE_CHECK_DELAY.get()
GlobalConfig.DELAYED_TASKS_THREADS.get()
GlobalConfig.SCHEDULER_THREADS.get()
GlobalConfig.CLEANUP_CHECK.get()
GlobalConfig.KEEP_INFO_TIME.get()
GlobalConfig.MAX_PREPARING_SERVICES.get()
GlobalConfig.MAX_REMOVING_SERVICES.get()
GlobalConfig.USER_SERVICE_CLEAN_NUMBER.get()
GlobalConfig.REMOVAL_CHECK.get()
GlobalConfig.LOGIN_URL.get()
GlobalConfig.USER_SESSION_LENGTH.get()
GlobalConfig.SUPER_USER_LOGIN.get()
GlobalConfig.SUPER_USER_PASS.get()
GlobalConfig.ADMIN_IDLE_TIME.get()
GlobalConfig.CHECK_UNUSED_TIME.get()
GlobalConfig.CSS.get()
GlobalConfig.MAX_LOGIN_TRIES.get()
GlobalConfig.LOGIN_BLOCK.get()
GlobalConfig.AUTORUN_SERVICE.get()
GlobalConfig.REDIRECT_TO_HTTPS.get()
GlobalConfig.MAX_INITIALIZING_TIME.get()
GlobalConfig.CUSTOM_HTML_LOGIN.get()
GlobalConfig.MAX_LOGS_PER_ELEMENT.get()
GlobalConfig.RESTRAINT_TIME.get()
for v in GlobalConfig.__dict__.itervalues():
if type(v) is Config._Value:
v.get()
except:
logger.debug('Config table does not exist!!!, maybe we are installing? :-)')
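
For reference, the declare-then-read pattern above is uniform: value() returns a Config._Value whose get()/getInt() read (and cache) the stored setting on demand. A short usage sketch ('exampleKey' is a hypothetical key, not one defined in this commit):

from uds.core.util.Config import Config, GlobalConfig

# Reading a declared global value; hits the database (and the cache) on demand.
threads = GlobalConfig.SCHEDULER_THREADS.getInt()

# Declaring an ad-hoc value works the same way as the GlobalConfig entries above.
example = Config.section('UDS').value('exampleKey', '42')  # hypothetical key
print example.getInt()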

View File

@@ -40,8 +40,9 @@ import logging
logger = logging.getLogger(__name__)
class AssignedAndUnused(Job):
class AssignedAndUnused(object): # When derived from Job, it will be auto-registered
frecuency = GlobalConfig.CHECK_UNUSED_TIME.getInt()
friendly_name = 'Unused services checker'
def __init__(self, environment):
super(AssignedAndUnused,self).__init__(environment)
@@ -50,7 +51,7 @@ class AssignedAndUnused(Job):
for ds in DeployedService.objects.all():
osm = ds.osmanager.getInstance()
if osm.processUnusedMachines is True:
logger.debug('Processing unused machines for {0}'.format(osm))
logger.debug('Processing unused services for {0}'.format(osm))
since_state = getSqlDatetime() - timedelta( seconds = GlobalConfig.CHECK_UNUSED_TIME.getInt() / 2 )
for us in ds.assignedUserServices().select_for_update().filter(in_use=False,since_state__lt=since_state):
logger.debug('Found unused assigned service {0}'.format(us))

View File

@@ -41,6 +41,7 @@ logger = logging.getLogger(__name__)
class CacheCleaner(Job):
frecuency = 3600*24 # Once a day
friendly_name = 'Utility Cache Cleaner'
def __init__(self, environment):
super(CacheCleaner,self).__init__(environment)

View File

@@ -43,6 +43,7 @@ logger = logging.getLogger(__name__)
class DeployedServiceInfoItemsCleaner(Job):
frecuency = GlobalConfig.CLEANUP_CHECK.getInt() # Request run cache "info" cleaner every configured seconds. If config value is changed, it will be used at next reload
friendly_name = 'Deployed Service Info Cleaner'
def __init__(self, environment):
super(DeployedServiceInfoItemsCleaner,self).__init__(environment)
@@ -54,6 +55,7 @@ class DeployedServiceInfoItemsCleaner(Job):
class DeployedServiceRemover(Job):
frecuency = GlobalConfig.REMOVAL_CHECK.getInt() # Request run publication "removal" every configured seconds. If config value is changed, it will be used at next reload
friendly_name = 'Deployed Service Cleaner'
def __init__(self, environment):
super(DeployedServiceRemover,self).__init__(environment)

View File

@@ -45,6 +45,7 @@ logger = logging.getLogger(__name__)
class PublicationInfoItemsCleaner(Job):
frecuency = GlobalConfig.CLEANUP_CHECK.getInt() # Request run cache "info" cleaner every configured seconds. If config value is changed, it will be used at next reload
friendly_name = 'Publications Info Cleaner'
def __init__(self, environment):
super(PublicationInfoItemsCleaner,self).__init__(environment)
@@ -55,6 +56,7 @@ class PublicationInfoItemsCleaner(Job):
class PublicationCleaner(Job):
frecuency = GlobalConfig.REMOVAL_CHECK.getInt() # Request run publication "removal" every configured seconds. If config value is changed, it will be used at next reload
friendly_name = 'Publication Cleaner'
def __init__(self, environment):
super(PublicationCleaner,self).__init__(environment)

View File

@@ -54,6 +54,7 @@ class ServiceCacheUpdater(Job):
'''
frecuency = GlobalConfig.CACHE_CHECK_DELAY.getInt() # Request run cache manager every configured seconds (defaults to 20 seconds).
# If config value is changed, it will be used at next reload
friendly_name = 'Service Cache Updater'
def __init__(self, environment):
super(ServiceCacheUpdater,self).__init__(environment)

View File

@@ -33,6 +33,7 @@
from uds.models import DeployedService
from uds.core.util.State import State
from uds.core.util.stats import counters
from uds.core.managers import StatsManager
from uds.core.jobs.Job import Job
import logging
@@ -43,6 +44,7 @@ logger = logging.getLogger(__name__)
class DeployedServiceStatsCollector(Job):
frecuency = 599 # Once every ten minutes (599 is prime)
friendly_name = 'Deployed Service Stats'
def __init__(self, environment):
super(DeployedServiceStatsCollector,self).__init__(environment)
@@ -64,4 +66,15 @@ class DeployedServiceStatsCollector(Job):
logger.debug('Done Deployed service stats collector')
class StatsCleaner(Job):
frecuency = 3600*24*15 # Executed just once every 15 days
friendly_name = 'Statistic housekeeping'
def run(self):
logger.debug('Starting statistics cleanup')
try:
StatsManager().cleanupCounters()
except:
logger.exception('Cleaning up counters')
logger.debug('Done statistics cleanup')

View File

@@ -50,6 +50,7 @@ logger = logging.getLogger(__name__)
class UserServiceInfoItemsCleaner(Job):
frecuency = GlobalConfig.CLEANUP_CHECK.getInt() # Request run cache "info" cleaner every configured seconds. If config value is changed, it will be used at next reload
friendly_name = 'User Service Info Cleaner'
def __init__(self, environment):
super(UserServiceInfoItemsCleaner,self).__init__(environment)
@@ -63,6 +64,8 @@ class UserServiceInfoItemsCleaner(Job):
class UserServiceRemover(Job):
frecuency = GlobalConfig.REMOVAL_CHECK.getInt() # Request run cache "info" cleaner every configured seconds. If config value is changed, it will be used at next reload
friendly_name = 'User Service Cleaner'
removeAtOnce = GlobalConfig.USER_SERVICE_CLEAN_NUMBER.getInt() # Same, it will work at reload
def __init__(self, environment):

View File

@@ -0,0 +1,56 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012 Virtual Cable S.L.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Virtual Cable S.L. nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
.. moduleauthor:: Adolfo Gómez, dkmaster at dkmon dot com
'''
def __init__():
'''
This imports all packages that are descendants of this package and, after that,
registers every Job subclass found in them as a scheduled task
'''
import os.path, pkgutil
import sys
from uds.core import jobs
from uds.core.managers.TaskManager import TaskManager
# Dynamically import children of this package.
pkgpath = os.path.dirname(sys.modules[__name__].__file__)
for _, name, _ in pkgutil.iter_modules([pkgpath]):
__import__(name, globals(), locals(), [], -1)
p = jobs.Job
# This is marked as an error in the IDE, but it's not (__subclasses__)
for cls in p.__subclasses__():
# Limit auto-registration to worker jobs defined inside this package
if cls.__module__[0:16] == 'uds.core.workers':
TaskManager.registerJob(cls.friendly_name, cls)
__init__()
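
With this auto-registration in place, adding a scheduled task no longer requires touching TaskManager.registerScheduledTasks(): any Job subclass defined in a module under uds.core.workers is picked up automatically. A hypothetical sketch of such a worker (module and class names are invented; frecuency and friendly_name are the real attributes Job defines):

# Hypothetical file: src/uds/core/workers/ExampleWorker.py
from uds.core.jobs.Job import Job

class ExampleWorker(Job):
    frecuency = 3600                # assumption: run roughly once an hour
    friendly_name = 'Example Worker'

    def run(self):
        # The scheduler invokes run() periodically; real work would go here.
        pass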

View File

@@ -61,9 +61,9 @@ def getSqlDatetime(unix=False):
* sqlite
'''
from django.db import connection
con = connection
cursor = con.cursor()
if con.vendor == 'mysql':
cursor = connection.cursor()
if connection.vendor == 'mysql':
cursor.execute('SELECT NOW()')
date = cursor.fetchone()[0]
else:
@@ -74,6 +74,12 @@ def getSqlDatetime(unix=False):
else:
return date
def optimizeTable(dbTable):
from django.db import connection
cursor = connection.cursor()
if connection.vendor == 'mysql':
cursor.execute('OPTIMIZE TABLE {0}'.format(dbTable))
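
As written, optimizeTable() only issues OPTIMIZE TABLE on MySQL and silently does nothing on other backends, so callers such as cleanupCounters() above can invoke it unconditionally after a bulk delete. A usage sketch (mirroring how StatsManager derives the table name):

from uds.models import StatsCounters, optimizeTable

# After a bulk DELETE on a large stats table, reclaim space and refresh
# index statistics; on non-MySQL backends this call is simply a no-op.
optimizeTable(StatsCounters._meta.db_table)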
# Services
class Provider(models.Model):