mirror of https://github.com/dkmstr/openuds.git synced 2025-01-21 18:03:54 +03:00

* Added first stats collector

This commit is contained in:
Adolfo Gómez 2013-02-12 14:57:57 +00:00
parent 7c572f0488
commit 35b2713b11
6 changed files with 96 additions and 15 deletions

View File

@ -96,6 +96,7 @@ encoding//src/uds/core/workers/CacheCleaner.py=utf-8
encoding//src/uds/core/workers/DeployedServiceCleaner.py=utf-8
encoding//src/uds/core/workers/PublicationCleaner.py=utf-8
encoding//src/uds/core/workers/ServiceCacheUpdater.py=utf-8
encoding//src/uds/core/workers/StatsCollector.py=utf-8
encoding//src/uds/core/workers/UserServiceCleaner.py=utf-8
encoding//src/uds/dispatchers/__init__.py=utf-8
encoding//src/uds/dispatchers/pam/urls.py=utf-8

View File

@ -82,6 +82,7 @@ class TaskManager(object):
from uds.core.workers.PublicationCleaner import PublicationInfoItemsCleaner, PublicationCleaner
from uds.core.workers.CacheCleaner import CacheCleaner
from uds.core.workers.DeployedServiceCleaner import DeployedServiceInfoItemsCleaner, DeployedServiceRemover
from uds.core.workers.StatsCollector import DeployedServiceStatsCollector
logger.info("Registering sheduled tasks")
TaskManager.registerJob('Service Cache Updater', ServiceCacheUpdater)
@ -92,7 +93,7 @@ class TaskManager(object):
TaskManager.registerJob('Utility Cache Cleaner', CacheCleaner)
TaskManager.registerJob('Deployed Service Info Cleaner', DeployedServiceInfoItemsCleaner)
TaskManager.registerJob('Deployed Service Cleaner', DeployedServiceRemover)
TaskManager.registerJob('Deployed Service Stats', DeployedServiceStatsCollector)
@staticmethod

View File

@ -168,15 +168,15 @@ class GlobalConfig:
'''
SESSION_EXPIRE_TIME = Config.section(GLOBAL_SECTION).value('sessionExpireTime', '24') # Max session duration (in use) after a new publishment has been made
# Delay between cache checks. reducing this number will increase cache generation speed but also will load service providers
CACHE_CHECK_DELAY = Config.section(GLOBAL_SECTION).value('cacheCheckDelay', '20')
CACHE_CHECK_DELAY = Config.section(GLOBAL_SECTION).value('cacheCheckDelay', '19')
# Delayed task number of threads PER SERVER, with higher number of threads, delayed tasks will complete sooner, but it will give more load to overall system
DELAYED_TASKS_THREADS = Config.section(GLOBAL_SECTION).value('delayedTasksThreads', '2')
# Number of scheduler threads running PER SERVER, with higher number of threads, delayed tasks will complete sooner, but it will give more load to overall system
SCHEDULER_THREADS = Config.section(GLOBAL_SECTION).value('schedulerThreads', '2')
# Waiting time before removing "errored" and "removed" publications, cache, and user assigned machines. Time is in seconds
CLEANUP_CHECK = Config.section(GLOBAL_SECTION).value('cleanupCheck', '3600')
CLEANUP_CHECK = Config.section(GLOBAL_SECTION).value('cleanupCheck', '3607')
# Time to maintain "info state" items before removing them, in seconds
KEEP_INFO_TIME = Config.section(GLOBAL_SECTION).value('keepInfoTime', '14400') # Defaults to 2 days 172800?? better 4 hours xd
KEEP_INFO_TIME = Config.section(GLOBAL_SECTION).value('keepInfoTime', '14401') # Defaults to 2 days 172800?? better 4 hours xd
# Max number of services to be "preparing" at same time
MAX_PREPARING_SERVICES = Config.section(GLOBAL_SECTION).value('maxPreparingServices', '15') # Defaults to 15 services at once (per service provider)
# Max number of service to be at "removal" state at same time
@ -186,7 +186,7 @@ class GlobalConfig:
# Number of services to initiate removal per run of CacheCleaner
USER_SERVICE_CLEAN_NUMBER = Config.section(GLOBAL_SECTION).value('userServiceCleanNumber', '3') # Defaults to 3 per wun
# Removal Check time for cache, publications and deployed services
REMOVAL_CHECK = Config.section(GLOBAL_SECTION).value('removalCheck', '30') # Defaults to 30 seconds
REMOVAL_CHECK = Config.section(GLOBAL_SECTION).value('removalCheck', '31') # Defaults to 30 seconds
# Login URL
LOGIN_URL = Config.section(GLOBAL_SECTION).value('loginUrl', '/login') # Defaults to /login
# Session duration
@ -199,7 +199,7 @@ class GlobalConfig:
ADMIN_IDLE_TIME = Config.section(GLOBAL_SECTION).value('adminIdleTime', '14400') # Defaults to 4 hous
# Time between checks of unused services by os managers
# Unused services will be invoked for every machine assigned but not in use AND that has been assigned at least 1/2 of this time
CHECK_UNUSED_TIME = Config.section(GLOBAL_SECTION).value('checkUnusedTime', '600') # Defaults to 10 minutes
CHECK_UNUSED_TIME = Config.section(GLOBAL_SECTION).value('checkUnusedTime', '631') # Defaults to 10 minutes
# Default CSS Used
CSS = Config.section(GLOBAL_SECTION).value('css', settings.STATIC_URL + 'css/uds.css')
# Max logins before blocking an account

View File

@ -0,0 +1,67 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 Virtual Cable S.L.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Virtual Cable S.L. nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from uds.models import DeployedService
from uds.core.util.State import State
from uds.core.util.stats import counters
from uds.core.jobs.Job import Job
import logging
logger = logging.getLogger(__name__)
class DeployedServiceStatsCollector(Job):
    '''
    Scheduled job that periodically snapshots usage counters for every
    active deployed service: the number of assigned user services and the
    number of those currently in use. One sample pair is stored per
    deployed service on each run via the counters helper.
    '''

    # Run roughly every ten minutes. 599 is prime (the original comment
    # claimed 601, which did not match the value), which helps stagger
    # this job against other scheduled tasks with round frequencies.
    # NOTE: attribute is spelled "frecuency" (sic) -- the scheduler looks
    # up this exact name, so it must not be "corrected".
    frecuency = 599

    def __init__(self, environment):
        # environment: scheduler-provided execution environment, passed
        # straight through to the Job base class.
        super(DeployedServiceStatsCollector, self).__init__(environment)

    def run(self):
        '''
        Collects and stores counters for each ACTIVE deployed service.

        A failure while sampling one service is logged (with traceback)
        and does not abort collection for the remaining services.
        '''
        logger.debug('Starting Deployed service stats collector')
        for ds in DeployedService.objects.filter(state=State.ACTIVE):
            try:
                # Exclude user services in "info" states (removed/errored
                # leftovers kept only for informational purposes).
                fltr = ds.assignedUserServices().exclude(state__in=State.INFO_STATES)
                assigned = fltr.count()
                inUse = fltr.filter(in_use=True).count()
                counters.addCounter(ds, counters.CT_ASSIGNED, assigned)
                counters.addCounter(ds, counters.CT_INUSE, inUse)
            except Exception:
                # Narrowed from a bare "except:" so SystemExit and
                # KeyboardInterrupt can still propagate; lazy %-args avoid
                # formatting cost when the record is filtered out.
                logger.exception('Getting counters for deployed service %s', ds)
        logger.debug('Done Deployed service stats collector')

View File

@ -1679,6 +1679,8 @@ class StatsCounters(models.Model):
'CEIL({0}(value)) AS value '
'FROM {1} WHERE {2}').format(fnc, StatsCounters._meta.db_table, filt)
logger.debug('Stats query: {0}'.format(query))
# We use result as an iterator
return StatsCounters.objects.raw(query)

View File

@ -32,12 +32,13 @@
'''
from django.utils.translation import ugettext as _
from uds.models import DeployedService
from ..auths.AdminAuth import needs_credentials
from ..util.Exceptions import FindException
from uds.core.util.stats import counters
from uds.core.util.Cache import Cache
from uds.models import DeployedService
import cPickle
import time
import logging
@ -48,13 +49,22 @@ cache = Cache('StatsDispatcher')
@needs_credentials
def getDeployedServiceCounters(credentials, id, counter_type, since, to, points, use_max):
try:
us = DeployedService.objects.get(pk=id)
logger.debug(us)
res = []
for x in counters.getCounters(us, counter_type, since=since, to=to, limit=points, use_max=use_max):
res.append({ 'stamp': x[0], 'value': x[1] })
return { 'title': counters.getCounterTitle(counter_type), 'data': res }
return ()
cacheKey = id + str(counter_type)+str(since)+str(to)+str(points)+str(use_max)
val = cache.get(cacheKey)
if val is None:
us = DeployedService.objects.get(pk=id)
val = []
for x in counters.getCounters(us, counter_type, since=since, to=to, limit=points, use_max=use_max):
val.append({ 'stamp': x[0], 'value': x[1] })
if len(val) > 2:
cache.put(cacheKey, cPickle.dumps(val).encode('zip'), 3600)
else:
val = [{'stamp':since, 'value':0 }, {'stamp':to, 'value':0}]
else:
val = cPickle.loads(val.decode('zip'))
return { 'title': counters.getCounterTitle(counter_type), 'data': val }
except:
logger.exception('exception')
raise FindException(_('Service does not exists'))