Mirror of https://github.com/ansible/awx.git
Merge pull request #5739 from ryanpetrello/optimize-callback-with-logging
further optimize conf.settings access when logging is enabled
Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
Commit 3c5e9da9a1
@@ -1,5 +1,4 @@
 # Python
-from collections import namedtuple
 import contextlib
 import logging
 import re
@@ -136,6 +135,15 @@ def filter_sensitive(registry, key, value):
     return value
 
 
+class TransientSetting(object):
+
+    __slots__ = ('pk', 'value')
+
+    def __init__(self, pk, value):
+        self.pk = pk
+        self.value = value
+
+
 class EncryptedCacheProxy(object):
 
     def __init__(self, cache, registry, encrypter=None, decrypter=None):
@@ -186,8 +194,6 @@ class EncryptedCacheProxy(object):
         self.set(key, value, log=False, **kwargs)
 
     def _handle_encryption(self, method, key, value):
-        TransientSetting = namedtuple('TransientSetting', ['pk', 'value'])
-
         if value is not empty and self.registry.is_setting_encrypted(key):
             # If the setting exists in the database, we'll use its primary key
             # as part of the AES key when encrypting/decrypting
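The two hunks above hoist TransientSetting out of the _handle_encryption hot path: calling namedtuple() builds a brand-new class object on every invocation, while a module-level class with __slots__ is defined once and stays cheap to instantiate. A minimal sketch of the difference (illustrative only, not AWX code):

# Sketch only: compares recreating a namedtuple class per call (the old
# _handle_encryption behaviour) with reusing one module-level __slots__ class.
from collections import namedtuple
import timeit


class TransientSetting(object):

    __slots__ = ('pk', 'value')

    def __init__(self, pk, value):
        self.pk = pk
        self.value = value


def per_call_namedtuple(pk, value):
    # builds a new class object on every call
    Setting = namedtuple('Setting', ['pk', 'value'])
    return Setting(pk, value)


def reused_slots_class(pk, value):
    # the class already exists; only a small instance is created
    return TransientSetting(pk, value)


if __name__ == '__main__':
    print('namedtuple per call:', timeit.timeit(lambda: per_call_namedtuple(1, 'x'), number=10000))
    print('__slots__ class:    ', timeit.timeit(lambda: reused_slots_class(1, 'x'), number=10000))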
@@ -107,6 +107,17 @@ class LogstashFormatterBase(logging.Formatter):
 
 class LogstashFormatter(LogstashFormatterBase):
 
+    def __init__(self, *args, **kwargs):
+        self.cluster_host_id = settings.CLUSTER_HOST_ID
+        self.tower_uuid = None
+        uuid = (
+            getattr(settings, 'LOG_AGGREGATOR_TOWER_UUID', None) or
+            getattr(settings, 'INSTALL_UUID', None)
+        )
+        if uuid:
+            self.tower_uuid = uuid
+        super(LogstashFormatter, self).__init__(*args, **kwargs)
+
     def reformat_data_for_log(self, raw_data, kind=None):
         '''
         Process dictionaries from various contexts (job events, activity stream
@@ -231,21 +242,8 @@ class LogstashFormatter(LogstashFormatterBase):
             log_kind = record.name[len('awx.analytics.'):]
             fields = self.reformat_data_for_log(fields, kind=log_kind)
         # General AWX metadata
-        for log_name, setting_name in [
-                ('type', 'LOG_AGGREGATOR_TYPE'),
-                ('cluster_host_id', 'CLUSTER_HOST_ID'),
-                ('tower_uuid', 'LOG_AGGREGATOR_TOWER_UUID')]:
-            if hasattr(settings, setting_name):
-                fields[log_name] = getattr(settings, setting_name, None)
-            elif log_name == 'type':
-                fields[log_name] = 'other'
-
-        uuid = (
-            getattr(settings, 'LOG_AGGREGATOR_TOWER_UUID', None) or
-            getattr(settings, 'INSTALL_UUID', None)
-        )
-        if uuid:
-            fields['tower_uuid'] = uuid
+        fields['cluster_host_id'] = self.cluster_host_id
+        fields['tower_uuid'] = self.tower_uuid
         return fields
 
     def format(self, record):
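The formatter change applies the same idea at another layer: LogstashFormatter now snapshots CLUSTER_HOST_ID and the tower UUID once in __init__, so the per-record field assembly above no longer reaches into django.conf.settings for every log record. A rough sketch of the pattern, with FakeSettings standing in for django.conf.settings (an assumption for the example, not AWX code):

# Sketch only; FakeSettings stands in for django.conf.settings.
import logging


class FakeSettings(object):
    CLUSTER_HOST_ID = 'awx-node-1'
    LOG_AGGREGATOR_TOWER_UUID = None
    INSTALL_UUID = 'abc-123'


settings = FakeSettings()


class SnapshotFormatter(logging.Formatter):

    def __init__(self, *args, **kwargs):
        # read the settings once, at construction time
        self.cluster_host_id = settings.CLUSTER_HOST_ID
        self.tower_uuid = (
            getattr(settings, 'LOG_AGGREGATOR_TOWER_UUID', None) or
            getattr(settings, 'INSTALL_UUID', None)
        )
        super(SnapshotFormatter, self).__init__(*args, **kwargs)

    def format(self, record):
        # no settings access here, only the cached attributes
        message = super(SnapshotFormatter, self).format(record)
        return '%s %s %s' % (self.cluster_host_id, self.tower_uuid, message)


if __name__ == '__main__':
    handler = logging.StreamHandler()
    handler.setFormatter(SnapshotFormatter())
    logger = logging.getLogger('demo')
    logger.addHandler(handler)
    logger.warning('hello')   # -> "awx-node-1 abc-123 hello"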
@@ -4,6 +4,7 @@
 # Python
 import logging
 import json
+import os
 import requests
 import time
 import threading
@@ -18,6 +19,7 @@ from django.conf import settings
 
 # requests futures, a dependency used by these handlers
 from requests_futures.sessions import FuturesSession
+import cachetools
 
 # AWX
 from awx.main.utils.formatters import LogstashFormatter
@@ -273,6 +275,16 @@ HANDLER_MAPPING = {
 }
 
 
+TTLCache = cachetools.TTLCache
+
+if 'py.test' in os.environ.get('_', ''):
+    # don't cache settings in unit tests
+    class TTLCache(TTLCache):
+
+        def __getitem__(self, item):
+            raise KeyError()
+
+
 class AWXProxyHandler(logging.Handler):
     '''
     Handler specific to the AWX external logging feature
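The TTLCache subclass defined for test runs above works because cachetools' cached() decorator treats a KeyError raised by the cache lookup as a miss and falls through to the wrapped function, so a cache whose __getitem__ always raises never serves a stale value in unit tests. A minimal sketch of that behaviour:

# Sketch only: a cache whose __getitem__ always raises KeyError is always a miss.
import cachetools

calls = []


class AlwaysMissCache(cachetools.TTLCache):

    def __getitem__(self, item):
        raise KeyError()


@cachetools.cached(cache=AlwaysMissCache(maxsize=1, ttl=3))
def read_setting():
    calls.append(1)
    return 'fresh value'


read_setting()
read_setting()
assert len(calls) == 2   # both calls re-ran the function; nothing came from the cache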
@@ -316,6 +328,7 @@ class AWXProxyHandler(logging.Handler):
     def get_handler_class(self, protocol):
         return HANDLER_MAPPING.get(protocol, AWXNullHandler)
 
+    @cachetools.cached(cache=TTLCache(maxsize=1, ttl=3), key=lambda *args, **kw: 'get_handler')
     def get_handler(self, custom_settings=None, force_create=False):
         new_kwargs = {}
         use_settings = custom_settings or settings
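The decorator added to get_handler() uses a constant cache key: because the key function ignores self and any keyword arguments, every call maps to the same slot in the maxsize=1 cache, so the handler is rebuilt at most once per 3-second window. A sketch of the idea (ProxyLike is an assumed stand-in, not the real AWXProxyHandler):

# Sketch only: a constant key shares one cache slot across all calls.
import cachetools


class ProxyLike(object):

    builds = 0

    @cachetools.cached(cache=cachetools.TTLCache(maxsize=1, ttl=3),
                       key=lambda *args, **kw: 'get_handler')
    def get_handler(self, custom_settings=None):
        # the expensive part: in AWX this constructs and configures a handler
        ProxyLike.builds += 1
        return object()


proxy = ProxyLike()
assert proxy.get_handler() is proxy.get_handler(custom_settings=None)
assert ProxyLike.builds == 1   # second call hit the cache despite different kwargs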
@@ -342,10 +355,14 @@
         self._handler.setFormatter(self.formatter)
         return self._handler
 
+    @cachetools.cached(cache=TTLCache(maxsize=1, ttl=3), key=lambda *args, **kw: 'should_audit')
+    def should_audit(self):
+        return settings.LOG_AGGREGATOR_AUDIT
+
     def emit(self, record):
         if AWXProxyHandler.thread_local.enabled:
             actual_handler = self.get_handler()
-            if settings.LOG_AGGREGATOR_AUDIT:
+            if self.should_audit():
                 self.auditor.setLevel(settings.LOG_AGGREGATOR_LEVEL)
                 self.auditor.emit(record)
             return actual_handler.emit(record)
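Caching should_audit() the same way means emit() no longer reads settings.LOG_AGGREGATOR_AUDIT for every record; the trade-off is that a changed flag takes up to ttl seconds to be observed. A rough sketch of that trade-off (FakeSettings is an assumption, not django.conf.settings):

# Sketch only: the cached flag is re-read at most once per TTL window.
import time

import cachetools


class FakeSettings(object):
    LOG_AGGREGATOR_AUDIT = False


settings = FakeSettings()


@cachetools.cached(cache=cachetools.TTLCache(maxsize=1, ttl=3),
                   key=lambda: 'should_audit')
def should_audit():
    return settings.LOG_AGGREGATOR_AUDIT


assert should_audit() is False
settings.LOG_AGGREGATOR_AUDIT = True
assert should_audit() is False   # cached value still served inside the 3s window
time.sleep(3.1)
assert should_audit() is True    # entry expired; the setting is read again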