1
0
mirror of https://github.com/ansible/awx.git synced 2024-11-01 08:21:15 +03:00

Merge pull request #5739 from ryanpetrello/optimize-callback-with-logging

further optimize conf.settings access when logging is enabled

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
This commit is contained in:
softwarefactory-project-zuul[bot] 2020-01-22 21:53:04 +00:00 committed by GitHub
commit 3c5e9da9a1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 40 additions and 19 deletions

View File

@ -1,5 +1,4 @@
 # Python
-from collections import namedtuple
 import contextlib
 import logging
 import re
@ -136,6 +135,15 @@ def filter_sensitive(registry, key, value):
return value
class TransientSetting(object):
    """
    Minimal stand-in for a persisted Setting object, carrying only the
    primary key and value needed for encryption/decryption key derivation.
    """

    # __slots__ keeps instances tiny — these are created frequently.
    __slots__ = ('pk', 'value')

    def __init__(self, pk, value):
        self.pk, self.value = pk, value
class EncryptedCacheProxy(object):
    def __init__(self, cache, registry, encrypter=None, decrypter=None):
@ -186,8 +194,6 @@ class EncryptedCacheProxy(object):
     self.set(key, value, log=False, **kwargs)

 def _handle_encryption(self, method, key, value):
-    TransientSetting = namedtuple('TransientSetting', ['pk', 'value'])
     if value is not empty and self.registry.is_setting_encrypted(key):
         # If the setting exists in the database, we'll use its primary key
         # as part of the AES key when encrypting/decrypting

View File

@ -107,6 +107,17 @@ class LogstashFormatterBase(logging.Formatter):
class LogstashFormatter(LogstashFormatterBase):
def __init__(self, *args, **kwargs):
    # Resolve cluster/tower identity once at construction time; the
    # surrounding change removes the per-record settings lookups, so these
    # cached attributes are what format-time code reads instead.
    self.cluster_host_id = settings.CLUSTER_HOST_ID
    # Prefer an explicitly configured tower UUID, fall back to the install
    # UUID; `or None` normalizes any falsy fallback value to None.
    self.tower_uuid = (
        getattr(settings, 'LOG_AGGREGATOR_TOWER_UUID', None) or
        getattr(settings, 'INSTALL_UUID', None) or
        None
    )
    super(LogstashFormatter, self).__init__(*args, **kwargs)
def reformat_data_for_log(self, raw_data, kind=None):
    '''
    Process dictionaries from various contexts (job events, activity stream
@ -231,21 +242,8 @@ class LogstashFormatter(LogstashFormatterBase):
 log_kind = record.name[len('awx.analytics.'):]
 fields = self.reformat_data_for_log(fields, kind=log_kind)
 # General AWX metadata
-for log_name, setting_name in [
-        ('type', 'LOG_AGGREGATOR_TYPE'),
-        ('cluster_host_id', 'CLUSTER_HOST_ID'),
-        ('tower_uuid', 'LOG_AGGREGATOR_TOWER_UUID')]:
-    if hasattr(settings, setting_name):
-        fields[log_name] = getattr(settings, setting_name, None)
-    elif log_name == 'type':
-        fields[log_name] = 'other'
-uuid = (
-    getattr(settings, 'LOG_AGGREGATOR_TOWER_UUID', None) or
-    getattr(settings, 'INSTALL_UUID', None)
-)
-if uuid:
-    fields['tower_uuid'] = uuid
+fields['cluster_host_id'] = self.cluster_host_id
+fields['tower_uuid'] = self.tower_uuid
 return fields
def format(self, record):

View File

@ -4,6 +4,7 @@
 # Python
 import logging
 import json
+import os
 import requests
 import time
 import threading
@ -18,6 +19,7 @@ from django.conf import settings
 # requests futures, a dependency used by these handlers
 from requests_futures.sessions import FuturesSession
+import cachetools

 # AWX
 from awx.main.utils.formatters import LogstashFormatter
@ -273,6 +275,16 @@ HANDLER_MAPPING = {
}
# By default, use cachetools' TTL cache for the settings-access caches below.
TTLCache = cachetools.TTLCache

if 'py.test' in os.environ.get('_', ''):
    # don't cache settings in unit tests
    # NOTE(review): this detects pytest by inspecting the invoking binary
    # path in the $_ environment variable — fragile; confirm it matches how
    # the test suite is actually launched.
    class TTLCache(TTLCache):
        # Shadow the real cache with one that always misses, so every
        # lookup recomputes the underlying settings value.
        def __getitem__(self, item):
            raise KeyError()
class AWXProxyHandler(logging.Handler):
    '''
    Handler specific to the AWX external logging feature
@ -316,6 +328,7 @@ class AWXProxyHandler(logging.Handler):
def get_handler_class(self, protocol):
    return HANDLER_MAPPING.get(protocol, AWXNullHandler)
+@cachetools.cached(cache=TTLCache(maxsize=1, ttl=3), key=lambda *args, **kw: 'get_handler')
 def get_handler(self, custom_settings=None, force_create=False):
     new_kwargs = {}
     use_settings = custom_settings or settings
@ -342,10 +355,14 @@
     self._handler.setFormatter(self.formatter)
     return self._handler
# Cache the audit flag for 3 seconds so emit() does not have to read
# conf.settings on every single log record (see PR: "further optimize
# conf.settings access when logging is enabled").
@cachetools.cached(cache=TTLCache(maxsize=1, ttl=3), key=lambda *args, **kw: 'should_audit')
def should_audit(self):
    """Return the LOG_AGGREGATOR_AUDIT setting (cached with a 3s TTL)."""
    return settings.LOG_AGGREGATOR_AUDIT
 def emit(self, record):
     if AWXProxyHandler.thread_local.enabled:
         actual_handler = self.get_handler()
-        if settings.LOG_AGGREGATOR_AUDIT:
+        if self.should_audit():
             self.auditor.setLevel(settings.LOG_AGGREGATOR_LEVEL)
             self.auditor.emit(record)
         return actual_handler.emit(record)