diff --git a/Makefile b/Makefile
index 74376e5e84..744b5fa305 100644
--- a/Makefile
+++ b/Makefile
@@ -291,7 +291,7 @@ migrate:
 
 # Run after making changes to the models to create a new migration.
 dbchange:
-	$(PYTHON) manage.py schemamigration main v14_changes --auto
+	$(PYTHON) manage.py makemigrations
 
 # access database shell, asks for password
 dbshell:
diff --git a/awx/api/generics.py b/awx/api/generics.py
index a4a1112d08..93dd3ba444 100644
--- a/awx/api/generics.py
+++ b/awx/api/generics.py
@@ -32,7 +32,8 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
            'SubListCreateAttachDetachAPIView', 'RetrieveAPIView',
            'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView',
            'RetrieveUpdateDestroyAPIView', 'DestroyAPIView',
-           'MongoAPIView', 'MongoListAPIView']
+           'SubDetailAPIView',
+           'ParentMixin',]
 
 logger = logging.getLogger('awx.api.generics')
 
@@ -219,28 +220,6 @@ class GenericAPIView(generics.GenericAPIView, APIView):
         d['settings'] = settings
         return d
 
-
-class MongoAPIView(GenericAPIView):
-
-    def get_parent_object(self):
-        parent_filter = {
-            self.lookup_field: self.kwargs.get(self.lookup_field, None),
-        }
-        return get_object_or_404(self.parent_model, **parent_filter)
-
-    def check_parent_access(self, parent=None):
-        parent = parent or self.get_parent_object()
-        parent_access = getattr(self, 'parent_access', 'read')
-        if parent_access in ('read', 'delete'):
-            args = (self.parent_model, parent_access, parent)
-        else:
-            args = (self.parent_model, parent_access, parent, None)
-        if not self.request.user.can_access(*args):
-            raise PermissionDenied()
-
-class MongoListAPIView(generics.ListAPIView, MongoAPIView):
-    pass
-
 class SimpleListAPIView(generics.ListAPIView, GenericAPIView):
 
     def get_queryset(self):
@@ -277,7 +256,25 @@ class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
     # Base class for a list view that allows creating new objects.
     pass
 
-class SubListAPIView(ListAPIView):
+class ParentMixin(object):
+
+    def get_parent_object(self):
+        parent_filter = {
+            self.lookup_field: self.kwargs.get(self.lookup_field, None),
+        }
+        return get_object_or_404(self.parent_model, **parent_filter)
+
+    def check_parent_access(self, parent=None):
+        parent = parent or self.get_parent_object()
+        parent_access = getattr(self, 'parent_access', 'read')
+        if parent_access in ('read', 'delete'):
+            args = (self.parent_model, parent_access, parent)
+        else:
+            args = (self.parent_model, parent_access, parent, None)
+        if not self.request.user.can_access(*args):
+            raise PermissionDenied()
+
+class SubListAPIView(ListAPIView, ParentMixin):
     # Base class for a read-only sublist view.
     # Subclasses should define at least:
@@ -297,22 +294,6 @@ class SubListAPIView(ListAPIView):
         })
         return d
 
-    def get_parent_object(self):
-        parent_filter = {
-            self.lookup_field: self.kwargs.get(self.lookup_field, None),
-        }
-        return get_object_or_404(self.parent_model, **parent_filter)
-
-    def check_parent_access(self, parent=None):
-        parent = parent or self.get_parent_object()
-        parent_access = getattr(self, 'parent_access', 'read')
-        if parent_access in ('read', 'delete'):
-            args = (self.parent_model, parent_access, parent)
-        else:
-            args = (self.parent_model, parent_access, parent, None)
-        if not self.request.user.can_access(*args):
-            raise PermissionDenied()
-
     def get_queryset(self):
         parent = self.get_parent_object()
         self.check_parent_access(parent)
@@ -449,6 +430,9 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
         else:
             return self.attach(request, *args, **kwargs)
 
+class SubDetailAPIView(generics.RetrieveAPIView, GenericAPIView, ParentMixin):
+    pass
+
 class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView):
     pass
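The net effect of the refactor above is that parent lookup and parent access checks now live in a reusable mixin instead of the Mongo-specific view base. A sketch of how a sublist view composes it, using names that appear elsewhere in this patch (the relationship value is assumed for illustration):

    # Sketch: any SubListAPIView now inherits get_parent_object()/check_parent_access()
    # from ParentMixin via its MRO; only declarative attributes are needed.
    class OrganizationTeamsList(SubListAPIView):
        model = Team                 # objects listed in the response
        serializer_class = TeamSerializer
        parent_model = Organization  # resolved by ParentMixin.get_parent_object()
        relationship = 'teams'       # parent attribute holding the related queryset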
diff --git a/awx/api/metadata.py b/awx/api/metadata.py
index a11df8d1ce..01f8fe306e 100644
--- a/awx/api/metadata.py
+++ b/awx/api/metadata.py
@@ -13,7 +13,7 @@ from rest_framework import serializers
 from rest_framework.request import clone_request
 
 # Ansible Tower
-from awx.main.models import InventorySource
+from awx.main.models import InventorySource, Notifier
 
 class Metadata(metadata.SimpleMetadata):
@@ -76,6 +76,12 @@ class Metadata(metadata.SimpleMetadata):
                 get_group_by_choices = getattr(InventorySource, 'get_%s_group_by_choices' % cp)
                 field_info['%s_group_by_choices' % cp] = get_group_by_choices()
 
+        # Special handling of notification configuration where the required properties
+        # are conditional on the type selected.
+        if field.field_name == 'notification_configuration':
+            for (notification_type_name, notification_tr_name, notification_type_class) in Notifier.NOTIFICATION_TYPES:
+                field_info[notification_type_name] = notification_type_class.init_parameters
+
         # Update type of fields returned...
         if field.field_name == 'type':
             field_info['type'] = 'multiple choice'
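With this hook, an OPTIONS request on a notifier endpoint carries one key per notification type under notification_configuration, each mapping to that backend's init_parameters. The exact parameter names live in the backend classes added later in this patch; the shape is roughly as follows (keys shown are illustrative, not verbatim):

    # Illustrative fragment of OPTIONS metadata for 'notification_configuration':
    {
        'email': {'host': {'label': 'Host', 'type': 'string'},
                  'port': {'label': 'Port', 'type': 'int'},
                  'password': {'label': 'Password', 'type': 'password'}},
        'slack': {'token': {'label': 'Token', 'type': 'password'},
                  'channels': {'label': 'Destination Channels', 'type': 'list'}},
        # ... one entry per member of Notifier.NOTIFICATION_TYPES
    }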
Values are a tuple of field names on @@ -555,19 +551,19 @@ class BaseSerializer(serializers.ModelSerializer): class EmptySerializer(serializers.Serializer): pass - -class BaseFactSerializer(DocumentSerializer): +class BaseFactSerializer(BaseSerializer): __metaclass__ = BaseSerializerMetaclass def get_fields(self): ret = super(BaseFactSerializer, self).get_fields() if 'module' in ret and feature_enabled('system_tracking'): - choices = [(o, o.title()) for o in FactVersion.objects.all().only('module').distinct('module')] - ret['module'] = serializers.ChoiceField(source='module', choices=choices, read_only=True, required=False) + # TODO: the values_list may pull in a LOT of entries before the distinct is called + modules = Fact.objects.all().values_list('module', flat=True).distinct() + choices = [(o, o.title()) for o in modules] + ret['module'] = serializers.ChoiceField(choices=choices, read_only=True, required=False) return ret - class UnifiedJobTemplateSerializer(BaseSerializer): class Meta: @@ -868,7 +864,11 @@ class OrganizationSerializer(BaseSerializer): users = reverse('api:organization_users_list', args=(obj.pk,)), admins = reverse('api:organization_admins_list', args=(obj.pk,)), teams = reverse('api:organization_teams_list', args=(obj.pk,)), - activity_stream = reverse('api:organization_activity_stream_list', args=(obj.pk,)) + activity_stream = reverse('api:organization_activity_stream_list', args=(obj.pk,)), + notifiers = reverse('api:organization_notifiers_list', args=(obj.pk,)), + notifiers_any = reverse('api:organization_notifiers_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:organization_notifiers_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:organization_notifiers_error_list', args=(obj.pk,)), )) return res @@ -938,6 +938,9 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer): project_updates = reverse('api:project_updates_list', args=(obj.pk,)), schedules = reverse('api:project_schedules_list', args=(obj.pk,)), activity_stream = reverse('api:project_activity_stream_list', args=(obj.pk,)), + notifiers_any = reverse('api:project_notifiers_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:project_notifiers_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:project_notifiers_error_list', args=(obj.pk,)), )) # Backwards compatibility. 
@@ -1390,6 +1394,9 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOptionsSerializer):
             activity_stream = reverse('api:inventory_activity_stream_list', args=(obj.pk,)),
             hosts = reverse('api:inventory_source_hosts_list', args=(obj.pk,)),
             groups = reverse('api:inventory_source_groups_list', args=(obj.pk,)),
+            notifiers_any = reverse('api:inventory_source_notifiers_any_list', args=(obj.pk,)),
+            notifiers_success = reverse('api:inventory_source_notifiers_success_list', args=(obj.pk,)),
+            notifiers_error = reverse('api:inventory_source_notifiers_error_list', args=(obj.pk,)),
         ))
         if obj.inventory and obj.inventory.active:
             res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,))
@@ -1434,6 +1441,7 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSerializer):
         res.update(dict(
             inventory_source = reverse('api:inventory_source_detail', args=(obj.inventory_source.pk,)),
             cancel = reverse('api:inventory_update_cancel', args=(obj.pk,)),
+            notifications = reverse('api:inventory_update_notifications_list', args=(obj.pk,)),
         ))
         return res
@@ -1672,6 +1680,9 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
             schedules = reverse('api:job_template_schedules_list', args=(obj.pk,)),
             activity_stream = reverse('api:job_template_activity_stream_list', args=(obj.pk,)),
             launch = reverse('api:job_template_launch', args=(obj.pk,)),
+            notifiers_any = reverse('api:job_template_notifiers_any_list', args=(obj.pk,)),
+            notifiers_success = reverse('api:job_template_notifiers_success_list', args=(obj.pk,)),
+            notifiers_error = reverse('api:job_template_notifiers_error_list', args=(obj.pk,)),
         ))
         if obj.host_config_key:
             res['callback'] = reverse('api:job_template_callback', args=(obj.pk,))
@@ -1726,6 +1737,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
             job_tasks = reverse('api:job_job_tasks_list', args=(obj.pk,)),
             job_host_summaries = reverse('api:job_job_host_summaries_list', args=(obj.pk,)),
             activity_stream = reverse('api:job_activity_stream_list', args=(obj.pk,)),
+            notifications = reverse('api:job_notifications_list', args=(obj.pk,)),
         ))
         if obj.job_template and obj.job_template.active:
             res['job_template'] = reverse('api:job_template_detail',
@@ -2141,6 +2153,79 @@ class JobLaunchSerializer(BaseSerializer):
         attrs = super(JobLaunchSerializer, self).validate(attrs)
         return attrs
 
+class NotifierSerializer(BaseSerializer):
+
+    class Meta:
+        model = Notifier
+        fields = ('*', 'organization', 'notification_type', 'notification_configuration')
+
+    type_map = {"string": (str, unicode),
+                "int": (int,),
+                "bool": (bool,),
+                "list": (list,),
+                "password": (str, unicode),
+                "object": (dict,)}
+
+    def to_representation(self, obj):
+        ret = super(NotifierSerializer, self).to_representation(obj)
+        for field in obj.notification_class.init_parameters:
+            if field in ret['notification_configuration'] and \
+               force_text(ret['notification_configuration'][field]).startswith('$encrypted$'):
+                ret['notification_configuration'][field] = '$encrypted$'
+        return ret
+
+    def get_related(self, obj):
+        res = super(NotifierSerializer, self).get_related(obj)
+        res.update(dict(
+            test = reverse('api:notifier_test', args=(obj.pk,)),
+            notifications = reverse('api:notifier_notification_list', args=(obj.pk,)),
+        ))
+        if obj.organization and obj.organization.active:
+            res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,))
+        return res
+
+    def validate(self, attrs):
+        notification_class = Notifier.CLASS_FOR_NOTIFICATION_TYPE[attrs['notification_type']]
+        missing_fields = []
+        incorrect_type_fields = []
+        if 'notification_configuration' not in attrs:
+            return attrs
+        for field in notification_class.init_parameters:
+            if field not in attrs['notification_configuration']:
+                missing_fields.append(field)
+                continue
+            field_val = attrs['notification_configuration'][field]
+            field_type = notification_class.init_parameters[field]['type']
+            expected_types = self.type_map[field_type]
+            if type(field_val) not in expected_types:
+                incorrect_type_fields.append((field, field_type))
+                continue
+            if field_type == "password" and field_val.startswith('$encrypted$'):
+                missing_fields.append(field)
+        error_list = []
+        if missing_fields:
+            error_list.append("Missing required fields for Notification Configuration: {}".format(missing_fields))
+        if incorrect_type_fields:
+            for type_field_error in incorrect_type_fields:
+                error_list.append("Configuration field '{}' incorrect type, expected {}".format(type_field_error[0],
+                                                                                                type_field_error[1]))
+        if error_list:
+            raise serializers.ValidationError(error_list)
+        return attrs
+
+class NotificationSerializer(BaseSerializer):
+
+    class Meta:
+        model = Notification
+        fields = ('*', '-name', '-description', 'notifier', 'error', 'status', 'notifications_sent',
+                  'notification_type', 'recipients', 'subject')
+
+    def get_related(self, obj):
+        res = super(NotificationSerializer, self).get_related(obj)
+        res.update(dict(
+            notifier = reverse('api:notifier_detail', args=(obj.notifier.pk,)),
+        ))
+        return res
 
 class ScheduleSerializer(BaseSerializer):
@@ -2391,28 +2476,31 @@ class AuthTokenSerializer(serializers.Serializer):
 
 class FactVersionSerializer(BaseFactSerializer):
 
-    related = serializers.SerializerMethodField('get_related')
 
     class Meta:
-        model = FactVersion
-        fields = ('related', 'module', 'timestamp',)
+        model = Fact
+        fields = ('related', 'module', 'timestamp')
+        read_only_fields = ('*',)
 
     def get_related(self, obj):
-        host_obj = self.context.get('host_obj')
-        res = {}
+        res = super(FactVersionSerializer, self).get_related(obj)
         params = {
             'datetime': timestamp_apiformat(obj.timestamp),
             'module': obj.module,
         }
-        res.update(dict(
-            fact_view = build_url('api:host_fact_compare_view', args=(host_obj.pk,), get=params),
-        ))
+        res['fact_view'] = build_url('api:host_fact_compare_view', args=(obj.host.pk,), get=params)
         return res
 
-
 class FactSerializer(BaseFactSerializer):
 
     class Meta:
         model = Fact
-        depth = 2
-        fields = ('timestamp', 'host', 'module', 'fact')
+        # TODO: Consider adding in host to the fields list ?
+        fields = ('related', 'timestamp', 'module', 'facts', 'id', 'summary_fields', 'host')
+        read_only_fields = ('*',)
+
+    def get_related(self, obj):
+        res = super(FactSerializer, self).get_related(obj)
+        res['host'] = obj.host.get_absolute_url()
+        return res
+
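NotifierSerializer.validate cross-checks every backend parameter against type_map before a notifier can be saved, collecting all problems rather than failing on the first. A quick sketch of the behavior, assuming an email backend whose init_parameters include host, port, and password:

    # Hypothetical invalid payload: 'port' has the wrong type, 'password' is missing.
    serializer = NotifierSerializer(data={
        'name': 'Build alerts',
        'notification_type': 'email',
        'notification_configuration': {'host': 'smtp.example.com', 'port': '25'},
    })
    serializer.is_valid()
    # -> False; the error list contains one missing-fields entry naming 'password'
    #    and one entry for 'port' ("incorrect type, expected int").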
diff --git a/awx/api/urls.py b/awx/api/urls.py
index a249077c8e..d3cde02401 100644
--- a/awx/api/urls.py
+++ b/awx/api/urls.py
@@ -20,6 +20,10 @@ organization_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/projects/$', 'organization_projects_list'),
     url(r'^(?P<pk>[0-9]+)/teams/$', 'organization_teams_list'),
     url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'organization_activity_stream_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers/$', 'organization_notifiers_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'organization_notifiers_any_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'organization_notifiers_error_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'organization_notifiers_success_list'),
 )
 
 user_urls = patterns('awx.api.views',
@@ -44,12 +48,16 @@ project_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/project_updates/$', 'project_updates_list'),
     url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'project_activity_stream_list'),
     url(r'^(?P<pk>[0-9]+)/schedules/$', 'project_schedules_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'project_notifiers_any_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'project_notifiers_error_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'project_notifiers_success_list'),
 )
 
 project_update_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/$', 'project_update_detail'),
     url(r'^(?P<pk>[0-9]+)/cancel/$', 'project_update_cancel'),
     url(r'^(?P<pk>[0-9]+)/stdout/$', 'project_update_stdout'),
+    url(r'^(?P<pk>[0-9]+)/notifications/$', 'project_update_notifications_list'),
 )
 
 team_urls = patterns('awx.api.views',
@@ -92,8 +100,8 @@ host_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'host_ad_hoc_commands_list'),
     url(r'^(?P<pk>[0-9]+)/ad_hoc_command_events/$', 'host_ad_hoc_command_events_list'),
     #url(r'^(?P<pk>[0-9]+)/single_fact/$', 'host_single_fact_view'),
-    url(r'^(?P<pk>[0-9]+)/fact_versions/$',       'host_fact_versions_list'),
-    url(r'^(?P<pk>[0-9]+)/fact_view/$',           'host_fact_compare_view'),
+    url(r'^(?P<pk>[0-9]+)/fact_versions/$', 'host_fact_versions_list'),
+    url(r'^(?P<pk>[0-9]+)/fact_view/$', 'host_fact_compare_view'),
 )
 
 group_urls = patterns('awx.api.views',
@@ -121,12 +129,16 @@ inventory_source_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/schedules/$', 'inventory_source_schedules_list'),
     url(r'^(?P<pk>[0-9]+)/groups/$', 'inventory_source_groups_list'),
     url(r'^(?P<pk>[0-9]+)/hosts/$', 'inventory_source_hosts_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'inventory_source_notifiers_any_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'inventory_source_notifiers_error_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'inventory_source_notifiers_success_list'),
 )
 
 inventory_update_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/$', 'inventory_update_detail'),
     url(r'^(?P<pk>[0-9]+)/cancel/$', 'inventory_update_cancel'),
     url(r'^(?P<pk>[0-9]+)/stdout/$', 'inventory_update_stdout'),
+    url(r'^(?P<pk>[0-9]+)/notifications/$', 'inventory_update_notifications_list'),
 )
 
 inventory_script_urls = patterns('awx.api.views',
@@ -168,6 +180,9 @@ job_template_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/schedules/$', 'job_template_schedules_list'),
     url(r'^(?P<pk>[0-9]+)/survey_spec/$', 'job_template_survey_spec'),
     url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_template_activity_stream_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'job_template_notifiers_any_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'job_template_notifiers_error_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'job_template_notifiers_success_list'),
 )
 
 job_urls = patterns('awx.api.views',
@@ -182,6 +197,7 @@ job_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/job_tasks/$', 'job_job_tasks_list'),
     url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_activity_stream_list'),
     url(r'^(?P<pk>[0-9]+)/stdout/$', 'job_stdout'),
+    url(r'^(?P<pk>[0-9]+)/notifications/$', 'job_notifications_list'),
 )
 
 job_host_summary_urls = patterns('awx.api.views',
@@ -224,6 +240,18 @@ system_job_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/cancel/$', 'system_job_cancel'),
 )
 
+notifier_urls = patterns('awx.api.views',
+    url(r'^$', 'notifier_list'),
+    url(r'^(?P<pk>[0-9]+)/$', 'notifier_detail'),
+    url(r'^(?P<pk>[0-9]+)/test/$', 'notifier_test'),
+    url(r'^(?P<pk>[0-9]+)/notifications/$', 'notifier_notification_list'),
+)
+
+notification_urls = patterns('awx.api.views',
+    url(r'^$', 'notification_list'),
+    url(r'^(?P<pk>[0-9]+)/$', 'notification_detail'),
+)
+
 schedule_urls = patterns('awx.api.views',
     url(r'^$', 'schedule_list'),
     url(r'^(?P<pk>[0-9]+)/$', 'schedule_detail'),
@@ -273,6 +301,8 @@ v1_urls = patterns('awx.api.views',
     url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)),
     url(r'^system_job_templates/', include(system_job_template_urls)),
     url(r'^system_jobs/', include(system_job_urls)),
+    url(r'^notifiers/', include(notifier_urls)),
+    url(r'^notifications/', include(notification_urls)),
     url(r'^unified_job_templates/$', 'unified_job_template_list'),
     url(r'^unified_jobs/$', 'unified_job_list'),
     url(r'^activity_stream/', include(activity_stream_urls)),
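All of the new routes resolve through the api namespace, which is exactly how the serializers above assemble their related links; for example (the resulting paths are the expected v1 shapes):

    from django.core.urlresolvers import reverse  # Django 1.x import used throughout this tree

    reverse('api:notifier_detail', args=(42,))        # -> '/api/v1/notifiers/42/'
    reverse('api:job_notifications_list', args=(7,))  # -> '/api/v1/jobs/7/notifications/'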
diff --git a/awx/api/views.py b/awx/api/views.py
index b08bbf7e58..45c854b5f3 100644
--- a/awx/api/views.py
+++ b/awx/api/views.py
@@ -42,9 +42,6 @@ from rest_framework import status
 from rest_framework_yaml.parsers import YAMLParser
 from rest_framework_yaml.renderers import YAMLRenderer
 
-# MongoEngine
-import mongoengine
-
 # QSStats
 import qsstats
 
@@ -56,12 +53,11 @@ from social.backends.utils import load_backends
 
 # AWX
 from awx.main.task_engine import TaskSerializer, TASK_FILE, TEMPORARY_TASK_FILE
-from awx.main.tasks import mongodb_control
+from awx.main.tasks import mongodb_control, send_notifications
 from awx.main.access import get_user_queryset
 from awx.main.ha import is_ha_environment
 from awx.api.authentication import TaskAuthentication, TokenGetAuthentication
 from awx.api.utils.decorators import paginated
-from awx.api.filters import MongoFilterBackend
 from awx.api.generics import get_view_name
 from awx.api.generics import * # noqa
 from awx.api.license import feature_enabled, feature_exists, LicenseForbids
@@ -70,7 +66,6 @@ from awx.main.utils import * # noqa
 from awx.api.permissions import * # noqa
 from awx.api.renderers import * # noqa
 from awx.api.serializers import * # noqa
-from awx.fact.models import * # noqa
 from awx.main.utils import emit_websocket_notification
 from awx.main.conf import tower_settings
 
@@ -137,6 +132,8 @@ class ApiV1RootView(APIView):
         data['schedules'] = reverse('api:schedule_list')
         data['roles'] = reverse('api:role_list')
         data['resources'] = reverse('api:resource_list')
+        data['notifiers'] = reverse('api:notifier_list')
+        data['notifications'] = reverse('api:notification_list')
         data['unified_job_templates'] = reverse('api:unified_job_template_list')
         data['unified_jobs'] = reverse('api:unified_job_list')
         data['activity_stream'] = reverse('api:activity_stream_list')
@@ -252,32 +249,12 @@ class ApiV1ConfigView(APIView):
             # FIX: Log
Response({"error": "Invalid License"}, status=status.HTTP_400_BAD_REQUEST) - # Sanity check: If this license includes system tracking, make - # sure that we have a valid MongoDB to point to, and complain if - # we do not. - if ('features' in license_data and 'system_tracking' in license_data['features'] and - license_data['features']['system_tracking'] and settings.MONGO_HOST == NotImplemented): - return Response({ - 'error': 'This license supports system tracking, which ' - 'requires MongoDB to be installed. Since you are ' - 'running in an HA environment, you will need to ' - 'provide a MongoDB instance. Please re-run the ' - 'installer prior to installing this license.' - }, status=status.HTTP_400_BAD_REQUEST) - # If the license is valid, write it to disk. if license_data['valid_key']: tower_settings.LICENSE = data_actual - - # Spawn a task to ensure that MongoDB is started (or stopped) - # as appropriate, based on whether the license uses it. - if license_data['features']['system_tracking']: - mongodb_control.delay('start') - else: - mongodb_control.delay('stop') - - # Done; return the response. + tower_settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host()) return Response(license_data) + return Response({"error": "Invalid license"}, status=status.HTTP_400_BAD_REQUEST) def delete(self, request): @@ -698,6 +675,35 @@ class OrganizationActivityStreamList(SubListAPIView): # Okay, let it through. return super(type(self), self).get(request, *args, **kwargs) +class OrganizationNotifiersList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Organization + relationship = 'notifiers' + parent_key = 'organization' + +class OrganizationNotifiersAnyList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Organization + relationship = 'notifiers_any' + +class OrganizationNotifiersErrorList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Organization + relationship = 'notifiers_error' + +class OrganizationNotifiersSuccessList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Organization + relationship = 'notifiers_success' + class TeamList(ListCreateAPIView): model = Team @@ -868,6 +874,26 @@ class ProjectActivityStreamList(SubListAPIView): return qs.filter(project=parent) return qs.filter(Q(project=parent) | Q(credential__in=parent.credential)) +class ProjectNotifiersAnyList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Project + relationship = 'notifiers_any' + +class ProjectNotifiersErrorList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Project + relationship = 'notifiers_error' + +class ProjectNotifiersSuccessList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Project + relationship = 'notifiers_success' class ProjectUpdatesList(SubListAPIView): @@ -918,6 +944,12 @@ class ProjectUpdateCancel(RetrieveAPIView): else: return self.http_method_not_allowed(request, *args, **kwargs) +class ProjectUpdateNotificationsList(SubListAPIView): + + model = Notification + serializer_class = NotificationSerializer + parent_model = Project + relationship = 'notifications' class UserList(ListCreateAPIView): @@ -1172,33 +1204,6 @@ class 
InventoryScanJobTemplateList(SubListAPIView):
         qs = self.request.user.get_queryset(self.model)
         return qs.filter(job_type=PERM_INVENTORY_SCAN, inventory=parent)
 
-class InventorySingleFactView(MongoAPIView):
-
-    model = Fact
-    parent_model = Inventory
-    new_in_220 = True
-    serializer_class = FactSerializer
-    filter_backends = (MongoFilterBackend,)
-
-    def get(self, request, *args, **kwargs):
-        # Sanity check: Does the license allow system tracking?
-        if not feature_enabled('system_tracking'):
-            raise LicenseForbids('Your license does not permit use '
-                                 'of system tracking.')
-
-        fact_key = request.query_params.get("fact_key", None)
-        fact_value = request.query_params.get("fact_value", None)
-        datetime_spec = request.query_params.get("timestamp", None)
-        module_spec = request.query_params.get("module", None)
-
-        if fact_key is None or fact_value is None or module_spec is None:
-            return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
-        datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
-        inventory_obj = self.get_parent_object()
-        fact_data = Fact.get_single_facts([h.name for h in inventory_obj.hosts.all()], fact_key, fact_value, datetime_actual, module_spec)
-        return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else []))
-
-
 class HostList(ListCreateAPIView):
 
     model = Host
@@ -1285,88 +1290,43 @@ class HostActivityStreamList(SubListAPIView):
         qs = self.request.user.get_queryset(self.model)
         return qs.filter(Q(host=parent) | Q(inventory=parent.inventory))
 
-class HostFactVersionsList(MongoListAPIView):
+class HostFactVersionsList(ListAPIView, ParentMixin):
 
+    model = Fact
     serializer_class = FactVersionSerializer
     parent_model = Host
     new_in_220 = True
-    filter_backends = (MongoFilterBackend,)
 
     def get_queryset(self):
-        from_spec = self.request.query_params.get('from', None)
-        to_spec = self.request.query_params.get('to', None)
-        module_spec = self.request.query_params.get('module', None)
-
        if not feature_enabled("system_tracking"):
             raise LicenseForbids("Your license does not permit use "
                                  "of system tracking.")
 
-        host = self.get_parent_object()
-        self.check_parent_access(host)
+        from_spec = self.request.query_params.get('from', None)
+        to_spec = self.request.query_params.get('to', None)
+        module_spec = self.request.query_params.get('module', None)
 
-        try:
-            fact_host = FactHost.objects.get(hostname=host.name, inventory_id=host.inventory.pk)
-        except FactHost.DoesNotExist:
-            return None
-        except mongoengine.ConnectionError:
-            return Response(dict(error="System Tracking Database is disabled"), status=status.HTTP_400_BAD_REQUEST)
+        if from_spec:
+            from_spec = dateutil.parser.parse(from_spec)
+        if to_spec:
+            to_spec = dateutil.parser.parse(to_spec)
 
-        kv = {
-            'host': fact_host.id,
-        }
-        if module_spec is not None:
-            kv['module'] = module_spec
-        if from_spec is not None:
-            from_actual = dateutil.parser.parse(from_spec)
-            kv['timestamp__gt'] = from_actual
-        if to_spec is not None:
-            to_actual = dateutil.parser.parse(to_spec)
-            kv['timestamp__lte'] = to_actual
+        host_obj = self.get_parent_object()
 
-        return FactVersion.objects.filter(**kv).order_by("-timestamp")
+        return Fact.get_timeline(host_obj.id, module=module_spec, ts_from=from_spec, ts_to=to_spec)
 
     def list(self, *args, **kwargs):
         queryset = self.get_queryset() or []
-        try:
-            serializer = FactVersionSerializer(queryset, many=True, context=dict(host_obj=self.get_parent_object()))
-        except mongoengine.ConnectionError:
-            return Response(dict(error="System
Tracking Database is disabled"), status=status.HTTP_400_BAD_REQUEST) - return Response(dict(results=serializer.data)) + return Response(dict(results=self.serializer_class(queryset, many=True).data)) -class HostSingleFactView(MongoAPIView): +class HostFactCompareView(SubDetailAPIView): model = Fact - parent_model = Host - new_in_220 = True - serializer_class = FactSerializer - filter_backends = (MongoFilterBackend,) - - def get(self, request, *args, **kwargs): - # Sanity check: Does the license allow system tracking? - if not feature_enabled('system_tracking'): - raise LicenseForbids('Your license does not permit use ' - 'of system tracking.') - - fact_key = request.query_params.get("fact_key", None) - fact_value = request.query_params.get("fact_value", None) - datetime_spec = request.query_params.get("timestamp", None) - module_spec = request.query_params.get("module", None) - - if fact_key is None or fact_value is None or module_spec is None: - return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST) - datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now() - host_obj = self.get_parent_object() - fact_data = Fact.get_single_facts([host_obj.name], fact_key, fact_value, datetime_actual, module_spec) - return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else [])) - -class HostFactCompareView(MongoAPIView): - new_in_220 = True parent_model = Host serializer_class = FactSerializer - filter_backends = (MongoFilterBackend,) - def get(self, request, *args, **kwargs): + def retrieve(self, request, *args, **kwargs): # Sanity check: Does the license allow system tracking? if not feature_enabled('system_tracking'): raise LicenseForbids('Your license does not permit use ' @@ -1377,10 +1337,11 @@ class HostFactCompareView(MongoAPIView): datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now() host_obj = self.get_parent_object() - fact_entry = Fact.get_host_version(host_obj.name, host_obj.inventory.pk, datetime_actual, module_spec) - host_data = FactSerializer(fact_entry).data if fact_entry is not None else {} - return Response(host_data) + fact_entry = Fact.get_host_fact(host_obj.id, module_spec, datetime_actual) + if not fact_entry: + return Response({'detail': 'Fact not found'}, status=status.HTTP_404_NOT_FOUND) + return Response(self.serializer_class(instance=fact_entry).data) class GroupList(ListCreateAPIView): @@ -1549,33 +1510,6 @@ class GroupDetail(RetrieveUpdateDestroyAPIView): obj.mark_inactive_recursive() return Response(status=status.HTTP_204_NO_CONTENT) - -class GroupSingleFactView(MongoAPIView): - - model = Fact - parent_model = Group - new_in_220 = True - serializer_class = FactSerializer - filter_backends = (MongoFilterBackend,) - - def get(self, request, *args, **kwargs): - # Sanity check: Does the license allow system tracking? 
-        if not feature_enabled('system_tracking'):
-            raise LicenseForbids('Your license does not permit use '
-                                 'of system tracking.')
-
-        fact_key = request.query_params.get("fact_key", None)
-        fact_value = request.query_params.get("fact_value", None)
-        datetime_spec = request.query_params.get("timestamp", None)
-        module_spec = request.query_params.get("module", None)
-
-        if fact_key is None or fact_value is None or module_spec is None:
-            return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
-        datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
-        group_obj = self.get_parent_object()
-        fact_data = Fact.get_single_facts([h.name for h in group_obj.hosts.all()], fact_key, fact_value, datetime_actual, module_spec)
-        return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else []))
-
 class InventoryGroupsList(SubListCreateAttachDetachAPIView):
 
     model = Group
@@ -1803,6 +1737,27 @@ class InventorySourceActivityStreamList(SubListAPIView):
         # Okay, let it through.
         return super(type(self), self).get(request, *args, **kwargs)
 
+class InventorySourceNotifiersAnyList(SubListCreateAttachDetachAPIView):
+
+    model = Notifier
+    serializer_class = NotifierSerializer
+    parent_model = InventorySource
+    relationship = 'notifiers_any'
+
+class InventorySourceNotifiersErrorList(SubListCreateAttachDetachAPIView):
+
+    model = Notifier
+    serializer_class = NotifierSerializer
+    parent_model = InventorySource
+    relationship = 'notifiers_error'
+
+class InventorySourceNotifiersSuccessList(SubListCreateAttachDetachAPIView):
+
+    model = Notifier
+    serializer_class = NotifierSerializer
+    parent_model = InventorySource
+    relationship = 'notifiers_success'
+
 class InventorySourceHostsList(SubListAPIView):
 
     model = Host
@@ -1867,6 +1822,13 @@ class InventoryUpdateCancel(RetrieveAPIView):
         else:
             return self.http_method_not_allowed(request, *args, **kwargs)
 
+class InventoryUpdateNotificationsList(SubListAPIView):
+
+    model = Notification
+    serializer_class = NotificationSerializer
+    parent_model = InventoryUpdate
+    relationship = 'notifications'
+
 class JobTemplateList(ListCreateAPIView):
 
     model = JobTemplate
@@ -2036,6 +1998,27 @@ class JobTemplateActivityStreamList(SubListAPIView):
         # Okay, let it through.
         return super(type(self), self).get(request, *args, **kwargs)
 
+class JobTemplateNotifiersAnyList(SubListCreateAttachDetachAPIView):
+
+    model = Notifier
+    serializer_class = NotifierSerializer
+    parent_model = JobTemplate
+    relationship = 'notifiers_any'
+
+class JobTemplateNotifiersErrorList(SubListCreateAttachDetachAPIView):
+
+    model = Notifier
+    serializer_class = NotifierSerializer
+    parent_model = JobTemplate
+    relationship = 'notifiers_error'
+
+class JobTemplateNotifiersSuccessList(SubListCreateAttachDetachAPIView):
+
+    model = Notifier
+    serializer_class = NotifierSerializer
+    parent_model = JobTemplate
+    relationship = 'notifiers_success'
+
 class JobTemplateCallback(GenericAPIView):
 
     model = JobTemplate
@@ -2369,6 +2352,13 @@ class JobRelaunch(RetrieveAPIView, GenericAPIView):
         headers = {'Location': new_job.get_absolute_url()}
         return Response(data, status=status.HTTP_201_CREATED, headers=headers)
 
+class JobNotificationsList(SubListAPIView):
+
+    model = Notification
+    serializer_class = NotificationSerializer
+    parent_model = Job
+    relationship = 'notifications'
+
 class BaseJobHostSummariesList(SubListAPIView):
 
     model = JobHostSummary
@@ -3022,6 +3012,58 @@ class AdHocCommandStdout(UnifiedJobStdout):
     model = AdHocCommand
     new_in_220 = True
 
+class NotifierList(ListCreateAPIView):
+
+    model = Notifier
+    serializer_class = NotifierSerializer
+    new_in_300 = True
+
+class NotifierDetail(RetrieveUpdateDestroyAPIView):
+
+    model = Notifier
+    serializer_class = NotifierSerializer
+    new_in_300 = True
+
+class NotifierTest(GenericAPIView):
+
+    view_name = 'Notifier Test'
+    model = Notifier
+    serializer_class = EmptySerializer
+    new_in_300 = True
+
+    def post(self, request, *args, **kwargs):
+        obj = self.get_object()
+        notification = obj.generate_notification("Tower Notification Test {} {}".format(obj.id, tower_settings.TOWER_URL_BASE),
+                                                 {"body": "Ansible Tower Test Notification {} {}".format(obj.id, tower_settings.TOWER_URL_BASE)})
+        if not notification:
+            return Response({}, status=status.HTTP_400_BAD_REQUEST)
+        else:
+            send_notifications.delay([notification.id])
+            headers = {'Location': notification.get_absolute_url()}
+            return Response({"notification": notification.id},
+                            headers=headers,
+                            status=status.HTTP_202_ACCEPTED)
+
+class NotifierNotificationList(SubListAPIView):
+
+    model = Notification
+    serializer_class = NotificationSerializer
+    parent_model = Notifier
+    relationship = 'notifications'
+    parent_key = 'notifier'
+
+class NotificationList(ListAPIView):
+
+    model = Notification
+    serializer_class = NotificationSerializer
+    new_in_300 = True
+
+class NotificationDetail(RetrieveAPIView):
+
+    model = Notification
+    serializer_class = NotificationSerializer
+    new_in_300 = True
+
 class ActivityStreamList(SimpleListAPIView):
 
     model = ActivityStream
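NotifierTest is the one non-CRUD endpoint here: it builds a throwaway notification and queues it through send_notifications, answering 202 with a pointer to the created record. A rough client-side sketch using DRF's test client (credentials and pk are placeholders):

    from rest_framework.test import APIClient

    client = APIClient()
    client.login(username='admin', password='password')
    resp = client.post('/api/v1/notifiers/42/test/')
    assert resp.status_code == 202
    notification_id = resp.data['notification']  # delivery happens asynchronously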
diff --git a/awx/main/access.py b/awx/main/access.py
index 5a7ec03263..84eb3957a9 100644
--- a/awx/main/access.py
+++ b/awx/main/access.py
@@ -1486,6 +1486,31 @@ class ScheduleAccess(BaseAccess):
         else:
             return False
 
+class NotifierAccess(BaseAccess):
+    '''
+    I can see/use a notifier if I have permission to
+    '''
+    model = Notifier
+
+    def get_queryset(self):
+        qs = self.model.objects.filter(active=True).distinct()
+        if self.user.is_superuser:
+            return qs
+        return qs
+
+class NotificationAccess(BaseAccess):
+    '''
+    I can see/use a notification if I have permission to
+    '''
+    model = Notification
+
+    def get_queryset(self):
+        qs = self.model.objects.distinct()
+        if self.user.is_superuser:
+            return qs
+        return qs
+
+
 class ActivityStreamAccess(BaseAccess):
     '''
     I can see activity stream events only when I have permission on all objects included in the event
@@ -1745,3 +1770,5 @@ register_access(CustomInventoryScript, CustomInventoryScriptAccess)
 register_access(TowerSettings, TowerSettingsAccess)
 register_access(Role, RoleAccess)
 register_access(Resource, ResourceAccess)
+register_access(Notifier, NotifierAccess)
+register_access(Notification, NotificationAccess)
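Both get_queryset methods above return the same queryset whether or not the user is a superuser, so access is effectively wide open pending the RBAC work. A stricter version might filter on the notifier's organization; the admins relation used here is an assumption, not something this patch defines:

    def get_queryset(self):
        qs = self.model.objects.filter(active=True).distinct()
        if self.user.is_superuser:
            return qs
        # Hypothetical restriction: notifiers owned by organizations the user administers.
        return qs.filter(organization__admins__in=[self.user])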
diff --git a/awx/main/management/commands/run_fact_cache_receiver.py b/awx/main/management/commands/run_fact_cache_receiver.py
index aa3abe1bfd..42fc25a561 100644
--- a/awx/main/management/commands/run_fact_cache_receiver.py
+++ b/awx/main/management/commands/run_fact_cache_receiver.py
@@ -9,9 +9,11 @@ from datetime import datetime
 # Django
 from django.core.management.base import NoArgsCommand
 from django.conf import settings
 
 # AWX
-from awx.fact.models.fact import * # noqa
+from awx.main.models.fact import Fact
+from awx.main.models.inventory import Host
 from awx.main.socket import Socket
 
 logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver')
@@ -47,35 +49,34 @@ class FactCacheReceiver(object):
         # ansible v2 will not emit this message. Thus, this can be removed at that time.
         if 'module_setup' in facts_data and len(facts_data) == 1:
             logger.info('Received module_setup message')
-            return
+            return None
 
         try:
-            host = FactHost.objects.get(hostname=hostname, inventory_id=inventory_id)
-        except FactHost.DoesNotExist:
-            logger.info('Creating new host <%s, %s>' % (hostname, inventory_id))
-            host = FactHost(hostname=hostname, inventory_id=inventory_id)
-            host.save()
-            logger.info('Created new host <%s>' % (host.id))
-        except FactHost.MultipleObjectsReturned:
-            query = "db['fact_host'].find(hostname=%s, inventory_id=%s)" % (hostname, inventory_id)
-            logger.warn('Database inconsistent. Multiple FactHost "%s" exist. Try the query %s to find the records.' % (hostname, query))
-            return
+            host_obj = Host.objects.get(name=hostname, inventory__id=inventory_id)
+        except Host.DoesNotExist:
+            logger.warn('Failed to intake fact. Host does not exist <%s, %s>' % (hostname, inventory_id))
+            return None
+        except Host.MultipleObjectsReturned:
+            logger.warn('Database inconsistent. Multiple Hosts found for <%s, %s>.' % (hostname, inventory_id))
+            return None
         except Exception, e:
             logger.error("Exception communicating with Fact Cache Database: %s" % str(e))
-            return
+            return None
 
-        (module, facts) = self.process_facts(facts_data)
+        (module_name, facts) = self.process_facts(facts_data)
         self.timestamp = datetime.fromtimestamp(date_key, None)
 
-        try:
-            # Update existing Fact entry
-            version_obj = FactVersion.objects.get(timestamp=self.timestamp, host=host, module=module)
-            Fact.objects(id=version_obj.fact.id).update_one(fact=facts)
-            logger.info('Updated existing fact <%s>' % (version_obj.fact.id))
-        except FactVersion.DoesNotExist:
+        # Update existing Fact entry
+        fact_obj = Fact.get_host_fact(host_obj.id, module_name, self.timestamp)
+        if fact_obj:
+            fact_obj.facts = facts
+            fact_obj.save()
+            logger.info('Updated existing fact <%s>' % (fact_obj.id))
+        else:
             # Create new Fact entry
-            (fact_obj, version_obj) = Fact.add_fact(self.timestamp, facts, host, module)
-            logger.info('Created new fact <%s, %s>' % (fact_obj.id, version_obj.id))
+            fact_obj = Fact.add_fact(host_obj.id, module_name, self.timestamp, facts)
+            logger.info('Created new fact <%s, %s>' % (fact_obj.id, module_name))
+        return fact_obj
 
     def run_receiver(self, use_processing_threads=True):
         with Socket('fact_cache', 'r') as facts:
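For reference, process_fact_message consumes one dict per scan off the fact_cache socket. The handler dereferences a hostname, an inventory id, a scan timestamp, and the fact payload, so a message looks roughly like this (the exact key names are an assumption; they are not visible in this hunk):

    message = {
        'host': 'web01.example.com',                 # matched against Host.name
        'inventory_id': 3,                           # matched against Host.inventory
        'date_key': 1457044800.0,                    # epoch seconds -> Fact.timestamp
        'facts': {'ansible_distribution': 'Fedora'}  # stored in Fact.facts (JSONB)
    }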
diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py
index d49dbf1669..5b5dd3bff0 100644
--- a/awx/main/management/commands/run_task_system.py
+++ b/awx/main/management/commands/run_task_system.py
@@ -15,7 +15,7 @@ from django.core.management.base import NoArgsCommand
 # AWX
 from awx.main.models import * # noqa
 from awx.main.queue import FifoQueue
-from awx.main.tasks import handle_work_error
+from awx.main.tasks import handle_work_error, handle_work_success
 from awx.main.utils import get_system_task_capacity
 
 # Celery
@@ -265,14 +265,15 @@ def process_graph(graph, task_capacity):
                 [{'type': graph.get_node_type(n['node_object']),
                   'id': n['node_object'].id} for n in node_dependencies]
             error_handler = handle_work_error.s(subtasks=dependent_nodes)
-            start_status = node_obj.start(error_callback=error_handler)
+            success_handler = handle_work_success.s(task_actual={'type': graph.get_node_type(node_obj),
+                                                                 'id': node_obj.id})
+            start_status = node_obj.start(error_callback=error_handler, success_callback=success_handler)
             if not start_status:
                 node_obj.status = 'failed'
                 if node_obj.job_explanation:
                     node_obj.job_explanation += ' '
                 node_obj.job_explanation += 'Task failed pre-start check.'
                 node_obj.save()
-                # TODO: Run error handler
                 continue
             remaining_volume -= impact
             running_impact += impact
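handle_work_error and handle_work_success are Celery tasks, and the .s(...) calls build partial signatures that node_obj.start() presumably links onto the unified job's own task. Reduced to its Celery core, the wiring looks like this (run_job stands in for whatever task the node actually starts; the exact linking inside start() is not shown in this patch):

    # Illustrative only: attach a success/error callback pair to a Celery task.
    result = run_job.apply_async(
        args=[node_obj.id],
        link=handle_work_success.s(task_actual={'type': 'job', 'id': node_obj.id}),
        link_error=handle_work_error.s(subtasks=dependent_nodes),
    )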
diff --git a/awx/main/migrations/0001_initial.py b/awx/main/migrations/0001_initial.py
index 79e8d8d6dd..6d2c78e454 100644
--- a/awx/main/migrations/0001_initial.py
+++ b/awx/main/migrations/0001_initial.py
@@ -43,7 +43,7 @@ class Migration(migrations.Migration):
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
                 ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
-                ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable')])),
+                ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped')])),
                 ('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
                 ('failed', models.BooleanField(default=False, editable=False)),
                 ('changed', models.BooleanField(default=False, editable=False)),
diff --git a/awx/main/migrations/0003_v300_changes.py b/awx/main/migrations/0003_v300_changes.py
new file mode 100644
index 0000000000..83b8b4b3ab
--- /dev/null
+++ b/awx/main/migrations/0003_v300_changes.py
@@ -0,0 +1,105 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import jsonfield.fields
+import django.db.models.deletion
+from django.conf import settings
+import taggit.managers
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('taggit', '0002_auto_20150616_2121'),
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+        ('main', '0002_v300_changes'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Notification',
+            fields=[
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('created', models.DateTimeField(default=None, editable=False)),
+                ('modified', models.DateTimeField(default=None, editable=False)),
+                ('status', models.CharField(default=b'pending', max_length=20, editable=False, choices=[(b'pending', 'Pending'), (b'successful', 'Successful'), (b'failed', 'Failed')])),
+                ('error', models.TextField(default=b'', editable=False, blank=True)),
+                ('notifications_sent', models.IntegerField(default=0, editable=False)),
+                ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])),
+                ('recipients', models.TextField(default=b'', editable=False, blank=True)),
+                ('subject', models.TextField(default=b'', editable=False, blank=True)),
+                ('body', jsonfield.fields.JSONField(default=dict, blank=True)),
+            ],
+            options={
+                'ordering': ('pk',),
+            },
+        ),
+        migrations.CreateModel(
+            name='Notifier',
+            fields=[
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('created', models.DateTimeField(default=None, editable=False)),
+                ('modified', models.DateTimeField(default=None, editable=False)),
+                ('description', models.TextField(default=b'', blank=True)),
+                ('active', models.BooleanField(default=True, editable=False)),
+                ('name', models.CharField(unique=True, max_length=512)),
+                ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])),
+                ('notification_configuration', jsonfield.fields.JSONField(default=dict)),
+                ('created_by', models.ForeignKey(related_name="{u'class': 'notifier', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+                ('modified_by', models.ForeignKey(related_name="{u'class': 'notifier', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+                ('organization', models.ForeignKey(related_name='notifiers', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', null=True)),
+                ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+            ],
+        ),
+        migrations.AddField(
+            model_name='notification',
+            name='notifier',
+            field=models.ForeignKey(related_name='notifications', editable=False, to='main.Notifier'),
+        ),
+        migrations.AddField(
+            model_name='activitystream',
+            name='notification',
+            field=models.ManyToManyField(to='main.Notification', blank=True),
+        ),
+        migrations.AddField(
+            model_name='activitystream',
+            name='notifier',
+            field=models.ManyToManyField(to='main.Notifier', blank=True),
+        ),
+        migrations.AddField(
+            model_name='organization',
+            name='notifiers_any',
+            field=models.ManyToManyField(related_name='organization_notifiers_for_any', to='main.Notifier', blank=True),
+        ),
+        migrations.AddField(
+            model_name='organization',
+            name='notifiers_error',
+            field=models.ManyToManyField(related_name='organization_notifiers_for_errors', to='main.Notifier', blank=True),
+        ),
+        migrations.AddField(
+            model_name='organization',
+            name='notifiers_success',
+            field=models.ManyToManyField(related_name='organization_notifiers_for_success', to='main.Notifier', blank=True),
+        ),
+        migrations.AddField(
+            model_name='unifiedjob',
+            name='notifications',
+            field=models.ManyToManyField(related_name='unifiedjob_notifications', editable=False, to='main.Notification'),
+        ),
+        migrations.AddField(
+            model_name='unifiedjobtemplate',
+            name='notifiers_any',
+            field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_any', to='main.Notifier', blank=True),
+        ),
+        migrations.AddField(
+            model_name='unifiedjobtemplate',
+            name='notifiers_error',
+            field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_errors', to='main.Notifier', blank=True),
+        ),
+        migrations.AddField(
+            model_name='unifiedjobtemplate',
+            name='notifiers_success',
+            field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_success', to='main.Notifier', blank=True),
+        ),
+    ]
diff --git a/awx/main/migrations/0004_v300_changes.py b/awx/main/migrations/0004_v300_changes.py
new file mode 100644
index 0000000000..66e523dc78
--- /dev/null
+++ b/awx/main/migrations/0004_v300_changes.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import jsonbfield.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0003_v300_changes'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Fact',
+            fields=[
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('timestamp', models.DateTimeField(default=None, help_text='Date and time of the corresponding fact scan gathering time.', editable=False)),
+                ('module', models.CharField(max_length=128)),
+                ('facts', jsonbfield.fields.JSONField(default={}, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True)),
+                ('host', models.ForeignKey(related_name='facts', to='main.Host', help_text='Host for the facts that the fact scan captured.')),
+            ],
+        ),
+        migrations.AlterIndexTogether(
+            name='fact',
+            index_together=set([('timestamp', 'module', 'host')]),
+        ),
+    ]
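Both new migrations are plain schema migrations, so the SQL they will run (including the JSONB column backing Fact.facts) can be inspected with Django's stock sqlmigrate before deploying:

    from django.core.management import call_command

    call_command('sqlmigrate', 'main', '0003')  # notification tables and M2M wiring
    call_command('sqlmigrate', 'main', '0004')  # the JSONB-backed fact table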
diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py
index 85476a19e7..89b54c4b5f 100644
--- a/awx/main/models/__init__.py
+++ b/awx/main/models/__init__.py
@@ -20,6 +20,8 @@ from awx.main.models.configuration import * # noqa
 from awx.main.models.rbac import * # noqa
 from awx.main.models.user import * # noqa
 from awx.main.models.mixins import * # noqa
+from awx.main.models.notifications import * # noqa
+from awx.main.models.fact import * # noqa
 
 # Monkeypatch Django serializer to ignore django-taggit fields (which break
 # the dumpdata command; see https://github.com/alex/django-taggit/issues/155).
@@ -63,3 +65,5 @@ activity_stream_registrar.connect(AdHocCommand)
 activity_stream_registrar.connect(Schedule)
 activity_stream_registrar.connect(CustomInventoryScript)
 activity_stream_registrar.connect(TowerSettings)
+activity_stream_registrar.connect(Notifier)
+activity_stream_registrar.connect(Notification)
diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py
index b695831ada..dfada31484 100644
--- a/awx/main/models/activity_stream.py
+++ b/awx/main/models/activity_stream.py
@@ -53,6 +53,8 @@ class ActivityStream(models.Model):
     ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True)
     schedule = models.ManyToManyField("Schedule", blank=True)
     custom_inventory_script = models.ManyToManyField("CustomInventoryScript", blank=True)
+    notifier = models.ManyToManyField("Notifier", blank=True)
+    notification = models.ManyToManyField("Notification", blank=True)
 
     def get_absolute_url(self):
         return reverse('api:activity_stream_detail', args=(self.pk,))
diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py
index 664269a188..c5ab627046 100644
--- a/awx/main/models/ad_hoc_commands.py
+++ b/awx/main/models/ad_hoc_commands.py
@@ -5,6 +5,7 @@ import hmac
 import json
 import logging
+from urlparse import urljoin
 
 # Django
 from django.conf import settings
@@ -139,6 +140,9 @@ class AdHocCommand(UnifiedJob):
     def get_absolute_url(self):
         return reverse('api:ad_hoc_command_detail', args=(self.pk,))
 
+    def get_ui_url(self):
+        return urljoin(tower_settings.TOWER_URL_BASE, "/#/ad_hoc_commands/{}".format(self.pk))
+
     @property
     def task_auth_token(self):
         '''Return temporary auth token used for task requests via API.'''
@@ -221,8 +225,9 @@ class AdHocCommandEvent(CreatedModifiedModel):
         ('runner_on_unreachable', _('Host Unreachable'), True),
         # Tower won't see no_hosts (check is done earlier without callback).
         #('runner_on_no_hosts', _('No Hosts Matched'), False),
-        # Tower should probably never see skipped (no conditionals).
-        #('runner_on_skipped', _('Host Skipped'), False),
+        # Tower will see skipped (when running in check mode for a module that
+        # does not support check mode).
+        ('runner_on_skipped', _('Host Skipped'), False),
         # Tower does not support async for ad hoc commands.
         #('runner_on_async_poll', _('Host Polling'), False),
         #('runner_on_async_ok', _('Host Async OK'), False),
diff --git a/awx/main/models/base.py b/awx/main/models/base.py
index 61515d7d18..c4edfbd8ba 100644
--- a/awx/main/models/base.py
+++ b/awx/main/models/base.py
@@ -25,7 +25,7 @@ from awx.main.utils import encrypt_field
 
 __all__ = ['VarsDictProperty', 'BaseModel', 'CreatedModifiedModel',
            'PasswordFieldsModel', 'PrimordialModel', 'CommonModel',
-           'CommonModelNameNotUnique',
+           'CommonModelNameNotUnique', 'NotificationFieldsModel',
           'PERM_INVENTORY_ADMIN', 'PERM_INVENTORY_READ',
           'PERM_INVENTORY_WRITE', 'PERM_INVENTORY_DEPLOY', 'PERM_INVENTORY_SCAN',
           'PERM_INVENTORY_CHECK', 'PERM_JOBTEMPLATE_CREATE', 'JOB_TYPE_CHOICES',
@@ -337,3 +337,26 @@ class CommonModelNameNotUnique(PrimordialModel):
         max_length=512,
         unique=False,
     )
+
+class NotificationFieldsModel(BaseModel):
+
+    class Meta:
+        abstract = True
+
+    notifiers_error = models.ManyToManyField(
+        "Notifier",
+        blank=True,
+        related_name='%(class)s_notifiers_for_errors'
+    )
+
+    notifiers_success = models.ManyToManyField(
+        "Notifier",
+        blank=True,
+        related_name='%(class)s_notifiers_for_success'
+    )
+
+    notifiers_any = models.ManyToManyField(
+        "Notifier",
+        blank=True,
+        related_name='%(class)s_notifiers_for_any'
+    )
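NotificationFieldsModel is abstract, so the three notifier hook-ups can be mixed into any model; the %(class)s placeholder is what produces the per-model reverse names (unifiedjobtemplate_notifiers_for_any and friends) that the queries in jobs.py below rely on. A sketch (the model name is hypothetical):

    # Sketch: inheriting the abstract base adds notifiers_any/_success/_error
    # M2M fields with reverse names like 'hypotheticaltemplate_notifiers_for_any'.
    class HypotheticalTemplate(NotificationFieldsModel):

        class Meta:
            app_label = 'main'

        name = models.CharField(max_length=512)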
+ """ + host = models.ForeignKey( + 'Host', + related_name='facts', + db_index=True, + on_delete=models.CASCADE, + help_text=_('Host for the facts that the fact scan captured.'), + ) + timestamp = models.DateTimeField( + default=None, + editable=False, + help_text=_('Date and time of the corresponding fact scan gathering time.') + ) + module = models.CharField(max_length=128) + facts = JSONField(blank=True, default={}, help_text=_('Arbitrary JSON structure of module facts captured at timestamp for a single host.')) + + class Meta: + app_label = 'main' + index_together = [ + ["timestamp", "module", "host"], + ] + + @staticmethod + def get_host_fact(host_id, module, timestamp): + qs = Fact.objects.filter(host__id=host_id, module=module, timestamp__lte=timestamp).order_by('-timestamp') + if qs: + return qs[0] + else: + return None + + @staticmethod + def get_timeline(host_id, module=None, ts_from=None, ts_to=None): + kwargs = { + 'host__id': host_id, + } + if module: + kwargs['module'] = module + if ts_from and ts_to and ts_from == ts_to: + kwargs['timestamp'] = ts_from + else: + if ts_from: + kwargs['timestamp__gt'] = ts_from + if ts_to: + kwargs['timestamp__lte'] = ts_to + return Fact.objects.filter(**kwargs).order_by('-timestamp').only('timestamp', 'module').order_by('-timestamp', 'module') + + @staticmethod + def add_fact(host_id, module, timestamp, facts): + fact_obj = Fact.objects.create(host_id=host_id, module=module, timestamp=timestamp, facts=facts) + fact_obj.save() + return fact_obj diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 17b51ca923..c289827400 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -6,6 +6,7 @@ import datetime import logging import re import copy +from urlparse import urljoin # Django from django.conf import settings @@ -24,7 +25,9 @@ from awx.main.models.base import * # noqa from awx.main.models.jobs import Job from awx.main.models.unified_jobs import * # noqa from awx.main.models.mixins import ResourceMixin +from awx.main.models.notifications import Notifier from awx.main.utils import ignore_inventory_computed_fields, _inventory_updates +from awx.main.conf import tower_settings __all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'CustomInventoryScript'] @@ -1217,6 +1220,14 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, ResourceMixin) return True return False + @property + def notifiers(self): + base_notifiers = Notifier.objects.filter(active=True) + error_notifiers = list(base_notifiers.filter(organization_notifiers_for_errors=self.inventory.organization)) + success_notifiers = list(base_notifiers.filter(organization_notifiers_for_success=self.inventory.organization)) + any_notifiers = list(base_notifiers.filter(organization_notifiers_for_any=self.inventory.organization)) + return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) + def clean_source(self): source = self.source if source and self.group: @@ -1276,6 +1287,9 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions): def get_absolute_url(self): return reverse('api:inventory_update_detail', args=(self.pk,)) + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/inventory_sync/{}".format(self.pk)) + def is_blocked_by(self, obj): if type(obj) == InventoryUpdate: if self.inventory_source.inventory == obj.inventory_source.inventory: diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 17ab41dbe4..1db5faa2b1 100644 --- 
a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -6,6 +6,7 @@ import hmac import json import yaml import logging +from urlparse import urljoin # Django from django.conf import settings @@ -22,6 +23,7 @@ from jsonfield import JSONField from awx.main.constants import CLOUD_PROVIDERS from awx.main.models.base import * # noqa from awx.main.models.unified_jobs import * # noqa +from awx.main.models.notifications import Notifier from awx.main.utils import decrypt_field, ignore_inventory_computed_fields from awx.main.utils import emit_websocket_notification from awx.main.redact import PlainTextCleaner @@ -347,6 +349,20 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin): def _can_update(self): return self.can_start_without_user_input() + @property + def notifiers(self): + # Return all notifiers defined on the Job Template, on the Project, and on the Organization for each trigger type + # TODO: Currently there is no org fk on project so this will need to be added once that is + # available after the rbac pr + base_notifiers = Notifier.objects.filter(active=True) + error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors__in=[self, self.project])) + success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success__in=[self, self.project])) + any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any__in=[self, self.project])) + # Get Organization Notifiers + error_notifiers = set(error_notifiers + list(base_notifiers.filter(organization_notifiers_for_errors__in=self.project.organizations.all()))) + success_notifiers = set(success_notifiers + list(base_notifiers.filter(organization_notifiers_for_success__in=self.project.organizations.all()))) + any_notifiers = set(any_notifiers + list(base_notifiers.filter(organization_notifiers_for_any__in=self.project.organizations.all()))) + return dict(error=list(error_notifiers), success=list(success_notifiers), any=list(any_notifiers)) class Job(UnifiedJob, JobOptions): ''' @@ -386,6 +402,9 @@ class Job(UnifiedJob, JobOptions): def get_absolute_url(self): return reverse('api:job_detail', args=(self.pk,)) + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/jobs/{}".format(self.pk)) + @property def task_auth_token(self): '''Return temporary auth token used for task requests via API.''' @@ -502,6 +521,26 @@ class Job(UnifiedJob, JobOptions): dependencies.append(source.create_inventory_update(launch_type='dependency')) return dependencies + def notification_data(self): + data = super(Job, self).notification_data() + all_hosts = {} + for h in self.job_host_summaries.all(): + all_hosts[h.host.name] = dict(failed=h.failed, + changed=h.changed, + dark=h.dark, + failures=h.failures, + ok=h.ok, + processed=h.processed, + skipped=h.skipped) + data.update(dict(inventory=self.inventory.name, + project=self.project.name, + playbook=self.playbook, + credential=self.credential.name, + limit=self.limit, + extra_vars=self.extra_vars, + hosts=all_hosts)) + return data + def handle_extra_data(self, extra_data): extra_vars = {} if isinstance(extra_data, dict): @@ -1082,6 +1121,9 @@ class SystemJob(UnifiedJob, SystemJobOptions): def get_absolute_url(self): return reverse('api:system_job_detail', args=(self.pk,)) + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/management_jobs/{}".format(self.pk)) + def is_blocked_by(self, obj): return True diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py new file mode 
100644 index 0000000000..29a51cf9ac --- /dev/null +++ b/awx/main/models/notifications.py @@ -0,0 +1,172 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +import logging + +from django.db import models +from django.core.urlresolvers import reverse +from django.core.mail.message import EmailMessage +from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str + +from awx.main.models.base import * # noqa +from awx.main.utils import encrypt_field, decrypt_field +from awx.main.notifications.email_backend import CustomEmailBackend +from awx.main.notifications.slack_backend import SlackBackend +from awx.main.notifications.twilio_backend import TwilioBackend +from awx.main.notifications.pagerduty_backend import PagerDutyBackend +from awx.main.notifications.hipchat_backend import HipChatBackend +from awx.main.notifications.webhook_backend import WebhookBackend +from awx.main.notifications.irc_backend import IrcBackend + +# Django-JSONField +from jsonfield import JSONField + +logger = logging.getLogger('awx.main.models.notifications') + +__all__ = ['Notifier', 'Notification'] + +class Notifier(CommonModel): + + NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend), + ('slack', _('Slack'), SlackBackend), + ('twilio', _('Twilio'), TwilioBackend), + ('pagerduty', _('Pagerduty'), PagerDutyBackend), + ('hipchat', _('HipChat'), HipChatBackend), + ('webhook', _('Webhook'), WebhookBackend), + ('irc', _('IRC'), IrcBackend)] + NOTIFICATION_TYPE_CHOICES = [(x[0], x[1]) for x in NOTIFICATION_TYPES] + CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES]) + + class Meta: + app_label = 'main' + + organization = models.ForeignKey( + 'Organization', + blank=False, + null=True, + on_delete=models.SET_NULL, + related_name='notifiers', + ) + + notification_type = models.CharField( + max_length = 32, + choices=NOTIFICATION_TYPE_CHOICES, + ) + + notification_configuration = JSONField(blank=False) + + def get_absolute_url(self): + return reverse('api:notifier_detail', args=(self.pk,)) + + @property + def notification_class(self): + return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type] + + def save(self, *args, **kwargs): + new_instance = not bool(self.pk) + update_fields = kwargs.get('update_fields', []) + for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", + self.notification_class.init_parameters): + if new_instance: + value = self.notification_configuration[field] + setattr(self, '_saved_{}_{}'.format("config", field), value) + self.notification_configuration[field] = '' + else: + encrypted = encrypt_field(self, 'notification_configuration', subfield=field) + self.notification_configuration[field] = encrypted + if 'notification_configuration' not in update_fields: + update_fields.append('notification_configuration') + super(Notifier, self).save(*args, **kwargs) + if new_instance: + update_fields = [] + for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", + self.notification_class.init_parameters): + saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '') + self.notification_configuration[field] = saved_value + #setattr(self.notification_configuration, field, saved_value) + if 'notification_configuration' not in update_fields: + update_fields.append('notification_configuration') + self.save(update_fields=update_fields) + + @property + def recipients(self): + return 
self.notification_configuration[self.notification_class.recipient_parameter] + + def generate_notification(self, subject, message): + notification = Notification(notifier=self, + notification_type=self.notification_type, + recipients=smart_str(self.recipients), + subject=subject, + body=message) + notification.save() + return notification + + def send(self, subject, body): + for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", + self.notification_class.init_parameters): + self.notification_configuration[field] = decrypt_field(self, + 'notification_configuration', + subfield=field) + recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter) + if not isinstance(recipients, list): + recipients = [recipients] + sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None) + backend_obj = self.notification_class(**self.notification_configuration) + notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients) + return backend_obj.send_messages([notification_obj]) + +class Notification(CreatedModifiedModel): + ''' + A notification event emitted when a Notifier is run + ''' + + NOTIFICATION_STATE_CHOICES = [ + ('pending', _('Pending')), + ('successful', _('Successful')), + ('failed', _('Failed')), + ] + + class Meta: + app_label = 'main' + ordering = ('pk',) + + notifier = models.ForeignKey( + 'Notifier', + related_name='notifications', + on_delete=models.CASCADE, + editable=False + ) + status = models.CharField( + max_length=20, + choices=NOTIFICATION_STATE_CHOICES, + default='pending', + editable=False, + ) + error = models.TextField( + blank=True, + default='', + editable=False, + ) + notifications_sent = models.IntegerField( + default=0, + editable=False, + ) + notification_type = models.CharField( + max_length = 32, + choices=Notifier.NOTIFICATION_TYPE_CHOICES, + ) + recipients = models.TextField( + blank=True, + default='', + editable=False, + ) + subject = models.TextField( + blank=True, + default='', + editable=False, + ) + body = JSONField(blank=True) + + def get_absolute_url(self): + return reverse('api:notification_detail', args=(self.pk,)) diff --git a/awx/main/models/organization.py b/awx/main/models/organization.py index 0cd50d9dcc..89b61f4fee 100644 --- a/awx/main/models/organization.py +++ b/awx/main/models/organization.py @@ -25,7 +25,7 @@ from awx.main.conf import tower_settings __all__ = ['Organization', 'Team', 'Permission', 'Profile', 'AuthToken'] -class Organization(CommonModel, ResourceMixin): +class Organization(CommonModel, NotificationFieldsModel, ResourceMixin): ''' An organization is the basic unit of multi-tenancy divisions ''' diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index d0fd122584..cf7f269e63 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -20,10 +20,12 @@ from django.utils.timezone import now, make_aware, get_default_timezone from awx.lib.compat import slugify from awx.main.models.base import * # noqa from awx.main.models.jobs import Job +from awx.main.models.notifications import Notifier from awx.main.models.unified_jobs import * # noqa from awx.main.models.mixins import ResourceMixin from awx.main.utils import update_scm_url from awx.main.fields import ImplicitRoleField +from awx.main.conf import tower_settings __all__ = ['Project', 'ProjectUpdate'] @@ -330,6 +332,18 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin): return True return False + 
@property + def notifiers(self): + base_notifiers = Notifier.objects.filter(active=True) + error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors=self)) + success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success=self)) + any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any=self)) + # Get Organization Notifiers + error_notifiers = set(error_notifiers + list(base_notifiers.filter(organization_notifiers_for_errors__in=self.organizations.all()))) + success_notifiers = set(success_notifiers + list(base_notifiers.filter(organization_notifiers_for_success__in=self.organizations.all()))) + any_notifiers = set(any_notifiers + list(base_notifiers.filter(organization_notifiers_for_any__in=self.organizations.all()))) + return dict(error=list(error_notifiers), success=list(success_notifiers), any=list(any_notifiers)) + def get_absolute_url(self): return reverse('api:project_detail', args=(self.pk,)) @@ -391,6 +405,9 @@ class ProjectUpdate(UnifiedJob, ProjectOptions): def get_absolute_url(self): return reverse('api:project_update_detail', args=(self.pk,)) + def get_ui_url(self): + return urlparse.urljoin(tower_settings.TOWER_URL_BASE, "/#/scm_update/{}".format(self.pk)) + def _update_parent_instance(self): parent_instance = self._get_parent_instance() if parent_instance: diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 9b4be868c3..3750ccf41e 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -17,6 +17,7 @@ from django.db import models from django.core.exceptions import NON_FIELD_ERRORS from django.utils.translation import ugettext_lazy as _ from django.utils.timezone import now +from django.utils.encoding import smart_text # Django-JSONField from jsonfield import JSONField @@ -40,7 +41,7 @@ logger = logging.getLogger('awx.main.models.unified_jobs') CAN_CANCEL = ('new', 'pending', 'waiting', 'running') -class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique): +class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, NotificationFieldsModel): ''' Concrete base class for unified job templates. ''' @@ -297,6 +298,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique): ''' return kwargs # Override if needed in subclass. + @property + def notifiers(self): + ''' + Return notifiers relevant to this Unified Job Template + ''' + # NOTE: Derived classes should override this, returning the same + # {'error': [...], 'success': [...], 'any': [...]} shape. + return dict(error=[], success=[], any=[]) + def create_unified_job(self, **kwargs): ''' Create a new unified job based on this unified job template. @@ -385,6 +394,11 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique editable=False, related_name='%(class)s_blocked_jobs+', ) + notifications = models.ManyToManyField( + 'Notification', + editable=False, + related_name='%(class)s_notifications', + ) cancel_flag = models.BooleanField( blank=True, default=False, @@ -470,6 +484,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique else: return '' + def get_ui_url(self): + real_instance = self.get_real_instance() + if real_instance != self: + return real_instance.get_ui_url() + else: + return '' + @classmethod def _get_task_class(cls): raise NotImplementedError # Implement in subclasses.
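A note on the contract shared by the `notifiers` properties introduced above (on `JobTemplate`, `Project`, and `InventorySource`): each returns a plain dict with `error`, `success`, and `any` keys mapping to lists of `Notifier` objects, with organization-level notifiers merged in and de-duplicated via `set()`. A minimal sketch of a consumer, where `job_template` stands in for any concrete `UnifiedJobTemplate` instance:

```python
# Sketch only; `job_template` is a hypothetical JobTemplate instance.
notifiers = job_template.notifiers   # {'error': [...], 'success': [...], 'any': [...]}

# On success, the task layer combines the 'success' and 'any' buckets and
# de-duplicates, since an org-level Notifier can appear in both lists.
for notifier in set(notifiers.get('success', []) + notifiers.get('any', [])):
    notifier.generate_notification("subject line", {"body": "payload"})
```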
@@ -717,7 +738,17 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique tasks that might preclude creating one''' return [] - def start(self, error_callback, **kwargs): + def notification_data(self): + return dict(id=self.id, + name=self.name, + url=self.get_ui_url(), + created_by=smart_text(self.created_by), + started=self.started.isoformat() if self.started is not None else None, + finished=self.finished.isoformat() if self.finished is not None else None, + status=self.status, + traceback=self.result_traceback) + + def start(self, error_callback, success_callback, **kwargs): ''' Start the task running via Celery. ''' @@ -743,7 +774,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique # if field not in needed]) if 'extra_vars' in kwargs: self.handle_extra_data(kwargs['extra_vars']) - task_class().apply_async((self.pk,), opts, link_error=error_callback) + task_class().apply_async((self.pk,), opts, link_error=error_callback, link=success_callback) return True def signal_start(self, **kwargs): @@ -765,7 +796,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique # Sanity check: If we are running unit tests, then run synchronously. if getattr(settings, 'CELERY_UNIT_TEST', False): - return self.start(None, **kwargs) + return self.start(None, None, **kwargs) # Save the pending status, and inform the SocketIO listener. self.update_fields(start_args=json.dumps(kwargs), status='pending') diff --git a/awx/main/notifications/__init__.py b/awx/main/notifications/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/main/notifications/base.py b/awx/main/notifications/base.py new file mode 100644 index 0000000000..8129c33e27 --- /dev/null +++ b/awx/main/notifications/base.py @@ -0,0 +1,20 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +import pprint + +from django.utils.encoding import smart_text +from django.core.mail.backends.base import BaseEmailBackend + +class TowerBaseEmailBackend(BaseEmailBackend): + + def format_body(self, body): + if "body" in body: + body_actual = body['body'] + else: + body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], + body['id'], + body['status'], + body['url'])) + body_actual += pprint.pformat(body, indent=4) + return body_actual diff --git a/awx/main/notifications/email_backend.py b/awx/main/notifications/email_backend.py new file mode 100644 index 0000000000..9a9d0a9e2d --- /dev/null +++ b/awx/main/notifications/email_backend.py @@ -0,0 +1,28 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved.
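`UnifiedJob.notification_data()` above and `TowerBaseEmailBackend.format_body()` are two halves of one contract. A sketch of the two body shapes `format_body` accepts; note that the `friendly_name` key is injected by the task handlers in `awx/main/tasks.py` later in this diff, not by `notification_data()` itself, and all values here are illustrative:

```python
# Shape 1: an explicit 'body' key is used verbatim.
explicit = {"body": "Deploy finished."}

# Shape 2: a job-style payload is summarized into a one-line header,
# then pprint-dumped underneath it.
job_style = {
    "friendly_name": "Job",                        # added by the task layer
    "id": 42,
    "status": "successful",
    "url": "https://tower.example.com/#/jobs/42",  # from get_ui_url()
    # ...plus the rest of notification_data(): name, created_by, started, ...
}
# format_body(job_style) yields roughly:
#   "Job #42 had status successful on Ansible Tower, view details at https://..."
# followed by the pretty-printed dict.
```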
+ +import pprint + +from django.utils.encoding import smart_text +from django.core.mail.backends.smtp import EmailBackend + +class CustomEmailBackend(EmailBackend): + + init_parameters = {"host": {"label": "Host", "type": "string"}, + "port": {"label": "Port", "type": "int"}, + "username": {"label": "Username", "type": "string"}, + "password": {"label": "Password", "type": "password"}, + "use_tls": {"label": "Use TLS", "type": "bool"}, + "use_ssl": {"label": "Use SSL", "type": "bool"}, + "sender": {"label": "Sender Email", "type": "string"}, + "recipients": {"label": "Recipient List", "type": "list"}} + recipient_parameter = "recipients" + sender_parameter = "sender" + + def format_body(self, body): + body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], + body['id'], + body['status'], + body['url'])) + body_actual += pprint.pformat(body, indent=4) + return body_actual diff --git a/awx/main/notifications/hipchat_backend.py b/awx/main/notifications/hipchat_backend.py new file mode 100644 index 0000000000..420ef928fa --- /dev/null +++ b/awx/main/notifications/hipchat_backend.py @@ -0,0 +1,49 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +import logging + +import requests + +from django.utils.encoding import smart_text + +from awx.main.notifications.base import TowerBaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.hipchat_backend') + +class HipChatBackend(TowerBaseEmailBackend): + + init_parameters = {"token": {"label": "Token", "type": "password"}, + "channels": {"label": "Destination Channels", "type": "list"}, + "color": {"label": "Notification Color", "type": "string"}, + "api_url": {"label": "API Url (e.g: https://mycompany.hipchat.com)", "type": "string"}, + "notify": {"label": "Notify channel", "type": "bool"}, + "message_from": {"label": "Label to be shown with notification", "type": "string"}} + recipient_parameter = "channels" + sender_parameter = "message_from" + + def __init__(self, token, color, api_url, notify, fail_silently=False, **kwargs): + super(HipChatBackend, self).__init__(fail_silently=fail_silently) + self.token = token + self.color = color + self.api_url = api_url + self.notify = notify + + def send_messages(self, messages): + sent_messages = 0 + + for m in messages: + for rcp in m.recipients(): + r = requests.post("{}/v2/room/{}/notification".format(self.api_url, rcp), + params={"auth_token": self.token}, + json={"color": self.color, + "message": m.subject, + "notify": self.notify, + "from": m.from_email, + "message_format": "text"}) + if r.status_code != 204: + logger.error(smart_text("Error sending messages: {}".format(r.text))) + if not self.fail_silently: + raise Exception(smart_text("Error sending message to hipchat: {}".format(r.text))) + sent_messages += 1 + return sent_messages diff --git a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py new file mode 100644 index 0000000000..61158bbe5d --- /dev/null +++ b/awx/main/notifications/irc_backend.py @@ -0,0 +1,95 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
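The email and HipChat backends above illustrate the plugin contract every backend in this diff follows: declare `init_parameters` (fields typed `"password"` are encrypted by `Notifier.save()`), name a `recipient_parameter` and `sender_parameter` (popped off the stored configuration by `Notifier.send()`, with the rest passed as constructor kwargs), and implement `send_messages()` returning a sent count. A hypothetical log-only backend, sketched to show the minimum surface; it is not part of this changeset:

```python
import logging

from awx.main.notifications.base import TowerBaseEmailBackend

logger = logging.getLogger('awx.main.notifications.log_backend')

class LogBackend(TowerBaseEmailBackend):
    # Notifier.send() pops recipient_parameter/sender_parameter from the stored
    # configuration and passes the remaining keys as constructor kwargs.
    init_parameters = {"level": {"label": "Log Level", "type": "string"},
                       "targets": {"label": "Target Names", "type": "list"}}
    recipient_parameter = "targets"
    sender_parameter = None

    def __init__(self, level, fail_silently=False, **kwargs):
        super(LogBackend, self).__init__(fail_silently=fail_silently)
        self.level = level

    def send_messages(self, messages):
        sent_messages = 0
        for m in messages:
            for recipient in m.recipients():
                logger.info("[%s] %s: %s", self.level, recipient, m.subject)
                sent_messages += 1
        return sent_messages
```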
+ +import time +import ssl +import logging + +import irc.client + +from django.utils.encoding import smart_text + +from awx.main.notifications.base import TowerBaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.irc_backend') + +class IrcBackend(TowerBaseEmailBackend): + + init_parameters = {"server": {"label": "IRC Server Address", "type": "string"}, + "port": {"label": "IRC Server Port", "type": "int"}, + "nickname": {"label": "IRC Nick", "type": "string"}, + "password": {"label": "IRC Server Password", "type": "password"}, + "use_ssl": {"label": "SSL Connection", "type": "bool"}, + "targets": {"label": "Destination Channels or Users", "type": "list"}} + recipient_parameter = "targets" + sender_parameter = None + + def __init__(self, server, port, nickname, password, use_ssl, fail_silently=False, **kwargs): + super(IrcBackend, self).__init__(fail_silently=fail_silently) + self.server = server + self.port = port + self.nickname = nickname + self.password = password if password != "" else None + self.use_ssl = use_ssl + self.connection = None + + def open(self): + if self.connection is not None: + return False + if self.use_ssl: + connection_factory = irc.connection.Factory(wrapper=ssl.wrap_socket) + else: + connection_factory = irc.connection.Factory() + try: + self.reactor = irc.client.Reactor() + self.connection = self.reactor.server().connect( + self.server, + self.port, + self.nickname, + password=self.password, + connect_factory=connection_factory, + ) + except irc.client.ServerConnectionError as e: + logger.error(smart_text("Exception connecting to irc server: {}".format(e))) + if not self.fail_silently: + raise + return True + + def close(self): + if self.connection is None: + return + self.connection = None + + def on_connect(self, connection, event): + for c in self.channels: + if irc.client.is_channel(c): + connection.join(c) + else: + for m in self.channels[c]: + connection.privmsg(c, m.subject) + self.channels_sent += 1 + + def on_join(self, connection, event): + for m in self.channels[event.target]: + connection.privmsg(event.target, m.subject) + self.channels_sent += 1 + + def send_messages(self, messages): + if self.connection is None: + self.open() + self.channels = {} + self.channels_sent = 0 + for m in messages: + for r in m.recipients(): + if r not in self.channels: + self.channels[r] = [] + self.channels[r].append(m) + self.connection.add_global_handler("welcome", self.on_connect) + self.connection.add_global_handler("join", self.on_join) + start_time = time.time() + process_time = time.time() + while self.channels_sent < len(self.channels) and (process_time - start_time) < 60: + self.reactor.process_once(0.1) + process_time = time.time() + self.reactor.disconnect_all() + return self.channels_sent diff --git a/awx/main/notifications/pagerduty_backend.py b/awx/main/notifications/pagerduty_backend.py new file mode 100644 index 0000000000..af6b95cfd6 --- /dev/null +++ b/awx/main/notifications/pagerduty_backend.py @@ -0,0 +1,49 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
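Unlike the HTTP-based backends, the IRC backend above cannot simply post and return: it must connect, join each channel when the server's welcome arrives, and only send once each join completes. Its sending loop is therefore event-driven and time-bounded. A condensed sketch of that pattern, where `backend` is a hypothetical `IrcBackend` that has already queued messages into `backend.channels` and registered its `welcome`/`join` handlers:

```python
import time

start_time = time.time()
# Pump the reactor until every channel has been serviced or 60s elapse.
while backend.channels_sent < len(backend.channels) and (time.time() - start_time) < 60:
    backend.reactor.process_once(0.1)   # dispatch pending IRC events
backend.reactor.disconnect_all()        # give up on channels not reached in time
```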
+ +import logging +import pygerduty + +from django.utils.encoding import smart_text + +from awx.main.notifications.base import TowerBaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.pagerduty_backend') + +class PagerDutyBackend(TowerBaseEmailBackend): + + init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"}, + "token": {"label": "API Token", "type": "password"}, + "service_key": {"label": "API Service/Integration Key", "type": "string"}, + "client_name": {"label": "Client Identifier", "type": "string"}} + recipient_parameter = "service_key" + sender_parameter = "client_name" + + def __init__(self, subdomain, token, fail_silently=False, **kwargs): + super(PagerDutyBackend, self).__init__(fail_silently=fail_silently) + self.subdomain = subdomain + self.token = token + + def format_body(self, body): + return body + + def send_messages(self, messages): + sent_messages = 0 + + try: + pager = pygerduty.PagerDuty(self.subdomain, self.token) + except Exception as e: + logger.error(smart_text("Exception connecting to PagerDuty: {}".format(e))) + if not self.fail_silently: + raise + return sent_messages + for m in messages: + try: + pager.trigger_incident(m.recipients()[0], + description=m.subject, + details=m.body, + client=m.from_email) + sent_messages += 1 + except Exception as e: + logger.error(smart_text("Exception sending messages: {}".format(e))) + if not self.fail_silently: + raise + return sent_messages diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py new file mode 100644 index 0000000000..00f23ed60c --- /dev/null +++ b/awx/main/notifications/slack_backend.py @@ -0,0 +1,52 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +import logging +from slackclient import SlackClient + +from django.utils.encoding import smart_text + +from awx.main.notifications.base import TowerBaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.slack_backend') + +class SlackBackend(TowerBaseEmailBackend): + + init_parameters = {"token": {"label": "Token", "type": "password"}, + "channels": {"label": "Destination Channels", "type": "list"}} + recipient_parameter = "channels" + sender_parameter = None + + def __init__(self, token, fail_silently=False, **kwargs): + super(SlackBackend, self).__init__(fail_silently=fail_silently) + self.token = token + self.connection = None + + def open(self): + if self.connection is not None: + return False + self.connection = SlackClient(self.token) + if not self.connection.rtm_connect(): + if not self.fail_silently: + raise Exception("Slack Notification Token is invalid") + return True + + def close(self): + if self.connection is None: + return + self.connection = None + + def send_messages(self, messages): + if self.connection is None: + self.open() + sent_messages = 0 + for m in messages: + try: + for r in m.recipients(): + self.connection.rtm_send_message(r, m.subject) + sent_messages += 1 + except Exception as e: + logger.error(smart_text("Exception sending messages: {}".format(e))) + if not self.fail_silently: + raise + return sent_messages diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py new file mode 100644 index 0000000000..df411c68c5 --- /dev/null +++ b/awx/main/notifications/twilio_backend.py @@ -0,0 +1,48 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved.
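It is worth noting why PagerDuty, Slack, and the other non-email transports still receive Django `EmailMessage` objects: `Notifier.send()` wraps every notification in one, so each backend can read the generic `subject`, `body`, `from_email`, and `recipients()` fields and map them onto its own API. A sketch of that mapping, with illustrative values:

```python
from django.core.mail.message import EmailMessage

# Built by Notifier.send(); field names map onto each backend's parameters.
msg = EmailMessage(
    subject="Job #42 'deploy' succeeded on Ansible Tower: https://...",
    body={"id": 42, "status": "successful"},  # a dict is fine: these backends never SMTP-send it
    from_email="tower-bot",                   # sender_parameter (e.g. Twilio from_number)
    to=["#ops"],                              # recipient_parameter (e.g. Slack channels)
)
msg.recipients()   # ['#ops'] -- what Slack/HipChat/IRC iterate over
```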
+ +import logging + +from twilio.rest import TwilioRestClient + +from django.utils.encoding import smart_text + +from awx.main.notifications.base import TowerBaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.twilio_backend') + +class TwilioBackend(TowerBaseEmailBackend): + + init_parameters = {"account_sid": {"label": "Account SID", "type": "string"}, + "account_token": {"label": "Account Token", "type": "password"}, + "from_number": {"label": "Source Phone Number", "type": "string"}, + "to_numbers": {"label": "Destination SMS Numbers", "type": "list"}} + recipient_parameter = "to_numbers" + sender_parameter = "from_number" + + def __init__(self, account_sid, account_token, fail_silently=False, **kwargs): + super(TwilioBackend, self).__init__(fail_silently=fail_silently) + self.account_sid = account_sid + self.account_token = account_token + + def send_messages(self, messages): + sent_messages = 0 + try: + connection = TwilioRestClient(self.account_sid, self.account_token) + except Exception as e: + logger.error(smart_text("Exception connecting to Twilio: {}".format(e))) + if not self.fail_silently: + raise + return sent_messages + + for m in messages: + try: + connection.messages.create( + to=m.to, + from_=m.from_email, + body=m.subject) + sent_messages += 1 + except Exception as e: + logger.error(smart_text("Exception sending messages: {}".format(e))) + if not self.fail_silently: + raise + return sent_messages diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py new file mode 100644 index 0000000000..52d85483ab --- /dev/null +++ b/awx/main/notifications/webhook_backend.py @@ -0,0 +1,39 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +import logging +import requests +import json + +from django.utils.encoding import smart_text + +from awx.main.notifications.base import TowerBaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.webhook_backend') + +class WebhookBackend(TowerBaseEmailBackend): + + init_parameters = {"url": {"label": "Target URL", "type": "string"}, + "headers": {"label": "HTTP Headers", "type": "object"}} + recipient_parameter = "url" + sender_parameter = None + + def __init__(self, headers, fail_silently=False, **kwargs): + self.headers = headers + super(WebhookBackend, self).__init__(fail_silently=fail_silently) + + def format_body(self, body): + return body + + def send_messages(self, messages): + sent_messages = 0 + for m in messages: + r = requests.post("{}".format(m.recipients()[0]), + data=json.dumps(m.body), + headers=self.headers) + if r.status_code >= 400: + logger.error(smart_text("Error sending notification webhook: {}".format(r.text))) + if not self.fail_silently: + raise Exception(smart_text("Error sending notification webhook: {}".format(r.text))) + sent_messages += 1 + return sent_messages diff --git a/awx/main/signals.py b/awx/main/signals.py index 6451da0fe6..b3e84e46d5 100644 --- a/awx/main/signals.py +++ b/awx/main/signals.py @@ -387,6 +387,8 @@ model_serializer_mapping = { Job: JobSerializer, AdHocCommand: AdHocCommandSerializer, TowerSettings: TowerSettingsSerializer, + Notifier: NotifierSerializer, + Notification: NotificationSerializer, } def activity_stream_create(sender, instance, created, **kwargs): diff --git a/awx/main/tasks.py b/awx/main/tasks.py index acfe2022ae..509c5d1e7e 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -39,6 +39,9 @@ from celery import Task, task from django.conf import settings from django.db import transaction, DatabaseError from
django.utils.timezone import now +from django.utils.encoding import smart_text +from django.core.mail import send_mail +from django.contrib.auth.models import User # AWX from awx.lib.metrics import task_timer @@ -46,13 +49,15 @@ from awx.main.constants import CLOUD_PROVIDERS from awx.main.models import * # noqa from awx.main.queue import FifoQueue from awx.main.conf import tower_settings +from awx.main.task_engine import TaskSerializer, TASK_TIMEOUT_INTERVAL from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url, ignore_inventory_computed_fields, emit_websocket_notification, check_proot_installed, build_proot_temp_dir, wrap_args_with_proot) from awx.fact.utils.connection import test_mongo_connection __all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate', - 'RunAdHocCommand', 'handle_work_error', 'update_inventory_computed_fields'] + 'RunAdHocCommand', 'handle_work_error', 'handle_work_success', + 'update_inventory_computed_fields', 'send_notifications', 'run_administrative_checks'] HIDDEN_PASSWORD = '**********' @@ -64,6 +69,48 @@ Try upgrading OpenSSH or providing your private key in an different format. \ logger = logging.getLogger('awx.main.tasks') +@task() +def send_notifications(notification_list, job_id=None): + if not isinstance(notification_list, list): + raise TypeError("notification_list should be of type list") + if job_id is not None: + job_actual = UnifiedJob.objects.get(id=job_id) + for notification_id in notification_list: + notification = Notification.objects.get(id=notification_id) + try: + sent = notification.notifier.send(notification.subject, notification.body) + notification.status = "successful" + notification.notifications_sent = sent + except Exception as e: + logger.error("Send Notification Failed {}".format(e)) + notification.status = "failed" + notification.error = smart_text(e) + finally: + notification.save() + if job_id is not None: + job_actual.notifications.add(notification) + +@task(bind=True) +def run_administrative_checks(self): + if not tower_settings.TOWER_ADMIN_ALERTS: + return + reader = TaskSerializer() + validation_info = reader.from_database() + if validation_info.get('instance_count', 0) < 1: + return + used_percentage = float(validation_info.get('current_instances', 0)) / float(validation_info.get('instance_count', 100)) + tower_admin_emails = User.objects.filter(is_superuser=True).values_list('email', flat=True) + if (used_percentage * 100) > 90: + send_mail("Ansible Tower host usage over 90%", + "Ansible Tower host usage over 90%", + settings.DEFAULT_FROM_EMAIL, + tower_admin_emails, + fail_silently=True) + if validation_info.get('time_remaining', 0) < TASK_TIMEOUT_INTERVAL: + send_mail("Ansible Tower license will expire soon", + "Ansible Tower license will expire soon", + settings.DEFAULT_FROM_EMAIL, + tower_admin_emails, + fail_silently=True) + @task() def bulk_inventory_element_delete(inventory, hosts=[], groups=[]): from awx.main.signals import disable_activity_stream @@ -134,7 +181,6 @@ def notify_task_runner(metadata_dict): queue = FifoQueue('tower_task_manager') queue.push(metadata_dict) - @task() def mongodb_control(cmd): # Sanity check: Do not send arbitrary commands.
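`send_notifications` above expects a list of `Notification` primary keys, not model instances, because task arguments are serialized across the Celery broker. The success and error handlers added below therefore create the `Notification` rows synchronously and hand off only their ids. A sketch of the hand-off, where `to_notify`, `subject`, `body`, and `instance` stand in for the handlers' locals:

```python
# Notifications are saved rows by the time delay() is called; only ids cross the broker.
notifications = [n.generate_notification(subject, body) for n in to_notify]
send_notifications.delay([n.id for n in notifications], job_id=instance.id)
```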
@@ -159,6 +205,39 @@ def mongodb_control(cmd): p = subprocess.Popen('sudo mongod --shutdown -f /etc/mongod.conf', shell=True) p.wait() +@task(bind=True) +def handle_work_success(self, result, task_actual): + if task_actual['type'] == 'project_update': + instance = ProjectUpdate.objects.get(id=task_actual['id']) + instance_name = instance.name + notifiers = instance.project.notifiers + friendly_name = "Project Update" + elif task_actual['type'] == 'inventory_update': + instance = InventoryUpdate.objects.get(id=task_actual['id']) + instance_name = instance.name + notifiers = instance.inventory_source.notifiers + friendly_name = "Inventory Update" + elif task_actual['type'] == 'job': + instance = Job.objects.get(id=task_actual['id']) + instance_name = instance.job_template.name + notifiers = instance.job_template.notifiers + friendly_name = "Job" + elif task_actual['type'] == 'ad_hoc_command': + instance = AdHocCommand.objects.get(id=task_actual['id']) + instance_name = instance.module_name + notifiers = {} # TODO: Ad-hoc commands need to notify someone + friendly_name = "AdHoc Command" + else: + return + notification_body = instance.notification_data() + notification_body['friendly_name'] = friendly_name + notification_subject = "{} #{} '{}' succeeded on Ansible Tower: {}".format(friendly_name, + task_actual['id'], + instance_name, + notification_body['url']) + send_notifications.delay([n.generate_notification(notification_subject, notification_body).id + for n in set(notifiers.get('success', []) + notifiers.get('any', []))], + job_id=task_actual['id']) + @task(bind=True) def handle_work_error(self, task_id, subtasks=None): print('Executing error task id %s, subtasks: %s' % @@ -173,15 +252,23 @@ def handle_work_error(self, task_id, subtasks=None): if each_task['type'] == 'project_update': instance = ProjectUpdate.objects.get(id=each_task['id']) instance_name = instance.name + notifiers = instance.project.notifiers + friendly_name = "Project Update" elif each_task['type'] == 'inventory_update': instance = InventoryUpdate.objects.get(id=each_task['id']) instance_name = instance.name + notifiers = instance.inventory_source.notifiers + friendly_name = "Inventory Update" elif each_task['type'] == 'job': instance = Job.objects.get(id=each_task['id']) instance_name = instance.job_template.name + notifiers = instance.job_template.notifiers + friendly_name = "Job" elif each_task['type'] == 'ad_hoc_command': instance = AdHocCommand.objects.get(id=each_task['id']) instance_name = instance.module_name + notifiers = {} + friendly_name = "AdHoc Command" else: # Unknown task type break @@ -190,6 +277,7 @@ first_task_id = instance.id first_task_type = each_task['type'] first_task_name = instance_name + first_task_friendly_name = friendly_name if instance.celery_task_id != task_id: instance.status = 'failed' instance.failed = True @@ -197,6 +285,16 @@ (first_task_type, first_task_name, first_task_id) instance.save() instance.socketio_emit_status("failed") + notification_body = first_task.notification_data() + notification_subject = "{} #{} '{}' failed on Ansible Tower: {}".format(first_task_friendly_name, + first_task_id, + first_task_name, + notification_body['url']) + notification_body['friendly_name'] = first_task_friendly_name + send_notifications.delay([n.generate_notification(notification_subject, notification_body).id + for n in set(notifiers.get('error', []) + notifiers.get('any', []))], + job_id=first_task_id) + @task() def
update_inventory_computed_fields(inventory_id, should_update_hosts=True): diff --git a/awx/main/tests/functional/ansible.json b/awx/main/tests/functional/ansible.json new file mode 100644 index 0000000000..e877df2ad1 --- /dev/null +++ b/awx/main/tests/functional/ansible.json @@ -0,0 +1,283 @@ +{ + "ansible_all_ipv4_addresses": [ + "172.17.0.7" + ], + "ansible_all_ipv6_addresses": [ + "fe80::42:acff:fe11:7" + ], + "ansible_architecture": "x86_64", + "ansible_bios_date": "12/01/2006", + "ansible_bios_version": "VirtualBox", + "ansible_cmdline": { + "BOOT_IMAGE": "/boot/vmlinuz64", + "base": true, + "console": "tty0", + "initrd": "/boot/initrd.img", + "loglevel": "3", + "noembed": true, + "nomodeset": true, + "norestore": true, + "user": "docker", + "waitusb": "10:LABEL=boot2docker-data" + }, + "ansible_date_time": { + "date": "2016-02-02", + "day": "02", + "epoch": "1454424257", + "hour": "14", + "iso8601": "2016-02-02T14:44:17Z", + "iso8601_basic": "20160202T144417348424", + "iso8601_basic_short": "20160202T144417", + "iso8601_micro": "2016-02-02T14:44:17.348496Z", + "minute": "44", + "month": "02", + "second": "17", + "time": "14:44:17", + "tz": "UTC", + "tz_offset": "+0000", + "weekday": "Tuesday", + "weekday_number": "2", + "weeknumber": "05", + "year": "2016" + }, + "ansible_default_ipv4": { + "address": "172.17.0.7", + "alias": "eth0", + "broadcast": "global", + "gateway": "172.17.0.1", + "interface": "eth0", + "macaddress": "02:42:ac:11:00:07", + "mtu": 1500, + "netmask": "255.255.0.0", + "network": "172.17.0.0", + "type": "ether" + }, + "ansible_default_ipv6": {}, + "ansible_devices": { + "sda": { + "holders": [], + "host": "", + "model": "VBOX HARDDISK", + "partitions": { + "sda1": { + "sectors": "510015555", + "sectorsize": 512, + "size": "243.19 GB", + "start": "1975995" + }, + "sda2": { + "sectors": "1975932", + "sectorsize": 512, + "size": "964.81 MB", + "start": "63" + } + }, + "removable": "0", + "rotational": "0", + "scheduler_mode": "deadline", + "sectors": "512000000", + "sectorsize": "512", + "size": "244.14 GB", + "support_discard": "0", + "vendor": "ATA" + }, + "sr0": { + "holders": [], + "host": "", + "model": "CD-ROM", + "partitions": {}, + "removable": "1", + "rotational": "1", + "scheduler_mode": "deadline", + "sectors": "61440", + "sectorsize": "2048", + "size": "120.00 MB", + "support_discard": "0", + "vendor": "VBOX" + } + }, + "ansible_distribution": "Ubuntu", + "ansible_distribution_major_version": "14", + "ansible_distribution_release": "trusty", + "ansible_distribution_version": "14.04", + "ansible_dns": { + "nameservers": [ + "8.8.8.8" + ] + }, + "ansible_domain": "", + "ansible_env": { + "HOME": "/root", + "HOSTNAME": "ede894599989", + "LANG": "en_US.UTF-8", + "LC_ALL": "en_US.UTF-8", + "LC_MESSAGES": "en_US.UTF-8", + "LESSCLOSE": "/usr/bin/lesspipe %s %s", + "LESSOPEN": "| /usr/bin/lesspipe %s", + "LS_COLORS": "", + "OLDPWD": "/ansible", + "PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "PWD": "/ansible/examples", + "SHLVL": "1", + "_": "/usr/local/bin/ansible", + "container": "docker" + }, + "ansible_eth0": { + "active": true, + "device": "eth0", + "ipv4": { + "address": "172.17.0.7", + "broadcast": "global", + "netmask": "255.255.0.0", + "network": "172.17.0.0" + }, + "ipv6": [ + { + "address": "fe80::42:acff:fe11:7", + "prefix": "64", + "scope": "link" + } + ], + "macaddress": "02:42:ac:11:00:07", + "mtu": 1500, + "promisc": false, + "type": "ether" + }, + "ansible_fips": false, + "ansible_form_factor": "Other", + 
"ansible_fqdn": "ede894599989", + "ansible_hostname": "ede894599989", + "ansible_interfaces": [ + "lo", + "eth0" + ], + "ansible_kernel": "4.1.12-boot2docker", + "ansible_lo": { + "active": true, + "device": "lo", + "ipv4": { + "address": "127.0.0.1", + "broadcast": "host", + "netmask": "255.0.0.0", + "network": "127.0.0.0" + }, + "ipv6": [ + { + "address": "::1", + "prefix": "128", + "scope": "host" + } + ], + "mtu": 65536, + "promisc": false, + "type": "loopback" + }, + "ansible_lsb": { + "codename": "trusty", + "description": "Ubuntu 14.04.3 LTS", + "id": "Ubuntu", + "major_release": "14", + "release": "14.04" + }, + "ansible_machine": "x86_64", + "ansible_memfree_mb": 3746, + "ansible_memory_mb": { + "nocache": { + "free": 8896, + "used": 3638 + }, + "real": { + "free": 3746, + "total": 12534, + "used": 8788 + }, + "swap": { + "cached": 0, + "free": 4048, + "total": 4048, + "used": 0 + } + }, + "ansible_memtotal_mb": 12534, + "ansible_mounts": [ + { + "device": "/dev/sda1", + "fstype": "ext4", + "mount": "/etc/resolv.conf", + "options": "rw,relatime,data=ordered", + "size_available": 201281392640, + "size_total": 256895700992, + "uuid": "NA" + }, + { + "device": "/dev/sda1", + "fstype": "ext4", + "mount": "/etc/hostname", + "options": "rw,relatime,data=ordered", + "size_available": 201281392640, + "size_total": 256895700992, + "uuid": "NA" + }, + { + "device": "/dev/sda1", + "fstype": "ext4", + "mount": "/etc/hosts", + "options": "rw,relatime,data=ordered", + "size_available": 201281392640, + "size_total": 256895700992, + "uuid": "NA" + } + ], + "ansible_nodename": "ede894599989", + "ansible_os_family": "Debian", + "ansible_pkg_mgr": "apt", + "ansible_processor": [ + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz" + ], + "ansible_processor_cores": 8, + "ansible_processor_count": 1, + "ansible_processor_threads_per_core": 1, + "ansible_processor_vcpus": 8, + "ansible_product_name": "VirtualBox", + "ansible_product_serial": "0", + "ansible_product_uuid": "25C5EA5A-1DF1-48D9-A2C6-81227DA153C0", + "ansible_product_version": "1.2", + "ansible_python_version": "2.7.6", + "ansible_selinux": false, + "ansible_service_mgr": "upstart", + "ansible_ssh_host_key_dsa_public": "AAAAB3NzaC1kc3MAAACBALF0xsM8UMXgSKiWNw4t19wxbxLnxQX742t/dIM0O8YLx+/lIP+Q69Dv5uoVt0zKV39eFziRlCh96qj2KYkGEJ6XfVZFnhpculL2Pv2CPpSwKuQ1vTbDO/xxUrvY+bHpfNJf9Rh69bFEE2pTsjomFPCgp8M0qGaFtwg6czSaeBONAAAAFQCGEfVtj97JiexTVRqgQITYlFp/eQAAAIEAg+S9qWn+AIb3amwVoLL/usQYOPCmZY9RVPzpkjJ6OG+HI4B7cXeauPtNTJwT0f9vGEqzf4mPpmS+aCShj6iwdmJ+cOwR5+SJlNalab3CMBoXKVLbT1J2XWFlK0szKKnoReP96IDbkAkGQ3fkm4jz0z6Wy0u6wOQVNcd4G5cwLZ4AAACAFvBm+H1LwNrwWBjWio+ayhglZ4Y25mLMEn2+dqBz0gLK5szEbft1HMPOWIVHvl6vi3v34pAJHKpxXpkLlNliTn8iw9BzCOrgP4V8sp2/85mxEuCdI1w/QERj9cHu5iS2pZ0cUwDE3pfuuGBB3IEliaJyaapowdrM8lN12jQl11E=", + "ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHiYp4e9RfXpxDcEWpK4EuXPHW9++xcFI9hiB0TYAZgxEF9RIgwfucpPawFk7HIFoNc7EXQMlryilLSbg155KWM=", + "ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAILclD2JaC654azEsAfcHRIOA2Ig9/Qk6MX80i/VCEdSH", 
+ "ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABAQDeSUGxZaZsgBsezld0mj3HcbAwx6aykGnejceBjcs6lVwSGMHevofzSXIQDPYBhZoyWNl0PYAHv6AsQ8+3khd2SitUMJAuHSz1ZjgHCCGQP9ijXTKHn+lWCKA8rhLG/dwYwiouoOPZfn1G+erbKO6XiVbELrrf2RadnMGuMinESIOKVj3IunXsaGRMsDOQferOnUf7MvH7xpQnoySyQ1+p4rGruaohWG+Y2cDo7+B2FylPVbrpRDDJkfbt4J96WHx0KOdD0qzOicQP8JqDflqQPJJCWcgrvjQOSe4gXdPB6GZDtBl2qgQRwt1IgizPMm+b7Bwbd2VDe1TeWV2gT/7H", + "ansible_swapfree_mb": 4048, + "ansible_swaptotal_mb": 4048, + "ansible_system": "Linux", + "ansible_system_vendor": "innotek GmbH", + "ansible_uptime_seconds": 178398, + "ansible_user_dir": "/root", + "ansible_user_gecos": "root", + "ansible_user_gid": 0, + "ansible_user_id": "root", + "ansible_user_shell": "/bin/bash", + "ansible_user_uid": 0, + "ansible_userspace_architecture": "x86_64", + "ansible_userspace_bits": "64", + "ansible_virtualization_role": "guest", + "ansible_virtualization_type": "docker", + "module_setup": true +} diff --git a/awx/main/tests/functional/test_activity_streams.py b/awx/main/tests/functional/api/test_activity_streams.py similarity index 100% rename from awx/main/tests/functional/test_activity_streams.py rename to awx/main/tests/functional/api/test_activity_streams.py diff --git a/awx/main/tests/functional/api/test_fact_versions.py b/awx/main/tests/functional/api/test_fact_versions.py new file mode 100644 index 0000000000..b203c3deff --- /dev/null +++ b/awx/main/tests/functional/api/test_fact_versions.py @@ -0,0 +1,236 @@ +# Python +import mock +import pytest +from datetime import timedelta +import urlparse +import urllib + +# AWX +from awx.main.models.fact import Fact +from awx.main.utils import timestamp_apiformat + +# Django +from django.core.urlresolvers import reverse +from django.utils import timezone + +def mock_feature_enabled(feature, bypass_database=None): + return True + +def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), get_params={}, host_count=1): + hosts = hosts(host_count=host_count) + fact_scans(fact_scans=3, timestamp_epoch=epoch) + + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = get(url, user('admin', True), data=get_params) + + return (hosts[0], response) + +def check_url(url1_full, fact_known, module): + url1_split = urlparse.urlsplit(url1_full) + url1 = url1_split.path + url1_params = urlparse.parse_qsl(url1_split.query) + + url2 = reverse('api:host_fact_compare_view', args=(fact_known.host.pk,)) + url2_params = [('module', module), ('datetime', timestamp_apiformat(fact_known.timestamp))] + + assert url1 == url2 + assert urllib.urlencode(url1_params) == urllib.urlencode(url2_params) + +def check_response_facts(facts_known, response): + for i, fact_known in enumerate(facts_known): + assert fact_known.module == response.data['results'][i]['module'] + assert timestamp_apiformat(fact_known.timestamp) == response.data['results'][i]['timestamp'] + check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_no_facts_db(hosts, get, user): + hosts = hosts(host_count=1) + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = get(url, user('admin', True)) + + response_expected = { + 'results': [] + } + assert response_expected == response.data + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_basic_fields(hosts, fact_scans, get, user): + epoch = timezone.now() + search 
= { + 'from': epoch, + 'to': epoch, + } + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search) + + results = response.data['results'] + assert 'related' in results[0] + assert 'timestamp' in results[0] + assert 'module' in results[0] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +@pytest.mark.skipif(True, reason="Options fix landed in devel but not here. Enable this after this pr gets merged.") +def test_basic_options_fields(hosts, fact_scans, options, user): + hosts = hosts(host_count=1) + fact_scans(fact_scans=1) + + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = options(url, user('admin', True), pk=hosts[0].id) + + #import json + #print(json.dumps(response.data)) + assert 'related' in response.data + assert 'id' in response.data + assert 'facts' in response.data + assert 'module' in response.data + assert 'host' in response.data + assert isinstance(response.data['host'], int) + assert 'summary_fields' in response.data + assert 'host' in response.data['summary_fields'] + assert 'name' in response.data['summary_fields']['host'] + assert 'description' in response.data['summary_fields']['host'] + assert 'host' in response.data['related'] + assert reverse('api:host_detail', args=(hosts[0].pk,)) == response.data['related']['host'] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_related_fact_view(hosts, fact_scans, get, user): + epoch = timezone.now() + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch) + facts_known = Fact.get_timeline(host.id) + assert 9 == len(facts_known) + assert 9 == len(response.data['results']) + + for i, fact_known in enumerate(facts_known): + check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_multiple_hosts(hosts, fact_scans, get, user): + epoch = timezone.now() + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, host_count=3) + facts_known = Fact.get_timeline(host.id) + assert 9 == len(facts_known) + assert 9 == len(response.data['results']) + + for i, fact_known in enumerate(facts_known): + check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_param_to_from(hosts, fact_scans, get, user): + epoch = timezone.now() + search = { + 'from': epoch - timedelta(days=10), + 'to': epoch + timedelta(days=10), + } + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search) + facts_known = Fact.get_timeline(host.id, ts_from=search['from'], ts_to=search['to']) + assert 9 == len(facts_known) + assert 9 == len(response.data['results']) + + check_response_facts(facts_known, response) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_param_module(hosts, fact_scans, get, user): + epoch = timezone.now() + search = { + 'module': 'packages', + } + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search) + facts_known = Fact.get_timeline(host.id, module=search['module']) + assert 3 == len(facts_known) + assert 3 == len(response.data['results']) + + check_response_facts(facts_known, response) + 
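The from/to tests around here pin down `Fact.get_timeline`'s interval convention: `ts_from` is exclusive (`timestamp__gt`), `ts_to` is inclusive (`timestamp__lte`), and equal endpoints collapse to an exact-timestamp match. In sketch form (the host id is illustrative):

```python
from datetime import timedelta
from django.utils import timezone

from awx.main.models.fact import Fact

now = timezone.now()
week = Fact.get_timeline(1, ts_from=now - timedelta(days=7), ts_to=now)  # (from, to]
exact = Fact.get_timeline(1, ts_from=now, ts_to=now)                     # timestamp == now
pkgs = Fact.get_timeline(1, module='packages')                           # newest first
```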
+@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_param_from(hosts, fact_scans, get, user): + epoch = timezone.now() + search = { + 'from': epoch + timedelta(days=1), + } + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search) + facts_known = Fact.get_timeline(host.id, ts_from=search['from']) + assert 3 == len(facts_known) + assert 3 == len(response.data['results']) + + check_response_facts(facts_known, response) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_param_to(hosts, fact_scans, get, user): + epoch = timezone.now() + search = { + 'to': epoch + timedelta(days=1), + } + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search) + facts_known = Fact.get_timeline(host.id, ts_to=search['to']) + assert 6 == len(facts_known) + assert 6 == len(response.data['results']) + + check_response_facts(facts_known, response) + +def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj): + hosts = hosts(host_count=1) + fact_scans(fact_scans=1) + + team_obj.users.add(user_obj) + + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = get(url, user_obj) + return response + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_normal_user_403(hosts, fact_scans, get, user, team): + user_bob = user('bob', False) + response = _test_user_access_control(hosts, fact_scans, get, user_bob, team) + + assert 403 == response.status_code + assert "You do not have permission to perform this action." == response.data['detail'] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_super_user_ok(hosts, fact_scans, get, user, team): + user_super = user('bob', True) + response = _test_user_access_control(hosts, fact_scans, get, user_super, team) + + assert 200 == response.status_code + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_user_admin_ok(organization, hosts, fact_scans, get, user, team): + user_admin = user('johnson', False) + organization.admins.add(user_admin) + + response = _test_user_access_control(hosts, fact_scans, get, user_admin, team) + + assert 200 == response.status_code + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_user_admin_403(organization, organizations, hosts, fact_scans, get, user, team): + user_admin = user('johnson', False) + org2 = organizations(1) + org2[0].admins.add(user_admin) + + response = _test_user_access_control(hosts, fact_scans, get, user_admin, team) + + assert 403 == response.status_code + diff --git a/awx/main/tests/functional/api/test_fact_view.py b/awx/main/tests/functional/api/test_fact_view.py new file mode 100644 index 0000000000..e6cd724d91 --- /dev/null +++ b/awx/main/tests/functional/api/test_fact_view.py @@ -0,0 +1,156 @@ +import mock +import pytest +import json +import urllib + +from awx.main.utils import timestamp_apiformat +from django.core.urlresolvers import reverse +from django.utils import timezone + +def mock_feature_enabled(feature, bypass_database=None): + return True + +# TODO: Consider making the fact_scan() fixture a Class, instead of a function, and move this method into it +def find_fact(facts, host_id, module_name, timestamp): + for 
f in facts: + if f.host_id == host_id and f.module == module_name and f.timestamp == timestamp: + return f + raise RuntimeError('fact <%s, %s, %s> not found in %s' % (host_id, module_name, timestamp, facts)) + +def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), module_name='ansible', get_params={}): + hosts = hosts(host_count=1) + facts = fact_scans(fact_scans=1, timestamp_epoch=epoch) + + url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) + response = get(url, user('admin', True), data=get_params) + + fact_known = find_fact(facts, hosts[0].id, module_name, epoch) + return (fact_known, response) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_no_fact_found(hosts, get, user): + hosts = hosts(host_count=1) + url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) + response = get(url, user('admin', True)) + + expected_response = { + "detail": "Fact not found" + } + assert 404 == response.status_code + assert expected_response == response.data + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_basic_fields(hosts, fact_scans, get, user): + hosts = hosts(host_count=1) + fact_scans(fact_scans=1) + + url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) + response = get(url, user('admin', True)) + + assert 'related' in response.data + assert 'id' in response.data + assert 'facts' in response.data + assert 'module' in response.data + assert 'host' in response.data + assert isinstance(response.data['host'], int) + assert 'summary_fields' in response.data + assert 'host' in response.data['summary_fields'] + assert 'name' in response.data['summary_fields']['host'] + assert 'description' in response.data['summary_fields']['host'] + assert 'host' in response.data['related'] + assert reverse('api:host_detail', args=(hosts[0].pk,)) == response.data['related']['host'] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_content(hosts, fact_scans, get, user, fact_ansible_json): + (fact_known, response) = setup_common(hosts, fact_scans, get, user) + + assert fact_known.host_id == response.data['host'] + assert fact_ansible_json == json.loads(response.data['facts']) + assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp'] + assert fact_known.module == response.data['module'] + +def _test_search_by_module(hosts, fact_scans, get, user, fact_json, module_name): + params = { + 'module': module_name + } + (fact_known, response) = setup_common(hosts, fact_scans, get, user, module_name=module_name, get_params=params) + + assert fact_json == json.loads(response.data['facts']) + assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp'] + assert module_name == response.data['module'] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_search_by_module_packages(hosts, fact_scans, get, user, fact_packages_json): + _test_search_by_module(hosts, fact_scans, get, user, fact_packages_json, 'packages') + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_search_by_module_services(hosts, fact_scans, get, user, fact_services_json): + _test_search_by_module(hosts, fact_scans, get, user, fact_services_json, 'services') + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def
test_search_by_timestamp_and_module(hosts, fact_scans, get, user, fact_packages_json): + epoch = timezone.now() + module_name = 'packages' + + (fact_known, response) = setup_common(hosts, fact_scans, get, user, module_name=module_name, epoch=epoch, get_params=dict(module=module_name, datetime=epoch)) + + assert fact_known.id == response.data['id'] + +def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj): + hosts = hosts(host_count=1) + fact_scans(fact_scans=1) + + team_obj.users.add(user_obj) + + url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) + response = get(url, user_obj) + return response + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_normal_user_403(hosts, fact_scans, get, user, team): + user_bob = user('bob', False) + response = _test_user_access_control(hosts, fact_scans, get, user_bob, team) + + assert 403 == response.status_code + assert "You do not have permission to perform this action." == response.data['detail'] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_super_user_ok(hosts, fact_scans, get, user, team): + user_super = user('bob', True) + response = _test_user_access_control(hosts, fact_scans, get, user_super, team) + + assert 200 == response.status_code + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_user_admin_ok(organization, hosts, fact_scans, get, user, team): + user_admin = user('johnson', False) + organization.admins.add(user_admin) + + response = _test_user_access_control(hosts, fact_scans, get, user_admin, team) + + assert 200 == response.status_code + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_user_admin_403(organization, organizations, hosts, fact_scans, get, user, team): + user_admin = user('johnson', False) + org2 = organizations(1) + org2[0].admins.add(user_admin) + + response = _test_user_access_control(hosts, fact_scans, get, user_admin, team) + + assert 403 == response.status_code + diff --git a/awx/main/tests/functional/api/test_host_detail.py b/awx/main/tests/functional/api/test_host_detail.py new file mode 100644 index 0000000000..79213490b0 --- /dev/null +++ b/awx/main/tests/functional/api/test_host_detail.py @@ -0,0 +1,17 @@ +# TODO: As of writing this our only concern is ensuring that the fact feature is reflected in the Host endpoint. +# Other host tests should live here to make this test suite more complete. 
+import pytest
+
+from django.core.urlresolvers import reverse
+
+@pytest.mark.django_db
+def test_basic_fields(hosts, fact_scans, get, user):
+    hosts = hosts(host_count=1)
+
+    url = reverse('api:host_detail', args=(hosts[0].pk,))
+    response = get(url, user('admin', True))
+
+    assert 'related' in response.data
+    assert 'fact_versions' in response.data['related']
+    assert reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) == response.data['related']['fact_versions']
+
diff --git a/awx/main/tests/functional/commands/__init__.py b/awx/main/tests/functional/commands/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/awx/main/tests/functional/commands/conftest.py b/awx/main/tests/functional/commands/conftest.py
new file mode 100644
index 0000000000..2de8846b0a
--- /dev/null
+++ b/awx/main/tests/functional/commands/conftest.py
@@ -0,0 +1,109 @@
+import pytest
+import time
+
+from datetime import datetime
+
+@pytest.fixture
+def fact_msg_base(inventory, hosts):
+    host_objs = hosts(1)
+    return {
+        'host': host_objs[0].name,
+        'date_key': time.mktime(datetime.utcnow().timetuple()),
+        'facts' : { },
+        'inventory_id': inventory.id
+    }
+
+@pytest.fixture
+def fact_msg_small(fact_msg_base):
+    fact_msg_base['facts'] = {
+        'packages': {
+            "accountsservice": [
+                {
+                    "architecture": "amd64",
+                    "name": "accountsservice",
+                    "source": "apt",
+                    "version": "0.6.35-0ubuntu7.1"
+                }
+            ],
+            "acpid": [
+                {
+                    "architecture": "amd64",
+                    "name": "acpid",
+                    "source": "apt",
+                    "version": "1:2.0.21-1ubuntu2"
+                }
+            ],
+            "adduser": [
+                {
+                    "architecture": "all",
+                    "name": "adduser",
+                    "source": "apt",
+                    "version": "3.113+nmu3ubuntu3"
+                }
+            ],
+        },
+        'services': [
+            {
+                "name": "acpid",
+                "source": "sysv",
+                "state": "running"
+            },
+            {
+                "name": "apparmor",
+                "source": "sysv",
+                "state": "stopped"
+            },
+            {
+                "name": "atd",
+                "source": "sysv",
+                "state": "running"
+            },
+            {
+                "name": "cron",
+                "source": "sysv",
+                "state": "running"
+            }
+        ],
+        'ansible': {
+            'ansible_fact_simple': 'hello world',
+            'ansible_fact_complex': {
+                'foo': 'bar',
+                'hello': [
+                    'scooby',
+                    'dooby',
+                    'doo'
+                ]
+            },
+        }
+    }
+    return fact_msg_base
+
+
+'''
+Facts sent from ansible to our fact cache receiver.
+The fact module type is implicit, i.e. each module's facts are nested under a
+first-level key named after the module:
+
+{
+    'fact_module_name': { ... },
+}
+
+Note: The 'ansible' module is an exception to this rule.
+Its facts are NOT nested in a dict, and thus do NOT sit under a first-level
+key of 'ansible'.
+'''
+
+@pytest.fixture
+def fact_msg_ansible(fact_msg_base, fact_ansible_json):
+    fact_msg_base['facts'] = fact_ansible_json
+    return fact_msg_base
+
+@pytest.fixture
+def fact_msg_packages(fact_msg_base, fact_packages_json):
+    fact_msg_base['facts']['packages'] = fact_packages_json
+    return fact_msg_base
+
+@pytest.fixture
+def fact_msg_services(fact_msg_base, fact_services_json):
+    fact_msg_base['facts']['services'] = fact_services_json
+    return fact_msg_base
+
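Before the receiver tests, it helps to see the message shape these fixtures assemble end to end. A sketch only: the host name, date_key, inventory id, and fact payloads below are illustrative values, not something the fixtures guarantee.

    # Illustrative only: module facts sit under a key named after the module;
    # Ansible facts are the exception and occupy the top level of 'facts'.
    msg = {
        'host': 'host-0',              # host name, not a pk
        'date_key': 1438013208.0,      # epoch seconds of the scan
        'inventory_id': 42,            # hypothetical inventory pk
        'facts': {
            'packages': {'acpid': [{'name': 'acpid', 'source': 'apt'}]},
            'services': [{'name': 'acpid', 'source': 'sysv', 'state': 'running'}],
        },
    }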
diff --git a/awx/main/tests/functional/commands/test_run_fact_cache_receiver.py b/awx/main/tests/functional/commands/test_run_fact_cache_receiver.py
new file mode 100644
index 0000000000..266272e37c
--- /dev/null
+++ b/awx/main/tests/functional/commands/test_run_fact_cache_receiver.py
@@ -0,0 +1,95 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved
+
+# Python
+import pytest
+from datetime import datetime
+import json
+
+# Django
+
+# AWX
+from awx.main.management.commands.run_fact_cache_receiver import FactCacheReceiver
+from awx.main.models.fact import Fact
+from awx.main.models.inventory import Host
+
+# TODO: Check that timestamp and other attributes are as expected
+def check_process_fact_message_module(fact_returned, data, module_name):
+    date_key = data['date_key']
+
+    # Ensure 1, and only 1, fact created
+    timestamp = datetime.fromtimestamp(date_key, None)
+    assert 1 == Fact.objects.all().count()
+
+    host_obj = Host.objects.get(name=data['host'], inventory__id=data['inventory_id'])
+    assert host_obj is not None
+    fact_known = Fact.get_host_fact(host_obj.id, module_name, timestamp)
+    assert fact_known is not None
+    assert fact_known == fact_returned
+
+    assert host_obj == fact_returned.host
+    if module_name == 'ansible':
+        assert data['facts'] == fact_returned.facts
+    else:
+        assert data['facts'][module_name] == fact_returned.facts
+    assert timestamp == fact_returned.timestamp
+    assert module_name == fact_returned.module
+
+@pytest.mark.django_db
+def test_process_fact_message_ansible(fact_msg_ansible):
+    receiver = FactCacheReceiver()
+    fact_returned = receiver.process_fact_message(fact_msg_ansible)
+
+    check_process_fact_message_module(fact_returned, fact_msg_ansible, 'ansible')
+
+@pytest.mark.django_db
+def test_process_fact_message_packages(fact_msg_packages):
+    receiver = FactCacheReceiver()
+    fact_returned = receiver.process_fact_message(fact_msg_packages)
+
+    check_process_fact_message_module(fact_returned, fact_msg_packages, 'packages')
+
+@pytest.mark.django_db
+def test_process_fact_message_services(fact_msg_services):
+    receiver = FactCacheReceiver()
+    fact_returned = receiver.process_fact_message(fact_msg_services)
+
+    check_process_fact_message_module(fact_returned, fact_msg_services, 'services')
+
+'''
+We piggyback our fact sending onto the Ansible fact interface, where facts is
+a JSON blob of all the facts. This makes it hard to decipher which facts are
+new/changed. Because of this, we handle the same fact module data being sent
+multiple times and just keep the newest version.
+'''
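The rule the docstring describes amounts to an upsert keyed on (host, module, timestamp). A minimal sketch of the idea, using only the Fact model fields these tests already exercise (this is not the shipped FactCacheReceiver.process_fact_message, just the behaviour the next test pins down):

    def store_fact_scan(host, module, timestamp, facts):
        # A second payload for the same host/module/timestamp replaces the
        # stored facts instead of creating a second row.
        fact, created = Fact.objects.get_or_create(
            host=host, module=module, timestamp=timestamp,
            defaults={'facts': facts})
        if not created:
            fact.facts = facts  # the newest version wins
            fact.save()
        return fact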
+@pytest.mark.django_db
+def test_process_facts_message_ansible_overwrite(fact_scans, fact_msg_ansible):
+    epoch = datetime.fromtimestamp(fact_msg_ansible['date_key'])
+    fact_scans(fact_scans=1, timestamp_epoch=epoch)
+    key = 'ansible.overwrite'
+    value = 'hello world'
+
+    receiver = FactCacheReceiver()
+    receiver.process_fact_message(fact_msg_ansible)
+
+    fact_msg_ansible['facts'][key] = value
+    fact_returned = receiver.process_fact_message(fact_msg_ansible)
+
+    fact_obj = Fact.objects.get(id=fact_returned.id)
+    assert key in fact_obj.facts
+    assert json.loads(fact_obj.facts) == fact_msg_ansible['facts']
+    assert value == json.loads(fact_obj.facts)[key]
+
+# Ensure that the message flows from the socket through to process_fact_message()
+@pytest.mark.django_db
+def test_run_receiver(mocker, fact_msg_ansible):
+    mocker.patch("awx.main.socket.Socket.listen", return_value=[fact_msg_ansible])
+
+    receiver = FactCacheReceiver()
+    mocker.patch.object(receiver, 'process_fact_message', return_value=None)
+
+    receiver.run_receiver(use_processing_threads=False)
+
+    receiver.process_fact_message.assert_called_once_with(fact_msg_ansible)
diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py
index cea7ad01f5..4dfed5fe0f 100644
--- a/awx/main/tests/functional/conftest.py
+++ b/awx/main/tests/functional/conftest.py
@@ -1,8 +1,23 @@
-import pytest
+# Python
+import pytest
+import mock
+import json
+import os
+from datetime import timedelta
+
+# Django
 from django.core.urlresolvers import resolve
 from django.utils.six.moves.urllib.parse import urlparse
+from django.utils import timezone
 from django.contrib.auth.models import User
+from django.conf import settings
+
+# AWX
+from awx.main.models.projects import Project
+from awx.main.models.base import PERM_INVENTORY_READ
+from awx.main.models.ha import Instance
+from awx.main.models.fact import Fact
 
 from rest_framework.test import (
     APIRequestFactory,
@@ -10,20 +25,34 @@ from rest_framework.test import (
 )
 
 from awx.main.models.credential import Credential
-from awx.main.models.projects import Project
 from awx.main.models.jobs import JobTemplate
-from awx.main.models.ha import Instance
 from awx.main.models.inventory import (
-    Inventory,
     Group,
 )
 from awx.main.models.organization import (
     Organization,
-    Team,
+    Permission,
 )
 from awx.main.models.rbac import Role
 
+'''
+Disable all django model signals.
+'''
+@pytest.fixture(scope="session", autouse=False)
+def disable_signals():
+    mocked = mock.patch('django.dispatch.Signal.send', autospec=True)
+    mocked.start()
+
+'''
+FIXME: Not sure how "far" just setting the BROKER_URL will get us.
+We may need to influence Celery's configuration like we do in the old unit tests (see base.py).
+
+Allows django signal code to execute without the need for redis
+'''
+@pytest.fixture(scope="session", autouse=True)
+def celery_memory_broker():
+    settings.BROKER_URL='memory://localhost/'
 
 @pytest.fixture
 def user():
@@ -60,11 +89,15 @@ def deploy_jobtemplate(project, inventory, credential):
 
 @pytest.fixture
 def team(organization):
-    return Team.objects.create(organization=organization, name='test-team')
+    return organization.teams.create(name='test-team')
 
 @pytest.fixture
-def project(organization):
-    prj = Project.objects.create(name="test-project", description="test-project-desc")
+@mock.patch.object(Project, "update", lambda self, **kwargs: None)
+def project(instance, organization):
+    prj = Project.objects.create(name="test-proj",
+                                 description="test-proj-desc",
+                                 scm_type="git",
+                                 scm_url="https://github.com/jlaska/ansible-playbooks")
+    prj.organizations.add(organization)
+    return prj
@@ -87,7 +120,7 @@ def credential():
 
 @pytest.fixture
 def inventory(organization):
-    return Inventory.objects.create(name="test-inventory", organization=organization)
+    return organization.inventories.create(name="test-inv")
 
 @pytest.fixture
 def role():
@@ -105,12 +138,38 @@
 def bob(user):
     return user('bob', False)
 
+@pytest.fixture
+def organizations(instance):
+    def rf(organization_count=1):
+        orgs = []
+        for i in xrange(0, organization_count):
+            o = Organization.objects.create(name="test-org-%d" % i, description="test-org-desc")
+            orgs.append(o)
+        return orgs
+    return rf
+
 @pytest.fixture
 def group(inventory):
     def g(name):
         return Group.objects.create(inventory=inventory, name=name)
     return g
 
+@pytest.fixture
+def hosts(group):
+    def rf(host_count=1):
+        hosts = []
+        for i in xrange(0, host_count):
+            name = '%s-host-%s' % (group.name, i)
+            (host, created) = group.inventory.hosts.get_or_create(name=name)
+            if created:
+                group.hosts.add(host)
+            hosts.append(host)
+        return hosts
+    return rf
+
 @pytest.fixture
 def permissions():
     return {
@@ -244,7 +303,46 @@ def options():
         return response
     return rf
 
-@pytest.fixture(scope="session", autouse=True)
-def celery_memory_broker():
-    from django.conf import settings
-    settings.BROKER_URL='memory://localhost/'
+
+@pytest.fixture
+def fact_scans(group, fact_ansible_json, fact_packages_json, fact_services_json):
+    def rf(fact_scans=1, timestamp_epoch=None):
+        # Evaluate the default epoch per call rather than in the signature.
+        timestamp_epoch = timestamp_epoch if timestamp_epoch is not None else timezone.now()
+        facts_json = {}
+        facts = []
+        module_names = ['ansible', 'services', 'packages']
+        timestamp_current = timestamp_epoch
+
+        facts_json['ansible'] = fact_ansible_json
+        facts_json['packages'] = fact_packages_json
+        facts_json['services'] = fact_services_json
+
+        for i in xrange(0, fact_scans):
+            for host in group.hosts.all():
+                for module_name in module_names:
+                    facts.append(Fact.objects.create(host=host, timestamp=timestamp_current, module=module_name, facts=facts_json[module_name]))
+            timestamp_current += timedelta(days=1)
+        return facts
+    return rf
+
+def _fact_json(module_name):
+    current_dir = os.path.dirname(os.path.realpath(__file__))
+    with open('%s/%s.json' % (current_dir, module_name)) as f:
+        return json.load(f)
+
+@pytest.fixture
+def fact_ansible_json():
+    return _fact_json('ansible')
+
+@pytest.fixture
+def fact_packages_json():
+    return _fact_json('packages')
+
+@pytest.fixture
+def fact_services_json():
+    return _fact_json('services')
+
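Every fact test below leans on the shape of the data the hosts/fact_scans pair produces, so it is worth spelling out; a hypothetical test (not part of this patch) that pins the arithmetic down:

    # 3 scans x 2 hosts x 3 modules = 18 Fact rows; scan i is stamped
    # at timestamp_epoch + timedelta(days=i).
    @pytest.mark.django_db
    def test_fact_scans_fixture_shape(hosts, fact_scans):
        epoch = timezone.now()
        hosts(host_count=2)
        facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
        assert len(facts) == 3 * 2 * 3
        assert max(f.timestamp for f in facts) == epoch + timedelta(days=2)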
+@pytest.fixture
+def permission_inv_read(organization, inventory, team):
+    return Permission.objects.create(inventory=inventory, team=team, permission_type=PERM_INVENTORY_READ)
+
diff --git a/awx/main/tests/functional/models/fact/test_get_host_fact.py b/awx/main/tests/functional/models/fact/test_get_host_fact.py
new file mode 100644
index 0000000000..2569417496
--- /dev/null
+++ b/awx/main/tests/functional/models/fact/test_get_host_fact.py
@@ -0,0 +1,111 @@
+import pytest
+
+from datetime import timedelta
+from django.utils import timezone
+
+from awx.main.models import Fact
+
+@pytest.mark.django_db
+def test_newest_scan_exact(hosts, fact_scans):
+    epoch = timezone.now()
+    hosts = hosts(host_count=2)
+    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    fact_known = None
+    for f in facts:
+        if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch:
+            fact_known = f
+            break
+    assert fact_known is not None
+
+    fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', epoch)
+
+    assert fact_found == fact_known
+
+'''
+Show me the most recent state of the system at any point in time;
+or, said differently:
+for any timestamp, return the newest scan whose timestamp is <= that timestamp.
+'''
+
+'''
+Ensure the most recent scan run is the scan returned.
+Query by a future date.
+'''
+@pytest.mark.django_db
+def test_newest_scan_less_than(hosts, fact_scans):
+    epoch = timezone.now()
+    timestamp_future = epoch + timedelta(days=10)
+    hosts = hosts(host_count=2)
+    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    fact_known = None
+    for f in facts:
+        if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch + timedelta(days=2):
+            fact_known = f
+            break
+    assert fact_known is not None
+
+    fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_future)
+
+    assert fact_found == fact_known
+
+'''
+Query a Fact in the middle of the fact scan timeline, at a time that is not an
+exact scan timestamp.
+'''
+@pytest.mark.django_db
+def test_query_middle_of_timeline(hosts, fact_scans):
+    epoch = timezone.now()
+    timestamp_middle = epoch + timedelta(days=1, hours=3)
+    hosts = hosts(host_count=2)
+    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    fact_known = None
+    for f in facts:
+        if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch + timedelta(days=1):
+            fact_known = f
+            break
+    assert fact_known is not None
+
+    fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_middle)
+
+    assert fact_found == fact_known
+
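That contract, the newest scan at or before a given timestamp, maps onto a single ORM query. A sketch of the idea using only fields these tests rely on (not necessarily the shipped body of Fact.get_host_fact):

    def get_host_fact_sketch(host_id, module, timestamp):
        # Newest Fact at-or-before `timestamp` for one host/module.
        return (Fact.objects
                    .filter(host__id=host_id, module=module,
                            timestamp__lte=timestamp)
                    .order_by('-timestamp')
                    .first())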
+'''
+Query time less than any fact scan.
+Should return None.
+'''
+@pytest.mark.django_db
+def test_query_result_empty(hosts, fact_scans):
+    epoch = timezone.now()
+    timestamp_less = epoch - timedelta(days=1)
+    hosts = hosts(host_count=2)
+    fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_less)
+
+    assert fact_found is None
+
+'''
+Query by a fact module other than 'ansible'.
+'''
+@pytest.mark.django_db
+def test_by_module(hosts, fact_scans):
+    epoch = timezone.now()
+    hosts = hosts(host_count=2)
+    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    fact_known_services = None
+    fact_known_packages = None
+    for f in facts:
+        if f.host_id == hosts[0].id:
+            if f.module == 'services' and f.timestamp == epoch:
+                fact_known_services = f
+            elif f.module == 'packages' and f.timestamp == epoch:
+                fact_known_packages = f
+    assert fact_known_services is not None
+    assert fact_known_packages is not None
+
+    fact_found_services = Fact.get_host_fact(hosts[0].id, 'services', epoch)
+    fact_found_packages = Fact.get_host_fact(hosts[0].id, 'packages', epoch)
+
+    assert fact_found_services == fact_known_services
+    assert fact_found_packages == fact_known_packages
+
diff --git a/awx/main/tests/functional/models/fact/test_get_timeline.py b/awx/main/tests/functional/models/fact/test_get_timeline.py
new file mode 100644
index 0000000000..da3360340a
--- /dev/null
+++ b/awx/main/tests/functional/models/fact/test_get_timeline.py
@@ -0,0 +1,129 @@
+import pytest
+
+from datetime import timedelta
+from django.utils import timezone
+
+from awx.main.models import Fact
+
+def setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=None, module_name='ansible', ts_known=None):
+    # None-guard: a timezone.now() default in the signature would be evaluated
+    # once at import time and shared across tests.
+    epoch = epoch if epoch is not None else timezone.now()
+    hosts = hosts(host_count=2)
+    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    facts_known = []
+    for f in facts:
+        if f.host.id == hosts[0].id:
+            if module_name and f.module != module_name:
+                continue
+            if ts_known and f.timestamp != ts_known:
+                continue
+            facts_known.append(f)
+    fact_objs = Fact.get_timeline(hosts[0].id, module=module_name, ts_from=ts_from, ts_to=ts_to)
+    return (facts_known, fact_objs)
+
+@pytest.mark.django_db
+def test_all(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_from = epoch - timedelta(days=1)
+    ts_to = epoch + timedelta(days=10)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, module_name=None, epoch=epoch)
+    assert 9 == len(facts_known)
+    assert 9 == len(fact_objs)
+
+@pytest.mark.django_db
+def test_all_ansible(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_from = epoch - timedelta(days=1)
+    ts_to = epoch + timedelta(days=10)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
+    assert 3 == len(facts_known)
+    assert 3 == len(fact_objs)
+
+    # The timeline is newest-first while facts_known is oldest-first,
+    # so compare reversed.
+    for i in xrange(len(facts_known)):
+        assert facts_known[len(facts_known) - i - 1].id == fact_objs[i].id
+
+@pytest.mark.django_db
+def test_empty_db(hosts, fact_scans):
+    hosts = hosts(host_count=2)
+    epoch = timezone.now()
+    ts_from = epoch - timedelta(days=1)
+    ts_to = epoch + timedelta(days=10)
+
+    fact_objs = Fact.get_timeline(hosts[0].id, 'ansible', ts_from, ts_to)
+
+    assert 0 == len(fact_objs)
+
+@pytest.mark.django_db
+def test_no_results(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_from = epoch - timedelta(days=100)
+    ts_to = epoch - timedelta(days=50)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
+    assert 0 == len(fact_objs)
+
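The boundary tests that follow pin down the window semantics: ts_from is exclusive, ts_to is inclusive, and results come back newest-first. As a reading aid, the queryset they describe looks roughly like this (again a sketch, not necessarily the shipped body of Fact.get_timeline):

    def get_timeline_sketch(host_id, module=None, ts_from=None, ts_to=None):
        qs = Fact.objects.filter(host__id=host_id)
        if module:
            qs = qs.filter(module=module)
        if ts_from is not None:
            qs = qs.filter(timestamp__gt=ts_from)   # exclusive lower bound
        if ts_to is not None:
            qs = qs.filter(timestamp__lte=ts_to)    # inclusive upper bound
        return qs.order_by('-timestamp')            # newest scan first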
+@pytest.mark.django_db
+def test_exact_same_equal(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_to = ts_from = epoch + timedelta(days=1)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch)
+    assert 1 == len(facts_known)
+    assert 1 == len(fact_objs)
+
+    assert facts_known[0].id == fact_objs[0].id
+
+@pytest.mark.django_db
+def test_exact_from_exclusive_to_inclusive(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_from = epoch + timedelta(days=1)
+    ts_to = epoch + timedelta(days=2)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch)
+
+    assert 1 == len(facts_known)
+    assert 1 == len(fact_objs)
+
+    assert facts_known[0].id == fact_objs[0].id
+
+@pytest.mark.django_db
+def test_to_lte(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_to = epoch + timedelta(days=1)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=ts_to, epoch=epoch)
+    facts_known_subset = filter(lambda x: x.timestamp <= ts_to, facts_known)
+
+    assert 2 == len(facts_known_subset)
+    assert 2 == len(fact_objs)
+
+    for i in xrange(0, len(fact_objs)):
+        assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id
+
+@pytest.mark.django_db
+def test_from_gt(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_from = epoch
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=ts_from, ts_to=None, epoch=epoch)
+    facts_known_subset = filter(lambda x: x.timestamp > ts_from, facts_known)
+
+    assert 2 == len(facts_known_subset)
+    assert 2 == len(fact_objs)
+
+    for i in xrange(0, len(fact_objs)):
+        assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id
+
+@pytest.mark.django_db
+def test_no_ts(hosts, fact_scans):
+    epoch = timezone.now()
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=epoch)
+    assert 3 == len(facts_known)
+    assert 3 == len(fact_objs)
+
+    # The timeline is newest-first while facts_known is oldest-first,
+    # so compare reversed.
+    for i in xrange(len(facts_known)):
+        assert facts_known[len(facts_known) - i - 1].id == fact_objs[i].id
+
diff --git a/awx/main/tests/functional/packages.json b/awx/main/tests/functional/packages.json
new file mode 100644
index 0000000000..7bc735d06f
--- /dev/null
+++ b/awx/main/tests/functional/packages.json
@@ -0,0 +1,2922 @@
+[
+    {
+        "name": "kbd",
+        "source": "rpm",
+        "epoch": null,
+        "version": "1.15.5",
+        "release": "10.el7",
+        "arch": "x86_64"
+    },
+    {
+        "name": "centos-release",
+        "source": "rpm",
+        "epoch": null,
+        "version": "7",
+        "release": "0.1406.el7.centos.2.3",
+        "arch": "x86_64"
+    },
+    {
+        "name": "postfix",
+        "source": "rpm",
+        "epoch": 2,
+        "version": "2.10.1",
+        "release": "6.el7",
+        "arch": "x86_64"
+    },
+    {
+        "name": "filesystem",
+        "source": "rpm",
+        "epoch": null,
+        "version": "3.2",
+        "release": "18.el7",
+        "arch": "x86_64"
+    },
+    {
+        "name": "tuned",
+        "source": "rpm",
+        "epoch": null,
+        "version": "2.3.0",
+        "release": "11.el7",
+        "arch": "noarch"
+    },
+    {
+        "name": "ncurses-base",
+        "source": "rpm",
+        "epoch": null,
+        "version": "5.9",
+        "release": "13.20130511.el7",
+        "arch": "noarch"
+    },
+    {
+        "name": "aic94xx-firmware",
+        "source": "rpm",
+        "epoch": null,
+        "version": "30",
+        "release": "6.el7",
+        "arch": "noarch"
+    },
+    {
+        "name": "kbd-misc",
+        "source": "rpm",
+        "epoch": null,
+        "version": "1.15.5",
+        "release": "10.el7",
+        "arch": "noarch"
+    },
+    {
+        "name": "irqbalance",
+        "source": "rpm",
+        "epoch": 2,
+        "version": "1.0.6",
+        "release": "5.el7",
+        "arch": "x86_64"
+    },
+    {
+        "name": "tzdata",
+        "source": "rpm",
+        "epoch": null,
+        "version": "2014b",
+        "release": "1.el7",
+        "arch": 
"noarch" + }, + { + "name": "openssh-clients", + "source": "rpm", + "epoch": null, + "version": "6.4p1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "glibc-common", + "source": "rpm", + "epoch": null, + "version": "2.17", + "release": "55.el7", + "arch": "x86_64" + }, + { + "name": "authconfig", + "source": "rpm", + "epoch": null, + "version": "6.2.8", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "xz-libs", + "source": "rpm", + "epoch": null, + "version": "5.1.2", + "release": "8alpha.el7", + "arch": "x86_64" + }, + { + "name": "btrfs-progs", + "source": "rpm", + "epoch": null, + "version": "3.12", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "ncurses-libs", + "source": "rpm", + "epoch": null, + "version": "5.9", + "release": "13.20130511.el7", + "arch": "x86_64" + }, + { + "name": "sudo", + "source": "rpm", + "epoch": null, + "version": "1.8.6p7", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libsepol", + "source": "rpm", + "epoch": null, + "version": "2.1.9", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "iprutils", + "source": "rpm", + "epoch": null, + "version": "2.3.16", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libselinux", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "iwl6000g2b-firmware", + "source": "rpm", + "epoch": null, + "version": "17.168.5.2", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "info", + "source": "rpm", + "epoch": null, + "version": "5.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl7260-firmware", + "source": "rpm", + "epoch": null, + "version": "22.0.7.0", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "dbus-libs", + "source": "rpm", + "epoch": 1, + "version": "1.6.12", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libertas-sd8787-firmware", + "source": "rpm", + "epoch": null, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": "sed", + "source": "rpm", + "epoch": null, + "version": "4.2.2", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "iwl6050-firmware", + "source": "rpm", + "epoch": null, + "version": "41.28.5.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "chkconfig", + "source": "rpm", + "epoch": null, + "version": "1.3.61", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl1000-firmware", + "source": "rpm", + "epoch": 1, + "version": "39.31.5.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "nspr", + "source": "rpm", + "epoch": null, + "version": "4.10.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl6000-firmware", + "source": "rpm", + "epoch": null, + "version": "9.221.4.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "nss-util", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "iwl2000-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "grep", + "source": "rpm", + "epoch": null, + "version": "2.16", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "iwl5150-firmware", + "source": "rpm", + "epoch": null, + "version": "8.24.2.2", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "gawk", + "source": "rpm", + "epoch": null, + "version": "4.0.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": 
"iwl4965-firmware", + "source": "rpm", + "epoch": null, + "version": "228.61.2.24", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "expat", + "source": "rpm", + "epoch": null, + "version": "2.1.0", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "iwl3160-firmware", + "source": "rpm", + "epoch": null, + "version": "22.0.7.0", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libattr", + "source": "rpm", + "epoch": null, + "version": "2.4.46", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "iwl3945-firmware", + "source": "rpm", + "epoch": null, + "version": "15.32.2.9", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libcap", + "source": "rpm", + "epoch": null, + "version": "2.22", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libsemanage-python", + "source": "rpm", + "epoch": null, + "version": "2.1.10", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "libxml2", + "source": "rpm", + "epoch": null, + "version": "2.9.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-HTTP-Tiny", + "source": "rpm", + "epoch": null, + "version": "0.033", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "libgcrypt", + "source": "rpm", + "epoch": null, + "version": "1.5.3", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Perldoc", + "source": "rpm", + "epoch": null, + "version": "3.20", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "lua", + "source": "rpm", + "epoch": null, + "version": "5.1.4", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "perl-Encode", + "source": "rpm", + "epoch": null, + "version": "2.51", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "pkgconfig", + "source": "rpm", + "epoch": 1, + "version": "0.27.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Usage", + "source": "rpm", + "epoch": null, + "version": "1.63", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "shared-mime-info", + "source": "rpm", + "epoch": null, + "version": "1.1", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "perl-Exporter", + "source": "rpm", + "epoch": null, + "version": "5.68", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "libcap-ng", + "source": "rpm", + "epoch": null, + "version": "0.7.3", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-Time-Local", + "source": "rpm", + "epoch": null, + "version": "1.2300", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "libidn", + "source": "rpm", + "epoch": null, + "version": "1.28", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-Carp", + "source": "rpm", + "epoch": null, + "version": "1.26", + "release": "244.el7", + "arch": "noarch" + }, + { + "name": "gmp", + "source": "rpm", + "epoch": 1, + "version": "5.1.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-PathTools", + "source": "rpm", + "epoch": null, + "version": "3.40", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "p11-kit", + "source": "rpm", + "epoch": null, + "version": "0.18.7", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-macros", + "source": "rpm", + "epoch": 4, + "version": "5.16.3", + "release": "285.el7", + "arch": "x86_64" + }, + { + "name": "libdaemon", + "source": "rpm", + "epoch": null, + "version": "0.14", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "perl-File-Temp", + "source": "rpm", + "epoch": null, + "version": "0.23.01", + "release": 
"3.el7", + "arch": "noarch" + }, + { + "name": "libcroco", + "source": "rpm", + "epoch": null, + "version": "0.6.8", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-threads-shared", + "source": "rpm", + "epoch": null, + "version": "1.43", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "libnl3-cli", + "source": "rpm", + "epoch": null, + "version": "3.2.21", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "perl-Filter", + "source": "rpm", + "epoch": null, + "version": "1.49", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "cyrus-sasl-lib", + "source": "rpm", + "epoch": null, + "version": "2.1.26", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "perl-Getopt-Long", + "source": "rpm", + "epoch": null, + "version": "2.40", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "groff-base", + "source": "rpm", + "epoch": null, + "version": "1.22.2", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "04bbaa7b", + "release": "4c881cbf", + "arch": null + }, + { + "name": "libunistring", + "source": "rpm", + "epoch": null, + "version": "0.9.3", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "libicu", + "source": "rpm", + "epoch": null, + "version": "50.1.2", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "diffutils", + "source": "rpm", + "epoch": null, + "version": "3.3", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libdnet", + "source": "rpm", + "epoch": null, + "version": "1.12", + "release": "13.1.el7", + "arch": "x86_64" + }, + { + "name": "xz", + "source": "rpm", + "epoch": null, + "version": "5.1.2", + "release": "8alpha.el7", + "arch": "x86_64" + }, + { + "name": "open-vm-tools", + "source": "rpm", + "epoch": null, + "version": "9.4.0", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "sysvinit-tools", + "source": "rpm", + "epoch": null, + "version": "2.88", + "release": "14.dsf.el7", + "arch": "x86_64" + }, + { + "name": "open-vm-tools-deploypkg", + "source": "rpm", + "epoch": 0, + "version": "9.4.10", + "release": "3", + "arch": "x86_64" + }, + { + "name": "newt", + "source": "rpm", + "epoch": null, + "version": "0.52.15", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "python-kitchen", + "source": "rpm", + "epoch": null, + "version": "1.1.1", + "release": "5.el7", + "arch": "noarch" + }, + { + "name": "ethtool", + "source": "rpm", + "epoch": 2, + "version": "3.8", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "yum-utils", + "source": "rpm", + "epoch": null, + "version": "1.1.31", + "release": "29.el7", + "arch": "noarch" + }, + { + "name": "hostname", + "source": "rpm", + "epoch": null, + "version": "3.13", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "emacs-filesystem", + "source": "rpm", + "epoch": 1, + "version": "24.3", + "release": "11.el7", + "arch": "noarch" + }, + { + "name": "gdbm", + "source": "rpm", + "epoch": null, + "version": "1.10", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "emacs-common", + "source": "rpm", + "epoch": 1, + "version": "24.3", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "less", + "source": "rpm", + "epoch": null, + "version": "458", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "epel-release", + "source": "rpm", + "epoch": null, + "version": "7", + "release": "5", + "arch": "noarch" + }, + { + "name": "p11-kit-trust", + "source": "rpm", + "epoch": null, + "version": 
"0.18.7", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "pkcs11-helper", + "source": "rpm", + "epoch": null, + "version": "1.11", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "nettle", + "source": "rpm", + "epoch": null, + "version": "2.7.1", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "easy-rsa", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "1.el7", + "arch": "noarch" + }, + { + "name": "gobject-introspection", + "source": "rpm", + "epoch": null, + "version": "1.36.0", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libevent", + "source": "rpm", + "epoch": null, + "version": "2.0.21", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "gsettings-desktop-schemas", + "source": "rpm", + "epoch": null, + "version": "3.8.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "dhcp-libs", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "acl", + "source": "rpm", + "epoch": null, + "version": "2.2.51", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "dhcp", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "elfutils-libs", + "source": "rpm", + "epoch": null, + "version": "0.158", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "bind-license", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "noarch" + }, + { + "name": "mozjs17", + "source": "rpm", + "epoch": null, + "version": "17.0.0", + "release": "10.el7", + "arch": "x86_64" + }, + { + "name": "bind", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "pinentry", + "source": "rpm", + "epoch": null, + "version": "0.8.1", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "bind-libs-lite", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "libselinux-utils", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "audit-libs", + "source": "rpm", + "epoch": null, + "version": "2.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libedit", + "source": "rpm", + "epoch": null, + "version": "3.0", + "release": "12.20121213cvs.el7", + "arch": "x86_64" + }, + { + "name": "audit-libs-python", + "source": "rpm", + "epoch": null, + "version": "2.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libmodman", + "source": "rpm", + "epoch": null, + "version": "2.0.1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "checkpolicy", + "source": "rpm", + "epoch": null, + "version": "2.1.12", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "glib-networking", + "source": "rpm", + "epoch": null, + "version": "2.36.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "setools-libs", + "source": "rpm", + "epoch": null, + "version": "3.3.7", + "release": "46.el7", + "arch": "x86_64" + }, + { + "name": "snappy", + "source": "rpm", + "epoch": null, + "version": "1.1.0", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "audit", + "source": "rpm", + "epoch": null, + "version": "2.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "numactl-libs", + "source": "rpm", + "epoch": null, + "version": "2.0.9", + "release": "2.el7", + "arch": "x86_64" + }, + { + 
"name": "autogen-libopts", + "source": "rpm", + "epoch": null, + "version": "5.18", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libverto", + "source": "rpm", + "epoch": null, + "version": "0.2.5", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "ntp", + "source": "rpm", + "epoch": null, + "version": "4.2.6p5", + "release": "19.el7.centos.3", + "arch": "x86_64" + }, + { + "name": "libsemanage", + "source": "rpm", + "epoch": null, + "version": "2.1.10", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "krb5-libs", + "source": "rpm", + "epoch": null, + "version": "1.11.3", + "release": "49.el7", + "arch": "x86_64" + }, + { + "name": "openldap", + "source": "rpm", + "epoch": null, + "version": "2.4.39", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "cracklib", + "source": "rpm", + "epoch": null, + "version": "2.9.0", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libmount", + "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "systemd-libs", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libpwquality", + "source": "rpm", + "epoch": null, + "version": "1.2.3", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "pam", + "source": "rpm", + "epoch": null, + "version": "1.1.8", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "shadow-utils", + "source": "rpm", + "epoch": 2, + "version": "4.1.5.1", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "util-linux", + "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "python-libs", + "source": "rpm", + "epoch": null, + "version": "2.7.5", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "python-decorator", + "source": "rpm", + "epoch": null, + "version": "3.4.0", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "gettext", + "source": "rpm", + "epoch": null, + "version": "0.18.2.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "centos-logos", + "source": "rpm", + "epoch": null, + "version": "70.0.6", + "release": "1.el7.centos", + "arch": "noarch" + }, + { + "name": "libselinux-python", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "python-slip-dbus", + "source": "rpm", + "epoch": null, + "version": "0.4.0", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "pyliblzma", + "source": "rpm", + "epoch": null, + "version": "0.5.3", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "yum-metadata-parser", + "source": "rpm", + "epoch": null, + "version": "1.1.4", + "release": "10.el7", + "arch": "x86_64" + }, + { + "name": "pyxattr", + "source": "rpm", + "epoch": null, + "version": "0.5.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "python-backports-ssl_match_hostname", + "source": "rpm", + "epoch": null, + "version": "3.4.0.2", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "python-pyudev", + "source": "rpm", + "epoch": null, + "version": "0.15", + "release": "6.el7", + "arch": "noarch" + }, + { + "name": "binutils", + "source": "rpm", + "epoch": null, + "version": "2.23.52.0.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "logrotate", + "source": "rpm", + "epoch": null, + "version": "3.8.6", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "alsa-lib", + "source": "rpm", + 
"epoch": null, + "version": "1.0.27.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "mariadb-libs", + "source": "rpm", + "epoch": 1, + "version": "5.5.35", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "libcurl", + "source": "rpm", + "epoch": null, + "version": "7.29.0", + "release": "19.el7", + "arch": "x86_64" + }, + { + "name": "python-urlgrabber", + "source": "rpm", + "epoch": null, + "version": "3.10", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "rpm-libs", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "fipscheck", + "source": "rpm", + "epoch": null, + "version": "1.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "json-c", + "source": "rpm", + "epoch": null, + "version": "0.11", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "virt-what", + "source": "rpm", + "epoch": null, + "version": "1.13", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libnetfilter_conntrack", + "source": "rpm", + "epoch": null, + "version": "1.0.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "iproute", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "qrencode-libs", + "source": "rpm", + "epoch": null, + "version": "3.4.1", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "device-mapper-libs", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "systemd", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "systemd-sysv", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "iputils", + "source": "rpm", + "epoch": null, + "version": "20121221", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "device-mapper-event-libs", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "NetworkManager-glib", + "source": "rpm", + "epoch": 1, + "version": "0.9.9.1", + "release": "13.git20140326.4dba720.el7", + "arch": "x86_64" + }, + { + "name": "polkit-pkla-compat", + "source": "rpm", + "epoch": null, + "version": "0.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "cronie-anacron", + "source": "rpm", + "epoch": null, + "version": "1.4.11", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "crontabs", + "source": "rpm", + "epoch": null, + "version": "1.11", + "release": "6.20121102git.el7", + "arch": "noarch" + }, + { + "name": "device-mapper-event", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "avahi-libs", + "source": "rpm", + "epoch": null, + "version": "0.6.31", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "avahi-autoipd", + "source": "rpm", + "epoch": null, + "version": "0.6.31", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "dnsmasq", + "source": "rpm", + "epoch": null, + "version": "2.66", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "ebtables", + "source": "rpm", + "epoch": null, + "version": "2.0.10", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "libpciaccess", + "source": "rpm", + "epoch": null, + "version": "0.13.1", + "release": "4.1.el7", + "arch": "x86_64" + }, + { + "name": "fxload", + "source": "rpm", + "epoch": null, + 
"version": "2002_04_11", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "alsa-tools-firmware", + "source": "rpm", + "epoch": null, + "version": "1.0.27", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libpipeline", + "source": "rpm", + "epoch": null, + "version": "1.2.3", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "gnupg2", + "source": "rpm", + "epoch": null, + "version": "2.0.22", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "rpm-python", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "pygpgme", + "source": "rpm", + "epoch": null, + "version": "0.3", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "hardlink", + "source": "rpm", + "epoch": 1, + "version": "1.0", + "release": "19.el7", + "arch": "x86_64" + }, + { + "name": "dracut-network", + "source": "rpm", + "epoch": null, + "version": "033", + "release": "161.el7", + "arch": "x86_64" + }, + { + "name": "plymouth", + "source": "rpm", + "epoch": null, + "version": "0.8.9", + "release": "0.10.20140113.el7.centos", + "arch": "x86_64" + }, + { + "name": "teamd", + "source": "rpm", + "epoch": null, + "version": "1.9", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "libestr", + "source": "rpm", + "epoch": null, + "version": "0.1.9", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "NetworkManager-tui", + "source": "rpm", + "epoch": 1, + "version": "0.9.9.1", + "release": "13.git20140326.4dba720.el7", + "arch": "x86_64" + }, + { + "name": "kernel", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "123.el7", + "arch": "x86_64" + }, + { + "name": "dracut-config-rescue", + "source": "rpm", + "epoch": null, + "version": "033", + "release": "161.el7", + "arch": "x86_64" + }, + { + "name": "man-db", + "source": "rpm", + "epoch": null, + "version": "2.6.3", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "lvm2", + "source": "rpm", + "epoch": 7, + "version": "2.02.105", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "libgcc", + "source": "rpm", + "epoch": null, + "version": "4.8.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "setup", + "source": "rpm", + "epoch": null, + "version": "2.8.71", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "microcode_ctl", + "source": "rpm", + "epoch": 2, + "version": "2.1", + "release": "7.1.el7", + "arch": "x86_64" + }, + { + "name": "basesystem", + "source": "rpm", + "epoch": null, + "version": "10.0", + "release": "7.el7.centos", + "arch": "noarch" + }, + { + "name": "biosdevname", + "source": "rpm", + "epoch": null, + "version": "0.5.0", + "release": "10.el7", + "arch": "x86_64" + }, + { + "name": "linux-firmware", + "source": "rpm", + "epoch": null, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": "openssh-server", + "source": "rpm", + "epoch": null, + "version": "6.4p1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "parted", + "source": "rpm", + "epoch": null, + "version": "3.1", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "nss-softokn-freebl", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "selinux-policy-targeted", + "source": "rpm", + "epoch": null, + "version": "3.12.1", + "release": "153.el7", + "arch": "noarch" + }, + { + "name": "glibc", + "source": "rpm", + "epoch": null, + "version": "2.17", + "release": 
"55.el7", + "arch": "x86_64" + }, + { + "name": "xfsprogs", + "source": "rpm", + "epoch": null, + "version": "3.2.0", + "release": "0.10.alpha2.el7", + "arch": "x86_64" + }, + { + "name": "libstdc++", + "source": "rpm", + "epoch": null, + "version": "4.8.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "e2fsprogs", + "source": "rpm", + "epoch": null, + "version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "bash", + "source": "rpm", + "epoch": null, + "version": "4.2.45", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "passwd", + "source": "rpm", + "epoch": null, + "version": "0.79", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "pcre", + "source": "rpm", + "epoch": null, + "version": "8.32", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "tar", + "source": "rpm", + "epoch": 2, + "version": "1.26", + "release": "29.el7", + "arch": "x86_64" + }, + { + "name": "zlib", + "source": "rpm", + "epoch": null, + "version": "1.2.7", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "rootfiles", + "source": "rpm", + "epoch": null, + "version": "8.1", + "release": "11.el7", + "arch": "noarch" + }, + { + "name": "iwl6000g2a-firmware", + "source": "rpm", + "epoch": null, + "version": "17.168.5.3", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libuuid", + "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "iwl2030-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "popt", + "source": "rpm", + "epoch": null, + "version": "1.13", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "iwl100-firmware", + "source": "rpm", + "epoch": null, + "version": "39.31.5.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libcom_err", + "source": "rpm", + "epoch": null, + "version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl135-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libdb", + "source": "rpm", + "epoch": null, + "version": "5.3.21", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "iwl105-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "bzip2-libs", + "source": "rpm", + "epoch": null, + "version": "1.0.6", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "libertas-sd8686-firmware", + "source": "rpm", + "epoch": null, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": "readline", + "source": "rpm", + "epoch": null, + "version": "6.2", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "ivtv-firmware", + "source": "rpm", + "epoch": 2, + "version": "20080701", + "release": "26.el7", + "arch": "noarch" + }, + { + "name": "elfutils-libelf", + "source": "rpm", + "epoch": null, + "version": "0.158", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "libertas-usb8388-firmware", + "source": "rpm", + "epoch": 2, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": "libgpg-error", + "source": "rpm", + "epoch": null, + "version": "1.12", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "iwl5000-firmware", + "source": "rpm", + "epoch": null, + "version": "8.83.5.1_1", + "release": "34.el7", + "arch": 
"noarch" + }, + { + "name": "libacl", + "source": "rpm", + "epoch": null, + "version": "2.2.51", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "f4a80eb5", + "release": "53a7ff4b", + "arch": null + }, + { + "name": "cpio", + "source": "rpm", + "epoch": null, + "version": "2.11", + "release": "22.el7", + "arch": "x86_64" + }, + { + "name": "perl-parent", + "source": "rpm", + "epoch": 1, + "version": "0.225", + "release": "244.el7", + "arch": "noarch" + }, + { + "name": "libnl3", + "source": "rpm", + "epoch": null, + "version": "3.2.21", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "perl-podlators", + "source": "rpm", + "epoch": null, + "version": "2.5.1", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "sqlite", + "source": "rpm", + "epoch": null, + "version": "3.7.17", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Escapes", + "source": "rpm", + "epoch": 1, + "version": "1.04", + "release": "285.el7", + "arch": "noarch" + }, + { + "name": "libffi", + "source": "rpm", + "epoch": null, + "version": "3.0.13", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "perl-Text-ParseWords", + "source": "rpm", + "epoch": null, + "version": "3.29", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "glib2", + "source": "rpm", + "epoch": null, + "version": "2.36.3", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-Storable", + "source": "rpm", + "epoch": null, + "version": "2.45", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "dbus-glib", + "source": "rpm", + "epoch": null, + "version": "0.100", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "perl-constant", + "source": "rpm", + "epoch": null, + "version": "1.27", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "findutils", + "source": "rpm", + "epoch": 1, + "version": "4.5.11", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-Socket", + "source": "rpm", + "epoch": null, + "version": "2.010", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "file-libs", + "source": "rpm", + "epoch": null, + "version": "5.11", + "release": "21.el7", + "arch": "x86_64" + }, + { + "name": "perl-Time-HiRes", + "source": "rpm", + "epoch": 4, + "version": "1.9725", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "libtasn1", + "source": "rpm", + "epoch": null, + "version": "3.3", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-Scalar-List-Utils", + "source": "rpm", + "epoch": null, + "version": "1.27", + "release": "248.el7", + "arch": "x86_64" + }, + { + "name": "tcp_wrappers-libs", + "source": "rpm", + "epoch": null, + "version": "7.6", + "release": "77.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Simple", + "source": "rpm", + "epoch": 1, + "version": "3.28", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "file", + "source": "rpm", + "epoch": null, + "version": "5.11", + "release": "21.el7", + "arch": "x86_64" + }, + { + "name": "perl-File-Path", + "source": "rpm", + "epoch": null, + "version": "2.09", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "nss-softokn", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "perl-threads", + "source": "rpm", + "epoch": null, + "version": "1.87", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libassuan", + "source": "rpm", + "epoch": null, + "version": "2.1.0", + 
"release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-libs", + "source": "rpm", + "epoch": 4, + "version": "5.16.3", + "release": "285.el7", + "arch": "x86_64" + }, + { + "name": "e2fsprogs-libs", + "source": "rpm", + "epoch": null, + "version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl", + "source": "rpm", + "epoch": 4, + "version": "5.16.3", + "release": "285.el7", + "arch": "x86_64" + }, + { + "name": "which", + "source": "rpm", + "epoch": null, + "version": "2.20", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "66fd4949", + "release": "4803fe57", + "arch": null + }, + { + "name": "libgomp", + "source": "rpm", + "epoch": null, + "version": "4.8.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "procps-ng", + "source": "rpm", + "epoch": null, + "version": "3.3.10", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "kmod-libs", + "source": "rpm", + "epoch": null, + "version": "14", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "net-tools", + "source": "rpm", + "epoch": null, + "version": "2.0", + "release": "0.17.20131004git.el7", + "arch": "x86_64" + }, + { + "name": "libnfnetlink", + "source": "rpm", + "epoch": null, + "version": "1.0.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libmspack", + "source": "rpm", + "epoch": 0, + "version": "0.0.20040308alpha", + "release": "2", + "arch": "x86_64" + }, + { + "name": "slang", + "source": "rpm", + "epoch": null, + "version": "2.2.4", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "python-chardet", + "source": "rpm", + "epoch": null, + "version": "2.0.1", + "release": "7.el7", + "arch": "noarch" + }, + { + "name": "lzo", + "source": "rpm", + "epoch": null, + "version": "2.06", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "yum", + "source": "rpm", + "epoch": null, + "version": "3.4.3", + "release": "125.el7.centos", + "arch": "noarch" + }, + { + "name": "pciutils-libs", + "source": "rpm", + "epoch": null, + "version": "3.2.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "gpm-libs", + "source": "rpm", + "epoch": null, + "version": "1.20.7", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "keyutils-libs", + "source": "rpm", + "epoch": null, + "version": "1.5.8", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "liblockfile", + "source": "rpm", + "epoch": null, + "version": "1.08", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "gettext-libs", + "source": "rpm", + "epoch": null, + "version": "0.18.2.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "emacs-nox", + "source": "rpm", + "epoch": 1, + "version": "24.3", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libteam", + "source": "rpm", + "epoch": null, + "version": "1.9", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "352c64e5", + "release": "52ae6884", + "arch": null + }, + { + "name": "ca-certificates", + "source": "rpm", + "epoch": null, + "version": "2013.1.95", + "release": "71.el7", + "arch": "noarch" + }, + { + "name": "openvpn", + "source": "rpm", + "epoch": null, + "version": "2.3.7", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "gnutls", + "source": "rpm", + "epoch": null, + "version": "3.1.18", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "strace", + "source": "rpm", + "epoch": null, + 
"version": "4.8", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "ModemManager-glib", + "source": "rpm", + "epoch": null, + "version": "1.1.0", + "release": "6.git20130913.el7", + "arch": "x86_64" + }, + { + "name": "tmux", + "source": "rpm", + "epoch": null, + "version": "1.8", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "vim-minimal", + "source": "rpm", + "epoch": 2, + "version": "7.4.160", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "dhcp-common", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "device-mapper-persistent-data", + "source": "rpm", + "epoch": null, + "version": "0.3.2", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "dhclient", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "libdb-utils", + "source": "rpm", + "epoch": null, + "version": "5.3.21", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "bind-libs", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "libss", + "source": "rpm", + "epoch": null, + "version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "bind-utils", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "make", + "source": "rpm", + "epoch": 1, + "version": "3.82", + "release": "21.el7", + "arch": "x86_64" + }, + { + "name": "nmap-ncat", + "source": "rpm", + "epoch": 2, + "version": "6.40", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "freetype", + "source": "rpm", + "epoch": null, + "version": "2.4.11", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "policycoreutils", + "source": "rpm", + "epoch": null, + "version": "2.2.5", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "ncurses", + "source": "rpm", + "epoch": null, + "version": "5.9", + "release": "13.20130511.el7", + "arch": "x86_64" + }, + { + "name": "python-IPy", + "source": "rpm", + "epoch": null, + "version": "0.75", + "release": "6.el7", + "arch": "noarch" + }, + { + "name": "libproxy", + "source": "rpm", + "epoch": null, + "version": "0.4.11", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "libcgroup", + "source": "rpm", + "epoch": null, + "version": "0.41", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libsoup", + "source": "rpm", + "epoch": null, + "version": "2.42.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "policycoreutils-python", + "source": "rpm", + "epoch": null, + "version": "2.2.5", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "libndp", + "source": "rpm", + "epoch": null, + "version": "1.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iftop", + "source": "rpm", + "epoch": null, + "version": "1.0", + "release": "0.7.pre4.el7", + "arch": "x86_64" + }, + { + "name": "libsysfs", + "source": "rpm", + "epoch": null, + "version": "2.1.0", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "ntpdate", + "source": "rpm", + "epoch": null, + "version": "4.2.6p5", + "release": "19.el7.centos.3", + "arch": "x86_64" + }, + { + "name": "ustr", + "source": "rpm", + "epoch": null, + "version": "1.0.4", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "nss-tools", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": 
"openssl-libs", + "source": "rpm", + "epoch": 1, + "version": "1.0.1e", + "release": "34.el7", + "arch": "x86_64" + }, + { + "name": "gzip", + "source": "rpm", + "epoch": null, + "version": "1.5", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "cracklib-dicts", + "source": "rpm", + "epoch": null, + "version": "2.9.0", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "nss", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "libuser", + "source": "rpm", + "epoch": null, + "version": "0.60", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "coreutils", + "source": "rpm", + "epoch": null, + "version": "8.22", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libblkid", + "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "libutempter", + "source": "rpm", + "epoch": null, + "version": "1.1.6", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "nss-sysinit", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "python", + "source": "rpm", + "epoch": null, + "version": "2.7.5", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "dbus-python", + "source": "rpm", + "epoch": null, + "version": "1.1.1", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "pygobject3-base", + "source": "rpm", + "epoch": null, + "version": "3.8.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "python-slip", + "source": "rpm", + "epoch": null, + "version": "0.4.0", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "python-iniparse", + "source": "rpm", + "epoch": null, + "version": "0.4", + "release": "9.el7", + "arch": "noarch" + }, + { + "name": "newt-python", + "source": "rpm", + "epoch": null, + "version": "0.52.15", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "python-configobj", + "source": "rpm", + "epoch": null, + "version": "4.7.2", + "release": "7.el7", + "arch": "noarch" + }, + { + "name": "python-backports", + "source": "rpm", + "epoch": null, + "version": "1.0", + "release": "6.el7", + "arch": "noarch" + }, + { + "name": "python-setuptools", + "source": "rpm", + "epoch": null, + "version": "0.9.8", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "grubby", + "source": "rpm", + "epoch": null, + "version": "8.28", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "kmod", + "source": "rpm", + "epoch": null, + "version": "14", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "openssl", + "source": "rpm", + "epoch": 1, + "version": "1.0.1e", + "release": "34.el7", + "arch": "x86_64" + }, + { + "name": "plymouth-core-libs", + "source": "rpm", + "epoch": null, + "version": "0.8.9", + "release": "0.10.20140113.el7.centos", + "arch": "x86_64" + }, + { + "name": "libssh2", + "source": "rpm", + "epoch": null, + "version": "1.4.3", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "python-pycurl", + "source": "rpm", + "epoch": null, + "version": "7.19.0", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "curl", + "source": "rpm", + "epoch": null, + "version": "7.29.0", + "release": "19.el7", + "arch": "x86_64" + }, + { + "name": "rpm", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "selinux-policy", + "source": "rpm", + "epoch": null, + "version": "3.12.1", + "release": "153.el7", + 
"arch": "noarch" + }, + { + "name": "fipscheck-lib", + "source": "rpm", + "epoch": null, + "version": "1.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "openssh", + "source": "rpm", + "epoch": null, + "version": "6.4p1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "dmidecode", + "source": "rpm", + "epoch": 1, + "version": "2.12", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libmnl", + "source": "rpm", + "epoch": null, + "version": "1.0.3", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "iptables", + "source": "rpm", + "epoch": null, + "version": "1.4.21", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "libpcap", + "source": "rpm", + "epoch": 14, + "version": "1.5.3", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "device-mapper", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "cryptsetup-libs", + "source": "rpm", + "epoch": null, + "version": "1.6.3", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "dbus", + "source": "rpm", + "epoch": 1, + "version": "1.6.12", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libgudev1", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "initscripts", + "source": "rpm", + "epoch": null, + "version": "9.49.17", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "polkit", + "source": "rpm", + "epoch": null, + "version": "0.112", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "os-prober", + "source": "rpm", + "epoch": null, + "version": "1.58", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "cronie", + "source": "rpm", + "epoch": null, + "version": "1.4.11", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "grub2-tools", + "source": "rpm", + "epoch": 1, + "version": "2.02", + "release": "0.2.10.el7.centos.1", + "arch": "x86_64" + }, + { + "name": "lvm2-libs", + "source": "rpm", + "epoch": 7, + "version": "2.02.105", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "avahi", + "source": "rpm", + "epoch": null, + "version": "0.6.31", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "wpa_supplicant", + "source": "rpm", + "epoch": 1, + "version": "2.0", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "ppp", + "source": "rpm", + "epoch": null, + "version": "2.4.5", + "release": "33.el7", + "arch": "x86_64" + }, + { + "name": "hwdata", + "source": "rpm", + "epoch": null, + "version": "0.252", + "release": "7.3.el7", + "arch": "noarch" + }, + { + "name": "libdrm", + "source": "rpm", + "epoch": null, + "version": "2.4.50", + "release": "1.1.el7", + "arch": "x86_64" + }, + { + "name": "alsa-firmware", + "source": "rpm", + "epoch": null, + "version": "1.0.27", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "kpartx", + "source": "rpm", + "epoch": null, + "version": "0.4.9", + "release": "66.el7", + "arch": "x86_64" + }, + { + "name": "pth", + "source": "rpm", + "epoch": null, + "version": "2.0.7", + "release": "22.el7", + "arch": "x86_64" + }, + { + "name": "rpm-build-libs", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "gpgme", + "source": "rpm", + "epoch": null, + "version": "1.3.2", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "yum-plugin-fastestmirror", + "source": "rpm", + "epoch": null, + "version": "1.1.31", + "release": "24.el7", + 
"arch": "noarch" + }, + { + "name": "kernel-tools-libs", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "123.el7", + "arch": "x86_64" + }, + { + "name": "dracut", + "source": "rpm", + "epoch": null, + "version": "033", + "release": "161.el7", + "arch": "x86_64" + }, + { + "name": "plymouth-scripts", + "source": "rpm", + "epoch": null, + "version": "0.8.9", + "release": "0.10.20140113.el7.centos", + "arch": "x86_64" + }, + { + "name": "jansson", + "source": "rpm", + "epoch": null, + "version": "2.4", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "NetworkManager", + "source": "rpm", + "epoch": 1, + "version": "0.9.9.1", + "release": "13.git20140326.4dba720.el7", + "arch": "x86_64" + }, + { + "name": "rsyslog", + "source": "rpm", + "epoch": null, + "version": "7.4.7", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "kexec-tools", + "source": "rpm", + "epoch": null, + "version": "2.0.4", + "release": "32.el7.centos", + "arch": "x86_64" + }, + { + "name": "grub2", + "source": "rpm", + "epoch": 1, + "version": "2.02", + "release": "0.2.10.el7.centos.1", + "arch": "x86_64" + }, + { + "name": "kernel-tools", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "123.el7", + "arch": "x86_64" + }, + { + "name": "firewalld", + "source": "rpm", + "epoch": null, + "version": "0.3.9", + "release": "7.el7", + "arch": "noarch" + } +] diff --git a/awx/main/tests/functional/services.json b/awx/main/tests/functional/services.json new file mode 100644 index 0000000000..a86bf4a875 --- /dev/null +++ b/awx/main/tests/functional/services.json @@ -0,0 +1,697 @@ +[ + { + "source": "sysv", + "state": "running", + "name": "iprdump" + }, + { + "source": "sysv", + "state": "running", + "name": "iprinit" + }, + { + "source": "sysv", + "state": "running", + "name": "iprupdate" + }, + { + "source": "sysv", + "state": "stopped", + "name": "netconsole" + }, + { + "source": "sysv", + "state": "running", + "name": "network" + }, + { + "source": "systemd", + "state": "stopped", + "name": "arp-ethers.service" + }, + { + "source": "systemd", + "state": "running", + "name": "auditd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "autovt@.service" + }, + { + "source": "systemd", + "state": "running", + "name": "avahi-daemon.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "blk-availability.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "brandbot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "console-getty.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "console-shell.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "cpupower.service" + }, + { + "source": "systemd", + "state": "running", + "name": "crond.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dbus-org.fedoraproject.FirewallD1.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dbus-org.freedesktop.Avahi.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.hostname1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.locale1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.login1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.machine1.service" + }, + { + "source": "systemd", + "state": "running", + "name": 
"dbus-org.freedesktop.NetworkManager.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dbus-org.freedesktop.nm-dispatcher.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.timedate1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "debug-shell.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dhcpd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dhcpd6.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dhcrelay.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dm-event.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dnsmasq.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-cmdline.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-initqueue.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-mount.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-mount.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-pivot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-trigger.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-udev.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-shutdown.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "ebtables.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "emergency.service" + }, + { + "source": "systemd", + "state": "running", + "name": "firewalld.service" + }, + { + "source": "systemd", + "state": "running", + "name": "getty@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "halt-local.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-cleanup.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-parse-etc.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-switch-root.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-udevadm-cleanup-db.service" + }, + { + "source": "systemd", + "state": "running", + "name": "irqbalance.service" + }, + { + "source": "systemd", + "state": "running", + "name": "kdump.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "kmod-static-nodes.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "lvm2-lvmetad.service" + }, + { + "source": "systemd", + "state": "running", + "name": "lvm2-monitor.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "lvm2-pvscan@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "messagebus.service" + }, + { + "source": "systemd", + "state": "running", + "name": "microcode.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "named-setup-rndc.service" + }, + { + "source": "systemd", + "state": "running", + "name": "named.service" + }, + { + "source": "systemd", + "state": "running", + "name": "NetworkManager-dispatcher.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "NetworkManager-wait-online.service" + }, + { + "source": "systemd", + "state": "running", + "name": "NetworkManager.service" + }, + { + "source": "systemd", + "state": "running", + 
"name": "ntpd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "ntpdate.service" + }, + { + "source": "systemd", + "state": "running", + "name": "openvpn@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-halt.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-kexec.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-poweroff.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-quit-wait.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-quit.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-read-write.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-reboot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-start.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-switch-root.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "polkit.service" + }, + { + "source": "systemd", + "state": "running", + "name": "postfix.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "quotaon.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rc-local.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rdisc.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rescue.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-autorelabel-mark.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-autorelabel.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-configure.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-dmesg.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-domainname.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-import-state.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-loadmodules.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-readonly.service" + }, + { + "source": "systemd", + "state": "running", + "name": "rsyslog.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "serial-getty@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "sshd-keygen.service" + }, + { + "source": "systemd", + "state": "running", + "name": "sshd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "sshd@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-ask-password-console.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-ask-password-plymouth.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-ask-password-wall.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-backlight@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-binfmt.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-fsck-root.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-fsck@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-halt.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-hibernate.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": 
"systemd-hostnamed.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-hybrid-sleep.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-initctl.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-journal-flush.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-journald.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-kexec.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-localed.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-logind.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-machined.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-modules-load.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-nspawn@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-poweroff.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-quotacheck.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-random-seed.service" + }, + { + "source": "systemd", + "state": "running", + "name": "systemd-readahead-collect.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-readahead-done.service" + }, + { + "source": "systemd", + "state": "running", + "name": "systemd-readahead-drop.service" + }, + { + "source": "systemd", + "state": "running", + "name": "systemd-readahead-replay.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-reboot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-remount-fs.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-shutdownd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-suspend.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-sysctl.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-timedated.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-tmpfiles-clean.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-tmpfiles-setup-dev.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-tmpfiles-setup.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-udev-settle.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-udev-trigger.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-udevd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-update-utmp-runlevel.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-update-utmp.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-user-sessions.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-vconsole-setup.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "teamd@.service" + }, + { + "source": "systemd", + "state": "running", + "name": "tuned.service" + }, + { + "source": "systemd", + "state": "running", + "name": "vmtoolsd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "wpa_supplicant.service" + } +] diff --git a/awx/main/tests/functional/test_notifications.py 
b/awx/main/tests/functional/test_notifications.py new file mode 100644 index 0000000000..ffa6027f73 --- /dev/null +++ b/awx/main/tests/functional/test_notifications.py @@ -0,0 +1,115 @@ +import mock +import pytest + +from awx.main.models.notifications import Notifier +from awx.main.models.inventory import Inventory, Group +from awx.main.models.jobs import JobTemplate + +from django.core.urlresolvers import reverse + +@pytest.fixture +def notifier(): + return Notifier.objects.create(name="test-notification", + notification_type="webhook", + notification_configuration=dict(url="http://localhost", + headers={"Test": "Header"})) + +@pytest.mark.django_db +def test_get_notifier_list(get, user, notifier): + url = reverse('api:notifier_list') + response = get(url, user('admin', True)) + assert response.status_code == 200 + assert len(response.data['results']) == 1 + +@pytest.mark.django_db +def test_basic_parameterization(get, post, user, organization): + u = user('admin-poster', True) + url = reverse('api:notifier_list') + response = post(url, + dict(name="test-webhook", + description="test webhook", + organization=organization.id, + notification_type="webhook", + notification_configuration=dict(url="http://localhost", + headers={"Test": "Header"})), + u) + assert response.status_code == 201 + url = reverse('api:notifier_detail', args=(response.data['id'],)) + response = get(url, u) + assert 'related' in response.data + assert 'organization' in response.data['related'] + assert 'summary_fields' in response.data + assert 'organization' in response.data['summary_fields'] + assert 'notifications' in response.data['related'] + assert 'notification_configuration' in response.data + assert 'url' in response.data['notification_configuration'] + assert 'headers' in response.data['notification_configuration'] + +@pytest.mark.django_db +def test_encrypted_subfields(get, post, user, organization): + def assert_send(self, messages): + assert self.account_token == "shouldhide" + return 1 + u = user('admin-poster', True) + url = reverse('api:notifier_list') + response = post(url, + dict(name="test-twilio", + description="test twilio", + organization=organization.id, + notification_type="twilio", + notification_configuration=dict(account_sid="dummy", + account_token="shouldhide", + from_number="+19999999999", + to_numbers=["9998887777"])), + u) + assert response.status_code == 201 + notifier_actual = Notifier.objects.get(id=response.data['id']) + url = reverse('api:notifier_detail', args=(response.data['id'],)) + response = get(url, u) + assert response.data['notification_configuration']['account_token'] == "$encrypted$" + with mock.patch.object(notifier_actual.notification_class, "send_messages", assert_send): + notifier_actual.send("Test", {'body': "Test"}) + +@pytest.mark.django_db +def test_inherited_notifiers(get, post, user, organization, project): + u = user('admin-poster', True) + url = reverse('api:notifier_list') + notifiers = [] + for nfiers in xrange(3): + response = post(url, + dict(name="test-webhook-{}".format(nfiers), + description="test webhook {}".format(nfiers), + organization=organization.id, + notification_type="webhook", + notification_configuration=dict(url="http://localhost", + headers={"Test": "Header"})), + u) + assert response.status_code == 201 + notifiers.append(response.data['id']) + organization.projects.add(project) + i = Inventory.objects.create(name='test', organization=organization) + i.save() + g = Group.objects.create(name='test', inventory=i) + g.save() + jt = JobTemplate.objects.create(name='test',
inventory=i, project=project, playbook='debug.yml') + jt.save() + url = reverse('api:organization_notifiers_any_list', args=(organization.id,)) + response = post(url, dict(id=notifiers[0]), u) + assert response.status_code == 204 + url = reverse('api:project_notifiers_any_list', args=(project.id,)) + response = post(url, dict(id=notifiers[1]), u) + assert response.status_code == 204 + url = reverse('api:job_template_notifiers_any_list', args=(jt.id,)) + response = post(url, dict(id=notifiers[2]), u) + assert response.status_code == 204 + assert len(jt.notifiers['any']) == 3 + assert len(project.notifiers['any']) == 2 + assert len(g.inventory_source.notifiers['any']) == 1 + +@pytest.mark.django_db +def test_notifier_merging(get, post, user, organization, project, notifier): + user('admin-poster', True) + organization.projects.add(project) + organization.notifiers_any.add(notifier) + project.notifiers_any.add(notifier) + assert len(project.notifiers['any']) == 1 diff --git a/awx/main/tests/old/ad_hoc.py b/awx/main/tests/old/ad_hoc.py index 104c67d1fa..a912f7a89b 100644 --- a/awx/main/tests/old/ad_hoc.py +++ b/awx/main/tests/old/ad_hoc.py @@ -128,8 +128,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest): self.assertFalse(ad_hoc_command.passwords_needed_to_start) self.assertTrue(ad_hoc_command.signal_start()) ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk) - self.check_job_result(ad_hoc_command, 'failed') - self.check_ad_hoc_command_events(ad_hoc_command, 'unreachable') + self.check_job_result(ad_hoc_command, 'successful') + self.check_ad_hoc_command_events(ad_hoc_command, 'skipped') @mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('canceled', 0)) def test_cancel_ad_hoc_command(self, ignore): diff --git a/awx/main/tests/old/commands/run_fact_cache_receiver.py b/awx/main/tests/old/commands/run_fact_cache_receiver.py deleted file mode 100644 index 7dedf7657a..0000000000 --- a/awx/main/tests/old/commands/run_fact_cache_receiver.py +++ /dev/null @@ -1,221 +0,0 @@ -# Copyright (c) 2015 Ansible, Inc. 
-# All Rights Reserved - -# Python -import time -from datetime import datetime -import mock -import unittest2 as unittest -from copy import deepcopy -from mock import MagicMock - -# AWX -from awx.main.tests.base import BaseTest -from awx.fact.tests.base import MongoDBRequired -from command_base import BaseCommandMixin -from awx.main.management.commands.run_fact_cache_receiver import FactCacheReceiver -from awx.fact.models.fact import * # noqa - -__all__ = ['RunFactCacheReceiverUnitTest', 'RunFactCacheReceiverFunctionalTest'] - -TEST_MSG_BASE = { - 'host': 'hostname1', - 'date_key': time.mktime(datetime.utcnow().timetuple()), - 'facts' : { }, - 'inventory_id': 1 -} - -TEST_MSG_MODULES = { - 'packages': { - "accountsservice": [ - { - "architecture": "amd64", - "name": "accountsservice", - "source": "apt", - "version": "0.6.35-0ubuntu7.1" - } - ], - "acpid": [ - { - "architecture": "amd64", - "name": "acpid", - "source": "apt", - "version": "1:2.0.21-1ubuntu2" - } - ], - "adduser": [ - { - "architecture": "all", - "name": "adduser", - "source": "apt", - "version": "3.113+nmu3ubuntu3" - } - ], - }, - 'services': [ - { - "name": "acpid", - "source": "sysv", - "state": "running" - }, - { - "name": "apparmor", - "source": "sysv", - "state": "stopped" - }, - { - "name": "atd", - "source": "sysv", - "state": "running" - }, - { - "name": "cron", - "source": "sysv", - "state": "running" - } - ], - 'ansible': { - 'ansible_fact_simple': 'hello world', - 'ansible_fact_complex': { - 'foo': 'bar', - 'hello': [ - 'scooby', - 'dooby', - 'doo' - ] - }, - } -} -# Derived from TEST_MSG_BASE -TEST_MSG = dict(TEST_MSG_BASE) - -TEST_MSG_LARGE = {u'ansible_product_version': u'To Be Filled By O.E.M.', u'ansible_memory_mb': {u'real': {u'total': 32062, u'used': 8079, u'free': 23983}, u'swap': {u'cached': 0, u'total': 0, u'used': 0, u'free': 0}, u'nocache': {u'used': 4339, u'free': 27723}}, u'ansible_user_dir': u'/root', u'ansible_userspace_bits': u'64', u'ansible_distribution_version': u'14.04', u'ansible_virtualization_role': u'guest', u'ansible_env': {u'ANSIBLE_PARAMIKO_RECORD_HOST_KEYS': u'False', u'LC_CTYPE': u'en_US.UTF-8', u'JOB_CALLBACK_DEBUG': u'1', u'_MP_FORK_LOGFILE_': u'', u'HOME': u'/', u'REST_API_TOKEN': u'122-5deb0d6fcec85f3bf44fec6ce170600c', u'LANG': u'en_US.UTF-8', u'SHELL': u'/bin/bash', u'_MP_FORK_LOGFORMAT_': u'[%(asctime)s: %(levelname)s/%(processName)s] %(message)s', u'_': u'/usr/bin/make', u'DJANGO_PROJECT_DIR': u'/tower_devel', u'MFLAGS': u'-w', u'JOB_ID': u'122', u'PYTHONPATH': u'/tower_devel/awx/lib/site-packages:', u'_MP_FORK_LOGLEVEL_': u'10', u'ANSIBLE_CACHE_PLUGIN_CONNECTION': u'tcp://127.0.0.1:6564', u'ANSIBLE_LIBRARY': u'/tower_devel/awx/plugins/library', u'CELERY_LOG_LEVEL': u'10', u'HOSTNAME': u'2842b3619fa8', u'MAKELEVEL': u'2', u'TMUX_PANE': u'%1', u'DJANGO_LIVE_TEST_SERVER_ADDRESS': u'localhost:9013-9199', u'CELERY_LOG_REDIRECT': u'1', u'PATH': u'/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin', u'CALLBACK_CONSUMER_PORT': u'tcp://127.0.0.1:5557', u'MAKEFLAGS': u'w', u'ANSIBLE_CALLBACK_PLUGINS': u'/tower_devel/awx/plugins/callback', u'TERM': u'screen', u'TZ': u'America/New_York', u'LANGUAGE': u'en_US:en', u'ANSIBLE_SSH_CONTROL_PATH': u'/tmp/ansible_tower_y3xGdA/cp/ansible-ssh-%%h-%%p-%%r', u'SHLVL': u'1', u'CELERY_LOG_FILE': u'', u'ANSIBLE_HOST_KEY_CHECKING': u'False', u'TMUX': u'/tmp/tmux-0/default,3719,0', u'CELERY_LOADER': u'djcelery.loaders.DjangoLoader', u'LC_ALL': u'en_US.UTF-8', u'ANSIBLE_FORCE_COLOR': u'True', u'REST_API_URL': u'http://127.0.0.1:8013', 
u'CELERY_LOG_REDIRECT_LEVEL': u'WARNING', u'INVENTORY_HOSTVARS': u'True', u'ANSIBLE_CACHE_PLUGIN': u'tower', u'INVENTORY_ID': u'1', u'PWD': u'/tower_devel/awx/playbooks', u'DJANGO_SETTINGS_MODULE': u'awx.settings.development', u'ANSIBLE_CACHE_PLUGINS': u'/tower_devel/awx/plugins/fact_caching'}, u'ansible_lo': {u'mtu': 65536, u'device': u'lo', u'promisc': False, u'ipv4': {u'netmask': u'255.0.0.0', u'network': u'127.0.0.0', u'address': u'127.0.0.1'}, u'ipv6': [{u'scope': u'host', u'prefix': u'128', u'address': u'::1'}], u'active': True, u'type': u'loopback'}, u'ansible_memtotal_mb': 32062, u'ansible_architecture': u'x86_64', u'ansible_default_ipv4': {u'alias': u'eth0', u'netmask': u'255.255.0.0', u'macaddress': u'02:42:ac:11:00:01', u'network': u'172.17.0.0', u'address': u'172.17.0.1', u'interface': u'eth0', u'type': u'ether', u'gateway': u'172.17.42.1', u'mtu': 1500}, u'ansible_swapfree_mb': 0, u'ansible_default_ipv6': {}, u'ansible_cmdline': {u'nomodeset': True, u'rw': True, u'initrd': u'EFIarchinitramfs-arch.img', u'rootfstype': u'ext4', u'root': u'/dev/sda4', u'systemd.unit': u'graphical.target'}, u'ansible_selinux': False, u'ansible_userspace_architecture': u'x86_64', u'ansible_product_uuid': u'00020003-0004-0005-0006-000700080009', u'ansible_pkg_mgr': u'apt', u'ansible_memfree_mb': 23983, u'ansible_distribution': u'Ubuntu', u'ansible_processor_count': 1, u'ansible_hostname': u'2842b3619fa8', u'ansible_all_ipv6_addresses': [u'fe80::42:acff:fe11:1'], u'ansible_interfaces': [u'lo', u'eth0'], u'ansible_kernel': u'4.0.1-1-ARCH', u'ansible_fqdn': u'2842b3619fa8', u'ansible_mounts': [{u'uuid': u'NA', u'size_total': 10434699264, u'mount': u'/', u'size_available': 4918865920, u'fstype': u'ext4', u'device': u'/dev/mapper/docker-8:4-18219321-2842b3619fa885d19e47302009754a4bfd54c1b32c7f21e98f38c7fe7412d3d0', u'options': u'rw,relatime,discard,stripe=16,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/tower_devel', u'size_available': 240166572032, u'fstype': u'ext4', u'device': u'/dev/sda4', u'options': u'rw,relatime,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/etc/resolv.conf', u'size_available': 240166572032, u'fstype': u'ext4', u'device': u'/dev/sda4', u'options': u'rw,relatime,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/etc/hostname', u'size_available': 240166572032, u'fstype': u'ext4', u'device': u'/dev/sda4', u'options': u'rw,relatime,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/etc/hosts', u'size_available': 240166572032, u'fstype': u'ext4', u'device': u'/dev/sda4', u'options': u'rw,relatime,data=ordered'}], u'ansible_user_shell': u'/bin/bash', u'ansible_nodename': u'2842b3619fa8', u'ansible_product_serial': u'To Be Filled By O.E.M.', u'ansible_form_factor': u'Desktop', u'ansible_fips': False, u'ansible_user_id': u'root', u'ansible_domain': u'', u'ansible_date_time': {u'month': u'05', u'second': u'47', u'iso8601_micro': u'2015-05-01T19:46:47.868456Z', u'year': u'2015', u'date': u'2015-05-01', u'iso8601': u'2015-05-01T19:46:47Z', u'day': u'01', u'minute': u'46', u'tz': u'EDT', u'hour': u'15', u'tz_offset': u'-0400', u'epoch': u'1430509607', u'weekday': u'Friday', u'time': u'15:46:47'}, u'ansible_processor_cores': 4, u'ansible_processor_vcpus': 4, u'ansible_bios_version': u'P1.80', u'ansible_processor': [u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 2.90GHz', u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 2.90GHz', u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 
2.90GHz', u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 2.90GHz'], u'ansible_virtualization_type': u'docker', u'ansible_distribution_release': u'trusty', u'ansible_system_vendor': u'To Be Filled By O.E.M.', u'ansible_os_family': u'Debian', u'ansible_user_gid': 0, u'ansible_swaptotal_mb': 0, u'ansible_system': u'Linux', u'ansible_devices': {u'sda': {u'sectorsize': u'4096', u'vendor': u'ATA', u'host': u'', u'support_discard': u'0', u'model': u'ST1000DM003-9YN1', u'size': u'7.28 TB', u'scheduler_mode': u'cfq', u'rotational': u'1', u'sectors': u'1953525168', u'removable': u'0', u'holders': [], u'partitions': {u'sda4': {u'start': u'820979712', u'sectorsize': 512, u'sectors': u'1132545423', u'size': u'540.04 GB'}, u'sda2': {u'start': u'206848', u'sectorsize': 512, u'sectors': u'262144', u'size': u'128.00 MB'}, u'sda3': {u'start': u'468992', u'sectorsize': 512, u'sectors': u'820510720', u'size': u'391.25 GB'}, u'sda1': {u'start': u'2048', u'sectorsize': 512, u'sectors': u'204800', u'size': u'100.00 MB'}}}}, u'ansible_user_uid': 0, u'ansible_distribution_major_version': u'14', u'ansible_lsb': {u'major_release': u'14', u'release': u'14.04', u'codename': u'trusty', u'description': u'Ubuntu 14.04.1 LTS', u'id': u'Ubuntu'}, u'ansible_bios_date': u'12/05/2012', u'ansible_machine': u'x86_64', u'ansible_user_gecos': u'root', u'ansible_processor_threads_per_core': 1, u'ansible_eth0': {u'device': u'eth0', u'promisc': False, u'macaddress': u'02:42:ac:11:00:01', u'ipv4': {u'netmask': u'255.255.0.0', u'network': u'172.17.0.0', u'address': u'172.17.0.1'}, u'ipv6': [{u'scope': u'link', u'prefix': u'64', u'address': u'fe80::42:acff:fe11:1'}], u'active': True, u'type': u'ether', u'mtu': 1500}, u'ansible_product_name': u'To Be Filled By O.E.M.', u'ansible_all_ipv4_addresses': [u'172.17.0.1'], u'ansible_python_version': u'2.7.6'} # noqa - -def copy_only_module(data, module): - data = deepcopy(data) - data['facts'] = {} - if module == 'ansible': - data['facts'] = deepcopy(TEST_MSG_MODULES[module]) - else: - data['facts'][module] = deepcopy(TEST_MSG_MODULES[module]) - return data - - -class RunFactCacheReceiverFunctionalTest(BaseCommandMixin, BaseTest, MongoDBRequired): - @unittest.skip('''\ -TODO: run_fact_cache_receiver enters a while True loop that never exists. \ -This differs from most other commands that we test for. 
More logic and work \ -would be required to invoke this case from the command line with little return \ -in terms of increase coverage and confidence.''') - def test_invoke(self): - result, stdout, stderr = self.run_command('run_fact_cache_receiver') - self.assertEqual(result, None) - -class RunFactCacheReceiverUnitTest(BaseTest, MongoDBRequired): - - # TODO: Check that timestamp and other attributes are as expected - def check_process_fact_message_module(self, data, module): - fact_found = None - facts = Fact.objects.all() - self.assertEqual(len(facts), 1) - for fact in facts: - if fact.module == module: - fact_found = fact - break - self.assertIsNotNone(fact_found) - #self.assertEqual(data['facts'][module], fact_found[module]) - - fact_found = None - fact_versions = FactVersion.objects.all() - self.assertEqual(len(fact_versions), 1) - for fact in fact_versions: - if fact.module == module: - fact_found = fact - break - self.assertIsNotNone(fact_found) - - - # Ensure that the message flows from the socket through to process_fact_message() - @mock.patch('awx.main.socket.Socket.listen') - def test_run_receiver(self, listen_mock): - listen_mock.return_value = [TEST_MSG] - - receiver = FactCacheReceiver() - receiver.process_fact_message = MagicMock(name='process_fact_message') - receiver.run_receiver(use_processing_threads=False) - - receiver.process_fact_message.assert_called_once_with(TEST_MSG) - - def test_process_fact_message_ansible(self): - data = copy_only_module(TEST_MSG, 'ansible') - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - self.check_process_fact_message_module(data, 'ansible') - - def test_process_fact_message_packages(self): - data = copy_only_module(TEST_MSG, 'packages') - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - self.check_process_fact_message_module(data, 'packages') - - def test_process_fact_message_services(self): - data = copy_only_module(TEST_MSG, 'services') - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - self.check_process_fact_message_module(data, 'services') - - - # Ensure that only a single host gets created for multiple invocations with the same hostname - def test_process_fact_message_single_host_created(self): - receiver = FactCacheReceiver() - - data = deepcopy(TEST_MSG) - receiver.process_fact_message(data) - data = deepcopy(TEST_MSG) - data['date_key'] = time.mktime(datetime.utcnow().timetuple()) - receiver.process_fact_message(data) - - fact_hosts = FactHost.objects.all() - self.assertEqual(len(fact_hosts), 1) - - def test_process_facts_message_ansible_overwrite(self): - data = copy_only_module(TEST_MSG, 'ansible') - key = 'ansible.overwrite' - value = 'hello world' - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - fact = Fact.objects.all()[0] - - data = copy_only_module(TEST_MSG, 'ansible') - data['facts'][key] = value - receiver.process_fact_message(data) - - fact = Fact.objects.get(id=fact.id) - self.assertIn(key, fact.fact) - self.assertEqual(fact.fact[key], value) - self.assertEqual(fact.fact, data['facts']) - - def test_large_overwrite(self): - data = deepcopy(TEST_MSG_BASE) - data['facts'] = { - 'ansible': {} - } - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - fact = Fact.objects.all()[0] - - data['facts']['ansible'] = TEST_MSG_LARGE - receiver.process_fact_message(data) - - fact = Fact.objects.get(id=fact.id) - self.assertEqual(fact.fact, data['facts']['ansible']) diff --git 
a/awx/main/tests/old/fact/fact_api.py b/awx/main/tests/old/fact/fact_api.py deleted file mode 100644 index d13b17f060..0000000000 --- a/awx/main/tests/old/fact/fact_api.py +++ /dev/null @@ -1,242 +0,0 @@ -# Copyright (c) 2015 Ansible, Inc. -# All Rights Reserved - -# Python -import unittest2 as unittest - -# Django -from django.core.urlresolvers import reverse - -# AWX -from awx.main.utils import timestamp_apiformat -from awx.main.models import * # noqa -from awx.main.tests.base import BaseLiveServerTest -from awx.fact.models import * # noqa -from awx.fact.tests.base import BaseFactTestMixin, FactScanBuilder, TEST_FACT_ANSIBLE, TEST_FACT_PACKAGES, TEST_FACT_SERVICES -from awx.main.utils import build_url - -__all__ = ['FactVersionApiTest', 'FactViewApiTest', 'SingleFactApiTest',] - -class FactApiBaseTest(BaseLiveServerTest, BaseFactTestMixin): - def setUp(self): - super(FactApiBaseTest, self).setUp() - self.create_test_license_file() - self.setup_instances() - self.setup_users() - self.organization = self.make_organization(self.super_django_user) - self.organization.admins.add(self.normal_django_user) - self.inventory = self.organization.inventories.create(name='test-inventory', description='description for test-inventory') - self.host = self.inventory.hosts.create(name='host.example.com') - self.host2 = self.inventory.hosts.create(name='host2.example.com') - self.host3 = self.inventory.hosts.create(name='host3.example.com') - - def setup_facts(self, scan_count): - self.builder = FactScanBuilder() - self.builder.set_inventory_id(self.inventory.pk) - self.builder.add_fact('ansible', TEST_FACT_ANSIBLE) - self.builder.add_fact('packages', TEST_FACT_PACKAGES) - self.builder.add_fact('services', TEST_FACT_SERVICES) - self.builder.add_hostname('host.example.com') - self.builder.add_hostname('host2.example.com') - self.builder.add_hostname('host3.example.com') - self.builder.build(scan_count=scan_count, host_count=3) - - self.fact_host = FactHost.objects.get(hostname=self.host.name) - -class FactVersionApiTest(FactApiBaseTest): - def check_equal(self, fact_versions, results): - def find(element, set1): - for e in set1: - if all([ e.get(field) == element.get(field) for field in element.keys()]): - return e - return None - - self.assertEqual(len(results), len(fact_versions)) - for v in fact_versions: - v_dict = { - 'timestamp': timestamp_apiformat(v.timestamp), - 'module': v.module - } - e = find(v_dict, results) - self.assertIsNotNone(e, "%s not found in %s" % (v_dict, results)) - - def get_list(self, fact_versions, params=None): - url = build_url('api:host_fact_versions_list', args=(self.host.pk,), get=params) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - - self.check_equal(fact_versions, response['results']) - return response - - def test_permission_list(self): - url = reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user('admin'): - self.get(url, expect=200) - with self.current_user('normal'): - self.get(url, expect=200) - with self.current_user('other'): - self.get(url, expect=403) - with self.current_user('nobody'): - self.get(url, expect=403) - with self.current_user(None): - self.get(url, expect=401) - - def test_list_empty(self): - url = reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - self.assertIn('results', response) - self.assertIsInstance(response['results'], list) - self.assertEqual(len(response['results']), 
0) - - def test_list_related_fact_view(self): - self.setup_facts(2) - url = reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - for entry in response['results']: - self.assertIn('fact_view', entry['related']) - self.get(entry['related']['fact_view'], expect=200) - - def test_list(self): - self.setup_facts(2) - self.get_list(FactVersion.objects.filter(host=self.fact_host)) - - def test_list_module(self): - self.setup_facts(10) - self.get_list(FactVersion.objects.filter(host=self.fact_host, module='packages'), dict(module='packages')) - - def test_list_time_from(self): - self.setup_facts(10) - - params = { - 'from': timestamp_apiformat(self.builder.get_timestamp(1)), - } - # 'to': timestamp_apiformat(self.builder.get_timestamp(3)) - fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from']) - self.get_list(fact_versions, params) - - def test_list_time_to(self): - self.setup_facts(10) - - params = { - 'to': timestamp_apiformat(self.builder.get_timestamp(3)) - } - fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__lte=params['to']) - self.get_list(fact_versions, params) - - def test_list_time_from_to(self): - self.setup_facts(10) - - params = { - 'from': timestamp_apiformat(self.builder.get_timestamp(1)), - 'to': timestamp_apiformat(self.builder.get_timestamp(3)) - } - fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from'], timestamp__lte=params['to']) - self.get_list(fact_versions, params) - - -class FactViewApiTest(FactApiBaseTest): - def check_equal(self, fact_obj, results): - fact_dict = { - 'timestamp': timestamp_apiformat(fact_obj.timestamp), - 'module': fact_obj.module, - 'host': { - 'hostname': fact_obj.host.hostname, - 'inventory_id': fact_obj.host.inventory_id, - 'id': str(fact_obj.host.id) - }, - 'fact': fact_obj.fact - } - self.assertEqual(fact_dict, results) - - def test_permission_view(self): - url = reverse('api:host_fact_compare_view', args=(self.host.pk,)) - with self.current_user('admin'): - self.get(url, expect=200) - with self.current_user('normal'): - self.get(url, expect=200) - with self.current_user('other'): - self.get(url, expect=403) - with self.current_user('nobody'): - self.get(url, expect=403) - with self.current_user(None): - self.get(url, expect=401) - - def get_fact(self, fact_obj, params=None): - url = build_url('api:host_fact_compare_view', args=(self.host.pk,), get=params) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - - self.check_equal(fact_obj, response) - - def test_view(self): - self.setup_facts(2) - self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible').order_by('-timestamp')[0]) - - def test_view_module_filter(self): - self.setup_facts(2) - self.get_fact(Fact.objects.filter(host=self.fact_host, module='services').order_by('-timestamp')[0], dict(module='services')) - - def test_view_time_filter(self): - self.setup_facts(6) - ts = self.builder.get_timestamp(3) - self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible', timestamp__lte=ts).order_by('-timestamp')[0], - dict(datetime=ts)) - - -@unittest.skip("single fact query needs to be updated to use inventory_id attribute on host document") -class SingleFactApiTest(FactApiBaseTest): - def setUp(self): - super(SingleFactApiTest, self).setUp() - - self.group = self.inventory.groups.create(name='test-group') - 
self.group.hosts.add(self.host, self.host2, self.host3) - - def test_permission_list(self): - url = reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user('admin'): - self.get(url, expect=200) - with self.current_user('normal'): - self.get(url, expect=200) - with self.current_user('other'): - self.get(url, expect=403) - with self.current_user('nobody'): - self.get(url, expect=403) - with self.current_user(None): - self.get(url, expect=401) - - def _test_related(self, url): - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - self.assertTrue(len(response['results']) > 0) - for entry in response['results']: - self.assertIn('single_fact', entry['related']) - # Requires fields - self.get(entry['related']['single_fact'], expect=400) - - def test_related_host_list(self): - self.setup_facts(2) - self._test_related(reverse('api:host_list')) - - def test_related_group_list(self): - self.setup_facts(2) - self._test_related(reverse('api:group_list')) - - def test_related_inventory_list(self): - self.setup_facts(2) - self._test_related(reverse('api:inventory_list')) - - def test_params(self): - self.setup_facts(2) - params = { - 'module': 'packages', - 'fact_key': 'name', - 'fact_value': 'acpid', - } - url = build_url('api:inventory_single_fact_view', args=(self.inventory.pk,), get=params) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - self.assertEqual(len(response['results']), 3) - for entry in response['results']: - self.assertEqual(entry['fact'][0]['name'], 'acpid') diff --git a/awx/main/utils.py b/awx/main/utils.py index 00bfc74608..96f8e2c0ff 100644 --- a/awx/main/utils.py +++ b/awx/main/utils.py @@ -139,12 +139,13 @@ def get_encryption_key(instance, field_name): h.update(field_name) return h.digest()[:16] - -def encrypt_field(instance, field_name, ask=False): +def encrypt_field(instance, field_name, ask=False, subfield=None): ''' Return content of the given instance and field name encrypted. ''' value = getattr(instance, field_name) + if isinstance(value, dict) and subfield is not None: + value = value[subfield] if not value or value.startswith('$encrypted$') or (ask and value == 'ASK'): return value value = smart_str(value) @@ -157,11 +158,13 @@ def encrypt_field(instance, field_name, ask=False): return '$encrypted$%s$%s' % ('AES', b64data) -def decrypt_field(instance, field_name): +def decrypt_field(instance, field_name, subfield=None): ''' Return content of the given instance and field name decrypted. ''' value = getattr(instance, field_name) + if isinstance(value, dict) and subfield is not None: + value = value[subfield] if not value or not value.startswith('$encrypted$'): return value algo, b64data = value[len('$encrypted$'):].split('$', 1) diff --git a/awx/plugins/callback/job_event_callback.py b/awx/plugins/callback/job_event_callback.py index 99573983b2..ddffcaf974 100644 --- a/awx/plugins/callback/job_event_callback.py +++ b/awx/plugins/callback/job_event_callback.py @@ -2,10 +2,10 @@ # This file is a utility Ansible plugin that is not part of the AWX or Ansible # packages. It does not import any code from either package, nor does its # license apply to Ansible or AWX. -# +# # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: -# +# # Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. 
# @@ -90,8 +90,12 @@ CENSOR_FIELD_WHITELIST=[ 'skip_reason', ] -def censor(obj): - if obj.get('_ansible_no_log', False): +def censor(obj, no_log=False): + if not isinstance(obj, dict): + if no_log: + return "the output has been hidden due to the fact that 'no_log: true' was specified for this result" + return obj + if obj.get('_ansible_no_log', no_log): new_obj = {} for k in CENSOR_FIELD_WHITELIST: if k in obj: @@ -104,8 +108,12 @@ def censor(obj): new_obj['censored'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result" obj = new_obj if 'results' in obj: - for i in xrange(len(obj['results'])): - obj['results'][i] = censor(obj['results'][i]) + if isinstance(obj['results'], list): + for i in xrange(len(obj['results'])): + obj['results'][i] = censor(obj['results'][i], obj.get('_ansible_no_log', no_log)) + elif obj.get('_ansible_no_log', False): + obj['results'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result" + return obj class TokenAuth(requests.auth.AuthBase): @@ -460,7 +468,7 @@ class JobCallbackModule(BaseCallbackModule): # this from a normal task self._log_event('playbook_on_task_start', task=task, name=task.get_name()) - + def playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None): @@ -529,6 +537,7 @@ class AdHocCommandCallbackModule(BaseCallbackModule): def __init__(self): self.ad_hoc_command_id = int(os.getenv('AD_HOC_COMMAND_ID', '0')) self.rest_api_path = '/api/v1/ad_hoc_commands/%d/events/' % self.ad_hoc_command_id + self.skipped_hosts = set() super(AdHocCommandCallbackModule, self).__init__() def _log_event(self, event, **event_data): @@ -539,6 +548,18 @@ class AdHocCommandCallbackModule(BaseCallbackModule): def runner_on_file_diff(self, host, diff): pass # Ignore file diff for ad hoc commands. + def runner_on_ok(self, host, res): + # When running in check mode using a module that does not support check + # mode, Ansible v1.9 will call runner_on_skipped followed by + # runner_on_ok for the same host; only capture the skipped event and + # ignore the ok event. + if host not in self.skipped_hosts: + super(AdHocCommandCallbackModule, self).runner_on_ok(host, res) + + def runner_on_skipped(self, host, item=None): + super(AdHocCommandCallbackModule, self).runner_on_skipped(host, item) + self.skipped_hosts.add(host) + if os.getenv('JOB_ID', ''): CallbackModule = JobCallbackModule diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 85a234314e..76381e5ac0 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -342,6 +342,10 @@ CELERYBEAT_SCHEDULE = { 'task': 'awx.main.tasks.tower_periodic_scheduler', 'schedule': timedelta(seconds=30) }, + 'admin_checks': { + 'task': 'awx.main.tasks.run_administrative_checks', + 'schedule': timedelta(days=30) + }, } # Social Auth configuration. 
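[Editor's note] The `subfield` parameter threaded through `encrypt_field`/`decrypt_field` in awx/main/utils.py above is what lets a dict-valued field such as `notification_configuration` keep most keys readable while encrypting only the sensitive ones; the twilio functional test earlier relies on this when it expects `account_token` to read back as `$encrypted$`. Below is a minimal sketch of that round trip, not the shipped implementation: base64 stands in for the AES step, the `ask` handling is omitted, and `FakeNotifier` is a hypothetical stand-in object.

    import base64

    PREFIX = '$encrypted$'

    def encrypt_field(instance, field_name, subfield=None):
        # Mirrors the control flow of the patched utils.encrypt_field:
        # when the field holds a dict, only the named subfield is encrypted.
        value = getattr(instance, field_name)
        if isinstance(value, dict) and subfield is not None:
            value = value[subfield]
        if not value or value.startswith(PREFIX):
            return value  # empty or already encrypted: return as-is
        # base64 is a readable stand-in for the AES step in awx.main.utils
        b64data = base64.b64encode(value.encode('utf-8')).decode('ascii')
        return '%sAES$%s' % (PREFIX, b64data)

    def decrypt_field(instance, field_name, subfield=None):
        value = getattr(instance, field_name)
        if isinstance(value, dict) and subfield is not None:
            value = value[subfield]
        if not value or not value.startswith(PREFIX):
            return value
        algo, b64data = value[len(PREFIX):].split('$', 1)
        return base64.b64decode(b64data).decode('utf-8')

    class FakeNotifier(object):
        # Hypothetical stand-in for a Notifier instance.
        def __init__(self):
            self.notification_configuration = {
                'account_sid': 'dummy',
                'account_token': 'shouldhide',
            }

    n = FakeNotifier()
    token = encrypt_field(n, 'notification_configuration', subfield='account_token')
    n.notification_configuration['account_token'] = token
    assert token.startswith(PREFIX)
    assert decrypt_field(n, 'notification_configuration',
                         subfield='account_token') == 'shouldhide'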
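[Editor's note] The reworked `censor()` in the callback plugin above threads a `no_log` flag down through nested `results`, so a parent task that sets `_ansible_no_log` also censors loop items that do not repeat the flag, and non-dict results are replaced with the boilerplate message outright. A condensed, runnable version of that control flow follows; `WHITELIST` here is an assumed, abridged subset of `CENSOR_FIELD_WHITELIST` (the hunk only shows the tail of that list).

    HIDDEN = ("the output has been hidden due to the fact that "
              "'no_log: true' was specified for this result")
    WHITELIST = ['item', 'skip_reason', '_ansible_no_log', 'results']

    def censor(obj, no_log=False):
        if not isinstance(obj, dict):
            # e.g. a bare string result: nothing to selectively keep
            return HIDDEN if no_log else obj
        if obj.get('_ansible_no_log', no_log):
            # keep only whitelisted keys, then mark the result censored
            new_obj = dict((k, obj[k]) for k in WHITELIST if k in obj)
            new_obj['censored'] = HIDDEN
            obj = new_obj
        if isinstance(obj.get('results'), list):
            # propagate the flag so loop items inherit the parent's no_log
            obj['results'] = [censor(r, obj.get('_ansible_no_log', no_log))
                              for r in obj['results']]
        elif obj.get('_ansible_no_log', False):
            obj['results'] = HIDDEN
        return obj

    event = {'_ansible_no_log': True,
             'results': [{'item': 1, 'stdout': 'secret'}]}
    out = censor(event)
    assert out['censored'] == HIDDEN
    assert out['results'][0] == {'item': 1, 'censored': HIDDEN}
    assert censor('raw output', no_log=True) == HIDDEN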
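[Editor's note] The `skipped_hosts` set added to `AdHocCommandCallbackModule` above works around Ansible v1.9 emitting `runner_on_skipped` followed by `runner_on_ok` for the same host when a module without check-mode support runs in check mode; it is also why the old ad hoc test earlier in this patch now expects a 'successful'/'skipped' outcome instead of 'failed'/'unreachable'. A stripped-down sketch, with a stub recorder standing in for `BaseCallbackModule`:

    class BaseCallback(object):
        # Stub standing in for BaseCallbackModule: just records events.
        def __init__(self):
            self.events = []
        def runner_on_ok(self, host, res):
            self.events.append(('ok', host))
        def runner_on_skipped(self, host, item=None):
            self.events.append(('skipped', host))

    class AdHocCallback(BaseCallback):
        def __init__(self):
            self.skipped_hosts = set()
            super(AdHocCallback, self).__init__()
        def runner_on_ok(self, host, res):
            # Ignore the spurious "ok" that follows "skipped" for the
            # same host under check mode on Ansible v1.9.
            if host not in self.skipped_hosts:
                super(AdHocCallback, self).runner_on_ok(host, res)
        def runner_on_skipped(self, host, item=None):
            super(AdHocCallback, self).runner_on_skipped(host, item)
            self.skipped_hosts.add(host)

    cb = AdHocCallback()
    cb.runner_on_skipped('host1')
    cb.runner_on_ok('host1', {})      # suppressed duplicate
    cb.runner_on_ok('host2', {})      # recorded normally
    assert cb.events == [('skipped', 'host1'), ('ok', 'host2')]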
@@ -677,6 +681,10 @@ FACT_CACHE_PORT = 6564 ORG_ADMINS_CAN_SEE_ALL_USERS = True +TOWER_ADMIN_ALERTS = True + +TOWER_URL_BASE = "https://towerhost" + TOWER_SETTINGS_MANIFEST = { "SCHEDULE_MAX_JOBS": { "name": "Maximum Scheduled Jobs", @@ -804,6 +812,20 @@ TOWER_SETTINGS_MANIFEST = { "type": "bool", "category": "system", }, + "TOWER_ADMIN_ALERTS": { + "name": "Enable Tower Administrator Alerts", + "description": "Allow Tower to email Admin users for system events that may require attention", + "default": TOWER_ADMIN_ALERTS, + "type": "bool", + "category": "system", + }, + "TOWER_URL_BASE": { + "name": "Base URL of the Tower host", + "description": "This is used by services like Notifications to render a valid url to the Tower host", + "default": TOWER_URL_BASE, + "type": "string", + "category": "system", + }, "LICENSE": { "name": "Tower License", "description": "Controls what features and functionality is enabled in Tower.", diff --git a/awx/settings/development.py b/awx/settings/development.py index a214ab4670..46df026e06 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -13,7 +13,6 @@ from split_settings.tools import optional, include # Load default settings. from defaults import * # NOQA - MONGO_HOST = '127.0.0.1' MONGO_PORT = 27017 MONGO_USERNAME = None @@ -66,10 +65,13 @@ PASSWORD_HASHERS = ( # Configure a default UUID for development only. SYSTEM_UUID = '00000000-0000-0000-0000-000000000000' -STATSD_CLIENT = 'django_statsd.clients.normal' -STATSD_HOST = 'graphite' +STATSD_CLIENT = 'django_statsd.clients.null' +STATSD_HOST = None +STATSD_PREFIX = None +#STATSD_CLIENT = 'django_statsd.clients.normal' +#STATSD_HOST = 'graphite' STATSD_PORT = 8125 -STATSD_PREFIX = 'tower' +#STATSD_PREFIX = 'tower' STATSD_MAXUDPSIZE = 512 # If there is an `/etc/tower/settings.py`, include it. 
diff --git a/awx/ui/client/src/app.js b/awx/ui/client/src/app.js
index 6d32be86fd..4cbd091356 100644
--- a/awx/ui/client/src/app.js
+++ b/awx/ui/client/src/app.js
@@ -172,7 +172,7 @@ var tower = angular.module('Tower', [
     'SchedulesHelper',
     'JobsListDefinition',
     'LogViewerStatusDefinition',
-    'LogViewerHelper',
+    'StandardOutHelper',
     'LogViewerOptionsDefinition',
     'EventViewerHelper',
     'HostEventsViewerHelper',
@@ -200,9 +200,9 @@ var tower = angular.module('Tower', [
     .config(['$pendolyticsProvider', function($pendolyticsProvider) {
         $pendolyticsProvider.doNotAutoStart();
     }])
-    .config(['$stateProvider', '$urlRouterProvider', '$breadcrumbProvider',
-        function ($stateProvider, $urlRouterProvider, $breadcrumbProvider) {
-
+    .config(['$stateProvider', '$urlRouterProvider', '$breadcrumbProvider', '$urlMatcherFactoryProvider',
+        function ($stateProvider, $urlRouterProvider, $breadcrumbProvider, $urlMatcherFactoryProvider) {
+            $urlMatcherFactoryProvider.strictMode(false)
             $breadcrumbProvider.setOptions({
                 templateUrl: urlPrefix + 'partials/breadcrumb.html'
             });
diff --git a/awx/ui/client/src/controllers/Home.js b/awx/ui/client/src/controllers/Home.js
index d857330d4b..3d8adebad2 100644
--- a/awx/ui/client/src/controllers/Home.js
+++ b/awx/ui/client/src/controllers/Home.js
@@ -149,7 +149,7 @@ Home.$inject = ['$scope', '$compile', '$stateParams', '$rootScope', '$location',
  * @description This controls the 'home/groups' page that is loaded from the dashboard
  *
  */
-export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $location, $stateParams, LogViewer, HomeGroupList, GenerateList, ProcessErrors, ReturnToCaller, ClearScope,
+export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $location, $stateParams, HomeGroupList, GenerateList, ProcessErrors, ReturnToCaller, ClearScope,
     GetBasePath, SearchInit, PaginateInit, FormatDate, GetHostsStatusMsg, GetSyncStatusMsg, ViewUpdateStatus, GroupsEdit, Wait,
     Alert, Rest, Empty, InventoryUpdate, Find, GroupsCancelUpdate, Store) {
@@ -461,58 +461,6 @@ export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $locatio
             attachElem(event, html, title);
         });
-        if (scope.removeGroupSummaryReady) {
-            scope.removeGroupSummaryReady();
-        }
-        scope.removeGroupSummaryReady = scope.$on('GroupSummaryReady', function(e, event, inventory, data) {
-            var html, title;
-
-            Wait('stop');
-
-            // Build the html for our popover
-            html = "<table class=\"table\">\n";
-            html += "<thead>\n";
-            html += "<tr>";
-            html += "<th>Status</th>";
-            html += "<th>Last Sync</th>";
-            html += "<th>Group</th>";
-            html += "</tr>";
-            html += "</thead>\n";
-            html += "<tbody>\n";
-            data.results.forEach( function(row) {
-                html += "<tr>";
-                html += "<td><i class=\"fa icon-job-" + row.status + "\"></i></td>";
-                html += "<td>" + ($filter('longDate')(row.last_updated)).replace(/ /,'<br />') + "</td>";
-                html += "<td>" + ellipsis(row.summary_fields.group.name) + "</td>";
-                html += "</tr>\n";
-            });
-            html += "</tbody>\n";
-            html += "</table>\n";
-            title = "Sync Status";
-            attachElem(event, html, title);
-        });
-
-        scope.showGroupSummary = function(event, id) {
-            var group, status;
-            if (!Empty(id)) {
-                group = Find({ list: scope.home_groups, key: 'id', val: id });
-                status = group.summary_fields.inventory_source.status;
-                if (status === 'running' || status === 'failed' || status === 'error' || status === 'successful') {
-                    Wait('start');
-                    Rest.setUrl(group.related.inventory_sources + '?or__source=ec2&or__source=rax&order_by=-last_job_run&page_size=5');
-                    Rest.get()
-                        .success(function(data) {
-                            scope.$emit('GroupSummaryReady', event, group, data);
-                        })
-                        .error(function(data, status) {
-                            ProcessErrors( scope, data, status, null, { hdr: 'Error!',
-                                msg: 'Call to ' + group.related.inventory_sources + ' failed. GET returned status: ' + status
-                            });
-                        });
-                }
-            }
-        };
-
         scope.showHostSummary = function(event, id) {
             var url, jobs = [];
             if (!Empty(id)) {
@@ -549,13 +497,6 @@ export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $locatio
             }
         };
-        scope.viewJob = function(url) {
-            LogViewer({
-                scope: modal_scope,
-                url: url
-            });
-        };
-
         scope.cancelUpdate = function(id) {
             var group = Find({ list: scope.home_groups, key: 'id', val: id });
             GroupsCancelUpdate({ scope: scope, group: group });
@@ -564,7 +505,7 @@
 }
-HomeGroups.$inject = ['$rootScope', '$log', '$scope', '$filter', '$compile', '$location', '$stateParams', 'LogViewer', 'HomeGroupList', 'generateList', 'ProcessErrors', 'ReturnToCaller',
+HomeGroups.$inject = ['$rootScope', '$log', '$scope', '$filter', '$compile', '$location', '$stateParams', 'HomeGroupList', 'generateList', 'ProcessErrors', 'ReturnToCaller',
     'ClearScope', 'GetBasePath', 'SearchInit', 'PaginateInit', 'FormatDate', 'GetHostsStatusMsg', 'GetSyncStatusMsg', 'ViewUpdateStatus',
     'GroupsEdit', 'Wait', 'Alert', 'Rest', 'Empty', 'InventoryUpdate', 'Find', 'GroupsCancelUpdate', 'Store', 'Socket'
 ];
@@ -578,7 +519,7 @@ HomeGroups.$inject = ['$rootScope', '$log', '$scope', '$filter', '$compile', '$l
  */
 export function HomeHosts($scope, $location, $stateParams, HomeHostList, GenerateList, ProcessErrors, ReturnToCaller, ClearScope,
-    GetBasePath, SearchInit, PaginateInit, FormatDate, SetStatus, ToggleHostEnabled, HostsEdit, Find, ShowJobSummary, ViewJob) {
+    GetBasePath, SearchInit, PaginateInit, FormatDate, SetStatus, ToggleHostEnabled, HostsEdit, Find, ShowJobSummary) {
     ClearScope('htmlTemplate'); //Garbage collection. Don't leave behind any listeners/watchers from the prior
     //scope.
@@ -647,10 +588,6 @@ export function HomeHosts($scope, $location, $stateParams, HomeHostList, Generat $scope.search(list.iterator); }; - $scope.viewJob = function(id) { - ViewJob({ scope: $scope, id: id }); - }; - $scope.toggleHostEnabled = function (id, sources) { ToggleHostEnabled({ host_id: id, @@ -687,5 +624,5 @@ export function HomeHosts($scope, $location, $stateParams, HomeHostList, Generat HomeHosts.$inject = ['$scope', '$location', '$stateParams', 'HomeHostList', 'generateList', 'ProcessErrors', 'ReturnToCaller', 'ClearScope', 'GetBasePath', 'SearchInit', 'PaginateInit', 'FormatDate', 'SetStatus', 'ToggleHostEnabled', 'HostsEdit', - 'Find', 'ShowJobSummary', 'ViewJob' + 'Find', 'ShowJobSummary' ]; diff --git a/awx/ui/client/src/controllers/Inventories.js b/awx/ui/client/src/controllers/Inventories.js index 916b812976..819baaea48 100644 --- a/awx/ui/client/src/controllers/Inventories.js +++ b/awx/ui/client/src/controllers/Inventories.js @@ -16,7 +16,7 @@ export function InventoriesList($scope, $rootScope, $location, $log, $stateParams, $compile, $filter, sanitizeFilter, Rest, Alert, InventoryList, generateList, Prompt, SearchInit, PaginateInit, ReturnToCaller, ClearScope, ProcessErrors, GetBasePath, Wait, - EditInventoryProperties, Find, Empty, LogViewer, $state) { + EditInventoryProperties, Find, Empty, $state) { var list = InventoryList, defaultUrl = GetBasePath('inventory'), @@ -295,10 +295,12 @@ export function InventoriesList($scope, $rootScope, $location, $log, }; $scope.viewJob = function(url) { - LogViewer({ - scope: $scope, - url: url - }); + + // Pull the id out of the URL + var id = url.replace(/^\//, '').split('/')[3]; + + $state.go('inventorySyncStdout', {id: id}); + }; $scope.editInventoryProperties = function (inventory_id) { @@ -364,7 +366,7 @@ export function InventoriesList($scope, $rootScope, $location, $log, InventoriesList.$inject = ['$scope', '$rootScope', '$location', '$log', '$stateParams', '$compile', '$filter', 'sanitizeFilter', 'Rest', 'Alert', 'InventoryList', 'generateList', 'Prompt', 'SearchInit', 'PaginateInit', 'ReturnToCaller', 'ClearScope', 'ProcessErrors', - 'GetBasePath', 'Wait', 'EditInventoryProperties', 'Find', 'Empty', 'LogViewer', '$state' + 'GetBasePath', 'Wait', 'EditInventoryProperties', 'Find', 'Empty', '$state' ]; @@ -781,7 +783,7 @@ export function InventoriesManage ($log, $scope, $rootScope, $location, GetHostsStatusMsg, GroupsEdit, InventoryUpdate, GroupsCancelUpdate, ViewUpdateStatus, GroupsDelete, Store, HostsEdit, HostsDelete, EditInventoryProperties, ToggleHostEnabled, ShowJobSummary, - InventoryGroupsHelp, HelpDialog, ViewJob, + InventoryGroupsHelp, HelpDialog, GroupsCopy, HostsCopy, $stateParams) { var PreviousSearchParams, @@ -1254,12 +1256,8 @@ export function InventoriesManage ($log, $scope, $rootScope, $location, opts.autoShow = params.autoShow || false; } HelpDialog(opts); - }; - - $scope.viewJob = function(id) { - ViewJob({ scope: $scope, id: id }); - }; - + } +; $scope.showHosts = function (group_id, show_failures) { // Clicked on group if (group_id !== null) { @@ -1293,6 +1291,6 @@ InventoriesManage.$inject = ['$log', '$scope', '$rootScope', '$location', 'GroupsEdit', 'InventoryUpdate', 'GroupsCancelUpdate', 'ViewUpdateStatus', 'GroupsDelete', 'Store', 'HostsEdit', 'HostsDelete', 'EditInventoryProperties', 'ToggleHostEnabled', 'ShowJobSummary', - 'InventoryGroupsHelp', 'HelpDialog', 'ViewJob', 'GroupsCopy', + 'InventoryGroupsHelp', 'HelpDialog', 'GroupsCopy', 'HostsCopy', '$stateParams' ]; diff --git 
a/awx/ui/client/src/controllers/Projects.js b/awx/ui/client/src/controllers/Projects.js index d7f5786b6d..911ebcf356 100644 --- a/awx/ui/client/src/controllers/Projects.js +++ b/awx/ui/client/src/controllers/Projects.js @@ -15,7 +15,7 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams, Rest, Alert, ProjectList, GenerateList, Prompt, SearchInit, PaginateInit, ReturnToCaller, ClearScope, ProcessErrors, GetBasePath, SelectionInit, ProjectUpdate, Refresh, Wait, GetChoices, Empty, - Find, LogViewer, GetProjectIcon, GetProjectToolTip, $filter, $state) { + Find, GetProjectIcon, GetProjectToolTip, $filter, $state) { ClearScope(); @@ -200,24 +200,19 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams, $state.transitionTo('projects.edit', {id: id}); }; - if ($scope.removeShowLogViewer) { - $scope.removeShowLogViewer(); + if ($scope.removeGoToJobDetails) { + $scope.removeGoToJobDetails(); } - $scope.removeShowLogViewer = $scope.$on('ShowLogViewer', function(e, data) { - if (data.related.current_update) { + $scope.removeGoToJobDetails = $scope.$on('GoToJobDetails', function(e, data) { + if (data.summary_fields.current_update || data.summary_fields.last_update) { + Wait('start'); - LogViewer({ - scope: $scope, - url: data.related.current_update, - getIcon: GetProjectIcon - }); - } else if (data.related.last_update) { - Wait('start'); - LogViewer({ - scope: $scope, - url: data.related.last_update, - getIcon: GetProjectIcon - }); + + // Grab the id from summary_fields + var id = (data.summary_fields.current_update) ? data.summary_fields.current_update.id : data.summary_fields.last_update.id; + + $state.go('scmUpdateStdout', {id: id}); + } else { Alert('No Updates Available', 'There is no SCM update information available for this project. An update has not yet been ' + ' completed. 
If you have not already done so, start an update for this project.', 'alert-info'); @@ -235,7 +230,7 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams, Rest.setUrl(project.url); Rest.get() .success(function(data) { - $scope.$emit('ShowLogViewer', data); + $scope.$emit('GoToJobDetails', data); }) .error(function(data, status) { ProcessErrors($scope, data, status, null, { hdr: 'Error!', @@ -374,7 +369,7 @@ ProjectsList.$inject = ['$scope', '$rootScope', '$location', '$log', 'SearchInit', 'PaginateInit', 'ReturnToCaller', 'ClearScope', 'ProcessErrors', 'GetBasePath', 'SelectionInit', 'ProjectUpdate', 'Refresh', 'Wait', 'GetChoices', 'Empty', 'Find', - 'LogViewer', 'GetProjectIcon', 'GetProjectToolTip', '$filter', '$state' + 'GetProjectIcon', 'GetProjectToolTip', '$filter', '$state' ]; diff --git a/awx/ui/client/src/helpers.js b/awx/ui/client/src/helpers.js index 4b9c4ab548..0e47fbf52b 100644 --- a/awx/ui/client/src/helpers.js +++ b/awx/ui/client/src/helpers.js @@ -21,7 +21,6 @@ import JobTemplates from "./helpers/JobTemplates"; import Jobs from "./helpers/Jobs"; import License from "./helpers/License"; import LoadConfig from "./helpers/LoadConfig"; -import LogViewer from "./helpers/LogViewer"; import PaginationHelpers from "./helpers/PaginationHelpers"; import Parse from "./helpers/Parse"; import ProjectPath from "./helpers/ProjectPath"; @@ -58,7 +57,6 @@ export Jobs, License, LoadConfig, - LogViewer, PaginationHelpers, Parse, ProjectPath, diff --git a/awx/ui/client/src/helpers/EventViewer.js b/awx/ui/client/src/helpers/EventViewer.js index c56d7f05a4..cb075fa5e9 100644 --- a/awx/ui/client/src/helpers/EventViewer.js +++ b/awx/ui/client/src/helpers/EventViewer.js @@ -13,8 +13,8 @@ export default angular.module('EventViewerHelper', ['ModalDialog', 'Utilities', 'EventsViewerFormDefinition', 'HostsHelper']) - .factory('EventViewer', ['$compile', 'CreateDialog', 'GetEvent', 'Wait', 'EventAddTable', 'GetBasePath', 'LookUpName', 'Empty', 'EventAddPreFormattedText', - function($compile, CreateDialog, GetEvent, Wait, EventAddTable, GetBasePath, LookUpName, Empty, EventAddPreFormattedText) { + .factory('EventViewer', ['$compile', 'CreateDialog', 'GetEvent', 'Wait', 'EventAddTable', 'GetBasePath', 'Empty', 'EventAddPreFormattedText', + function($compile, CreateDialog, GetEvent, Wait, EventAddTable, GetBasePath, Empty, EventAddPreFormattedText) { return function(params) { var parent_scope = params.scope, url = params.url, diff --git a/awx/ui/client/src/helpers/Groups.js b/awx/ui/client/src/helpers/Groups.js index 920c4641bc..eeebb9d8bf 100644 --- a/awx/ui/client/src/helpers/Groups.js +++ b/awx/ui/client/src/helpers/Groups.js @@ -18,7 +18,7 @@ export default angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name, 'GroupListDefinition', 'SearchHelper', 'PaginationHelpers', listGenerator.name, 'GroupsHelper', 'InventoryHelper', 'SelectionHelper', 'JobSubmissionHelper', 'RefreshHelper', 'PromptDialog', 'CredentialsListDefinition', 'InventoryTree', - 'InventoryStatusDefinition', 'VariablesHelper', 'SchedulesListDefinition', 'SourceFormDefinition', 'LogViewerHelper', + 'InventoryStatusDefinition', 'VariablesHelper', 'SchedulesListDefinition', 'SourceFormDefinition', 'StandardOutHelper', 'SchedulesHelper' ]) @@ -65,8 +65,8 @@ angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name * TODO: Document * */ -.factory('ViewUpdateStatus', ['Rest', 'ProcessErrors', 'GetBasePath', 'Alert', 'Wait', 'Empty', 'Find', 'LogViewer', - function 
(Rest, ProcessErrors, GetBasePath, Alert, Wait, Empty, Find, LogViewer) { +.factory('ViewUpdateStatus', ['$state', 'Rest', 'ProcessErrors', 'GetBasePath', 'Alert', 'Wait', 'Empty', 'Find', + function ($state, Rest, ProcessErrors, GetBasePath, Alert, Wait, Empty, Find) { return function (params) { var scope = params.scope, @@ -76,11 +76,13 @@ angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name if (scope.removeSourceReady) { scope.removeSourceReady(); } - scope.removeSourceReady = scope.$on('SourceReady', function(e, url) { - LogViewer({ - scope: scope, - url: url - }); + scope.removeSourceReady = scope.$on('SourceReady', function(e, source) { + + // Get the ID from the correct summary field + var update_id = (source.current_update) ? source.summary_fields.current_update.id : source.summary_fields.last_update.id; + + $state.go('inventorySyncStdout', {id: update_id}); + }); if (group) { @@ -94,8 +96,7 @@ angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name Rest.setUrl(group.related.inventory_source); Rest.get() .success(function (data) { - var url = (data.related.current_update) ? data.related.current_update : data.related.last_update; - scope.$emit('SourceReady', url); + scope.$emit('SourceReady', data); }) .error(function (data, status) { ProcessErrors(scope, data, status, null, { hdr: 'Error!', diff --git a/awx/ui/client/src/helpers/Hosts.js b/awx/ui/client/src/helpers/Hosts.js index 517fee8a52..b9bde5fe2c 100644 --- a/awx/ui/client/src/helpers/Hosts.js +++ b/awx/ui/client/src/helpers/Hosts.js @@ -20,7 +20,7 @@ export default angular.module('HostsHelper', [ 'RestServices', 'Utilities', listGenerator.name, 'HostListDefinition', 'SearchHelper', 'PaginationHelpers', listGenerator.name, 'HostsHelper', 'InventoryHelper', 'RelatedSearchHelper', 'InventoryFormDefinition', 'SelectionHelper', - 'HostGroupsFormDefinition', 'VariablesHelper', 'ModalDialog', 'LogViewerHelper', + 'HostGroupsFormDefinition', 'VariablesHelper', 'ModalDialog', 'StandardOutHelper', 'GroupListDefinition' ]) @@ -159,17 +159,6 @@ angular.module('HostsHelper', [ 'RestServices', 'Utilities', listGenerator.name, }; }]) -.factory('ViewJob', ['LogViewer', 'GetBasePath', function(LogViewer, GetBasePath) { - return function(params) { - var scope = params.scope, - id = params.id; - LogViewer({ - scope: scope, - url: GetBasePath('jobs') + id + '/' - }); - }; -}]) - .factory('HostsReload', [ '$stateParams', 'Empty', 'InventoryHosts', 'GetBasePath', 'SearchInit', 'PaginateInit', 'Wait', 'SetHostStatus', 'SetStatus', 'ApplyEllipsis', function($stateParams, Empty, InventoryHosts, GetBasePath, SearchInit, PaginateInit, Wait, SetHostStatus, SetStatus, diff --git a/awx/ui/client/src/helpers/Jobs.js b/awx/ui/client/src/helpers/Jobs.js index 6320259ade..dd7e83d57f 100644 --- a/awx/ui/client/src/helpers/Jobs.js +++ b/awx/ui/client/src/helpers/Jobs.js @@ -14,7 +14,7 @@ import listGenerator from '../shared/list-generator/main'; export default angular.module('JobsHelper', ['Utilities', 'RestServices', 'FormGenerator', 'JobSummaryDefinition', 'InventoryHelper', 'GeneratorHelpers', - 'JobSubmissionHelper', 'LogViewerHelper', 'SearchHelper', 'PaginationHelpers', 'AdhocHelper', listGenerator.name]) + 'JobSubmissionHelper', 'StandardOutHelper', 'SearchHelper', 'PaginationHelpers', 'AdhocHelper', listGenerator.name]) /** * JobsControllerInit({ scope: $scope }); @@ -22,8 +22,8 @@ export default * Initialize calling scope with all the bits required to support a jobs list * */ - 
.factory('JobsControllerInit', ['$state', 'Find', 'DeleteJob', 'RelaunchJob', 'LogViewer', '$window', - function($state, Find, DeleteJob, RelaunchJob, LogViewer, $window) { + .factory('JobsControllerInit', ['$state', 'Find', 'DeleteJob', 'RelaunchJob', '$window', + function($state, Find, DeleteJob, RelaunchJob, $window) { return function(params) { var scope = params.scope, iterator = (params.iterator) ? params.iterator : scope.iterator; diff --git a/awx/ui/client/src/helpers/LogViewer.js b/awx/ui/client/src/helpers/LogViewer.js deleted file mode 100644 index 90e9a3f407..0000000000 --- a/awx/ui/client/src/helpers/LogViewer.js +++ /dev/null @@ -1,390 +0,0 @@ -/************************************************* - * Copyright (c) 2015 Ansible, Inc. - * - * All Rights Reserved - *************************************************/ - - /** - * @ngdoc function - * @name helpers.function:LogViewer - * @description logviewer -*/ - -export default - angular.module('LogViewerHelper', ['ModalDialog', 'Utilities', 'FormGenerator', 'VariablesHelper']) - - .factory('LogViewer', ['$location', '$compile', 'CreateDialog', 'GetJob', 'Wait', 'GenerateForm', 'LogViewerStatusForm', 'AddTable', 'AddTextarea', - 'LogViewerOptionsForm', 'EnvTable', 'GetBasePath', 'LookUpName', 'Empty', 'AddPreFormattedText', 'ParseVariableString', 'GetChoices', - function($location, $compile, CreateDialog, GetJob, Wait, GenerateForm, LogViewerStatusForm, AddTable, AddTextarea, LogViewerOptionsForm, EnvTable, - GetBasePath, LookUpName, Empty, AddPreFormattedText, ParseVariableString, GetChoices) { - return function(params) { - var parent_scope = params.scope, - url = params.url, - getIcon = params.getIcon, - scope = parent_scope.$new(true), - base = $location.path().replace(/^\//, '').split('/')[0], - pieces; - - if (scope.removeModalReady) { - scope.removeModalReady(); - } - scope.removeModalReady = scope.$on('ModalReady', function() { - Wait('stop'); - $('#logviewer-modal-dialog').dialog('open'); - }); - - if (scope.removeJobReady) { - scope.removeJobReady(); - } - scope.removeJobReady = scope.$on('JobReady', function(e, data) { - var key, resizeText, elem; - $('#status-form-container').empty(); - $('#options-form-container').empty(); - $('#stdout-form-container').empty(); - $('#traceback-form-container').empty(); - $('#variables-container').empty(); - $('#source-container').empty(); - $('#logview-tabs li:eq(1)').hide(); - $('#logview-tabs li:eq(2)').hide(); - $('#logview-tabs li:eq(4)').hide(); - $('#logview-tabs li:eq(5)').hide(); - - // Make sure subsequenct scope references don't bubble up to the parent - for (key in LogViewerStatusForm.fields) { - scope[key] = ''; - } - for (key in LogViewerOptionsForm.fields) { - scope[key] = ''; - } - - for (key in data) { - scope[key] = data[key]; - } - scope.created_by = ''; - scope.job_template = ''; - - if (data.related.created_by) { - pieces = data.related.created_by.replace(/^\//,'').replace(/\/$/,'').split('/'); - scope.created_by = parseInt(pieces[pieces.length - 1],10); - LookUpName({ - scope: scope, - scope_var: 'created_by', - url: GetBasePath('users') + scope.created_by + '/' - }); - } - - // For jobs link the name to the job parent - if (base === 'jobs') { - if (data.type === 'job') { - scope.name_link = "job_template"; - scope.job_template = data.unified_job_template; - scope.job_template_name = (data.summary_fields.job_template) ? 
data.summary_fields.job_template.name : data.name; - scope.name_id = data.unified_job_template; - } - if (data.type === 'project_update') { - scope.name_link = "project"; - scope.name_id = data.unified_job_template; - } - if (data.type === 'inventory_update') { - scope.name_link = "inventory_source"; - scope.name_id = scope.group; - } - } - - AddTable({ scope: scope, form: LogViewerStatusForm, id: 'status-form-container', getIcon: getIcon }); - AddTable({ scope: scope, form: LogViewerOptionsForm, id: 'options-form-container', getIcon: getIcon }); - - if (data.result_stdout) { - $('#logview-tabs li:eq(1)').show(); - var showStandardOut = (data.type !== "system_job") ? true : false; - AddPreFormattedText({ - id: 'stdout-form-container', - val: data.result_stdout, - standardOut: showStandardOut, - jobUrl: data.url - }); - } - - if (data.result_traceback) { - $('#logview-tabs li:eq(2)').show(); - AddPreFormattedText({ - id: 'traceback-form-container', - val: data.result_traceback - }); - } - - /*if (data.job_env) { - EnvTable({ - id: 'env-form-container', - vars: data.job_env - }); - }*/ - - if (data.extra_vars) { - $('#logview-tabs li:eq(4)').show(); - AddTextarea({ - container_id: 'variables-container', - fld_id: 'variables', - val: ParseVariableString(data.extra_vars) - }); - } - - if (data.source_vars) { - $('#logview-tabs li:eq(5)').show(); - AddTextarea({ - container_id: 'source-container', - fld_id: 'source-variables', - val: ParseVariableString(data.source_vars) - }); - } - - if (!Empty(scope.source)) { - if (scope.removeChoicesReady) { - scope.removeChoicesReady(); - } - scope.removeChoicesReady = scope.$on('ChoicesReady', function() { - scope.source_choices.every(function(e) { - if (e.value === scope.source) { - scope.source = e.label; - return false; - } - return true; - }); - }); - GetChoices({ - scope: scope, - url: GetBasePath('inventory_sources'), - field: 'source', - variable: 'source_choices', - choice_name: 'choices', - callback: 'ChoicesReady' - }); - } - - if (!Empty(scope.credential)) { - LookUpName({ - scope: scope, - scope_var: 'credential', - url: GetBasePath('credentials') + scope.credential + '/' - }); - } - - if (!Empty(scope.inventory)) { - LookUpName({ - scope: scope, - scope_var: 'inventory', - url: GetBasePath('inventory') + scope.inventory + '/' - }); - } - - if (!Empty(scope.project)) { - LookUpName({ - scope: scope, - scope_var: 'project', - url: GetBasePath('projects') + scope.project + '/' - }); - } - - if (!Empty(scope.cloud_credential)) { - LookUpName({ - scope: scope, - scope_var: 'cloud_credential', - url: GetBasePath('credentials') + scope.cloud_credential + '/' - }); - } - - if (!Empty(scope.inventory_source)) { - LookUpName({ - scope: scope, - scope_var: 'inventory_source', - url: GetBasePath('inventory_sources') + scope.inventory_source + '/' - }); - } - - resizeText = function() { - var u = $('#logview-tabs').outerHeight() + 25, - h = $('#logviewer-modal-dialog').innerHeight(), - rows = Math.floor((h - u) / 20); - rows -= 3; - rows = (rows < 6) ? 
6 : rows; - $('#logviewer-modal-dialog #variables').attr({ rows: rows }); - $('#logviewer-modal-dialog #source-variables').attr({ rows: rows }); - }; - - elem = angular.element(document.getElementById('logviewer-modal-dialog')); - $compile(elem)(scope); - - CreateDialog({ - scope: scope, - width: 600, - height: 550, - minWidth: 450, - callback: 'ModalReady', - id: 'logviewer-modal-dialog', - onResizeStop: resizeText, - title: 'Job Results', - onOpen: function() { - $('#logview-tabs a:first').tab('show'); - $('#dialog-ok-button').focus(); - resizeText(); - } - }); - }); - - GetJob({ - url: url, - scope: scope - }); - - scope.modalOK = function() { - $('#logviewer-modal-dialog').dialog('close'); - scope.$destroy(); - }; - }; - }]) - - .factory('GetJob', ['Rest', 'ProcessErrors', function(Rest, ProcessErrors) { - return function(params) { - var url = params.url, - scope = params.scope; - Rest.setUrl(url); - Rest.get() - .success(function(data){ - scope.$emit('JobReady', data); - }) - .error(function(data, status) { - ProcessErrors(scope, data, status, null, { hdr: 'Error!', - msg: 'Failed to retrieve ' + url + '. GET returned: ' + status }); - }); - }; - }]) - - .factory('LookUpName', ['Rest', 'ProcessErrors', 'Empty', function(Rest, ProcessErrors, Empty) { - return function(params) { - var url = params.url, - scope_var = params.scope_var, - scope = params.scope; - Rest.setUrl(url); - Rest.get() - .success(function(data) { - if (scope_var === 'inventory_source') { - scope[scope_var + '_name'] = data.summary_fields.group.name; - } - else if (!Empty(data.name)) { - scope[scope_var + '_name'] = data.name; - } - if (!Empty(data.group)) { - // Used for inventory_source - scope.group = data.group; - } - }) - .error(function(data, status) { - ProcessErrors(scope, data, status, null, { hdr: 'Error!', - msg: 'Failed to retrieve ' + url + '. GET returned: ' + status }); - }); - }; - }]) - - .factory('AddTable', ['$compile', 'Empty', 'Find', function($compile, Empty, Find) { - return function(params) { - var form = params.form, - id = params.id, - scope = params.scope, - getIcon = params.getIcon, - fld, html, url, e, - urls = [ - { "variable": "credential", "url": "/#/credentials/" }, - { "variable": "project", "url": "/#/projects/" }, - { "variable": "inventory", "url": "/#/inventories/" }, - { "variable": "cloud_credential", "url": "/#/credentials/" }, - { "variable": "inventory_source", "url": "/#/home/groups/?id={{ group }}" }, - { "variable": "job_template", "url": "/#/job_templates/" }, - { "variable": "created_by", "url": "/#/users/" } - ]; - html = "\n"; - for (fld in form.fields) { - if (!Empty(scope[fld])) { - html += "" + - "\n"; - } - } - html += "
" + form.fields[fld].label + ""; - url = Find({ list: urls, key: "variable", val: fld }); - if (url) { - html += "{{ " + fld + '_name' + " }}"; - } - else if (fld === 'name' && scope.name_link) { - url = Find({ list: urls, key: "variable", val: scope.name_link }); - html += "{{ " + - ( (scope.name_link === 'inventory_source') ? 'inventory_source_name' : fld ) + " }}"; - } - else if (fld === 'elapsed') { - html += scope[fld] + " seconds"; - } - else if (fld === 'status') { - if (getIcon) { - html += " " + scope[fld]; - } - else { - html += " " + scope[fld]; - } - if (scope.job_explanation) { - html += "

" + scope.job_explanation + "

"; - } - } - else { - html += "{{ " + fld ; - html += (form.fields[fld].filter) ? " | " + form.fields[fld].filter : "" ; - html += " }}"; - } - html += "
\n"; - e = angular.element(document.getElementById(id)); - e.empty().html(html); - $compile(e)(scope); - }; - }]) - - .factory('AddTextarea', [ function() { - return function(params) { - var container_id = params.container_id, - val = params.val, - fld_id = params.fld_id, - html; - html = "
\n" + - "" + - "
\n"; - $('#' + container_id).empty().html(html); - }; - }]) - - .factory('AddPreFormattedText', ['$rootScope', function($rootScope) { - return function(params) { - var id = params.id, - val = params.val, - html = ""; - if (params.standardOut) { - html += 'Download'; - html += "
" + val + "
\n"; - } else { - html += "
" + val + "
\n"; - } - $('#' + id).empty().html(html); - }; - }]) - - .factory('EnvTable', [ function() { - return function(params) { - var id = params.id, - vars = params.vars, - key, html; - html = "\n"; - for (key in vars) { - html += "" + - "\n"; - } - html += "
" + key + "" + vars[key] + "
\n"; - $('#' + id).empty().html(html); - }; - }]); diff --git a/awx/ui/client/src/standard-out/main.js b/awx/ui/client/src/standard-out/main.js index e8a0946e82..b0aafe40ad 100644 --- a/awx/ui/client/src/standard-out/main.js +++ b/awx/ui/client/src/standard-out/main.js @@ -9,8 +9,9 @@ import stdoutManagementJobsRoute from './management-jobs/standard-out-management import stdoutInventorySyncRoute from './inventory-sync/standard-out-inventory-sync.route'; import stdoutScmUpdateRoute from './scm-update/standard-out-scm-update.route'; import {JobStdoutController} from './standard-out.controller'; +import StandardOutHelper from './standard-out-factories/main'; -export default angular.module('standardOut', []) +export default angular.module('standardOut', [StandardOutHelper.name]) .controller('JobStdoutController', JobStdoutController) .run(['$stateExtender', function($stateExtender) { $stateExtender.addState(stdoutAdhocRoute); diff --git a/awx/ui/client/src/standard-out/standard-out-factories/lookup-name.factory.js b/awx/ui/client/src/standard-out/standard-out-factories/lookup-name.factory.js new file mode 100644 index 0000000000..f097a945be --- /dev/null +++ b/awx/ui/client/src/standard-out/standard-out-factories/lookup-name.factory.js @@ -0,0 +1,32 @@ +/************************************************* + * Copyright (c) 2016 Ansible, Inc. + * + * All Rights Reserved + *************************************************/ + + export default + ['Rest', 'ProcessErrors', 'Empty', function(Rest, ProcessErrors, Empty) { + return function(params) { + var url = params.url, + scope_var = params.scope_var, + scope = params.scope; + Rest.setUrl(url); + Rest.get() + .success(function(data) { + if (scope_var === 'inventory_source') { + scope[scope_var + '_name'] = data.summary_fields.group.name; + } + else if (!Empty(data.name)) { + scope[scope_var + '_name'] = data.name; + } + if (!Empty(data.group)) { + // Used for inventory_source + scope.group = data.group; + } + }) + .error(function(data, status) { + ProcessErrors(scope, data, status, null, { hdr: 'Error!', + msg: 'Failed to retrieve ' + url + '. GET returned: ' + status }); + }); + }; + }]; diff --git a/awx/ui/client/src/standard-out/standard-out-factories/main.js b/awx/ui/client/src/standard-out/standard-out-factories/main.js new file mode 100644 index 0000000000..fdded8ab31 --- /dev/null +++ b/awx/ui/client/src/standard-out/standard-out-factories/main.js @@ -0,0 +1,11 @@ +/************************************************* + * Copyright (c) 2016 Ansible, Inc. + * + * All Rights Reserved + *************************************************/ + +import lookUpName from './lookup-name.factory'; + +export default + angular.module('StandardOutHelper', []) + .factory('LookUpName', lookUpName); diff --git a/awx/ui/client/src/standard-out/standard-out.controller.js b/awx/ui/client/src/standard-out/standard-out.controller.js index e0ac1362e4..1b9233a248 100644 --- a/awx/ui/client/src/standard-out/standard-out.controller.js +++ b/awx/ui/client/src/standard-out/standard-out.controller.js @@ -195,7 +195,7 @@ export function JobStdoutController ($location, $log, $rootScope, $scope, $compi return true; }); }); - // GetChoices can be found in the helper: LogViewer.js + // GetChoices can be found in the helper: StandardOut.js // It attaches the source choices to $scope.source_choices. // Then, when the callback is fired, $scope.source is bound // to the corresponding label. 
@@ -209,7 +209,7 @@ export function JobStdoutController ($location, $log, $rootScope, $scope, $compi }); } - // LookUpName can be found in the helper: LogViewer.js + // LookUpName can be found in the helper: StandardOut.js // It attaches the name that it gets (based on the url) // to the $scope variable defined by the attribute scope_var. if (!Empty(data.credential)) { diff --git a/docs/licenses/django-jsonbfield.txt b/docs/licenses/django-jsonbfield.txt new file mode 100644 index 0000000000..5f4f225dd2 --- /dev/null +++ b/docs/licenses/django-jsonbfield.txt @@ -0,0 +1,27 @@ +Copyright (c) Django Software Foundation and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of Django nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/notification_system.md b/docs/notification_system.md new file mode 100644 index 0000000000..fc8f99b9ee --- /dev/null +++ b/docs/notification_system.md @@ -0,0 +1,187 @@ +Completion pending unit tests and acceptance info and instructions. The following documentation will likely be moved to the feature epic card and reproduced in our development documentation. + +# Notification System Overview + +A Notifier is an instance of a notification type (Email, Slack, Webhook, etc) with a name, description, and a defined configuration (A few examples: Username, password, server, recipients for the Email type. Token and list of channels for Slack. Url and Headers for webhooks) + +A Notification is a manifestation of the Notifier... for example, when a job fails a notification is sent using the configuration defined by the Notifier. + +This PR implements the Notification system as outlined in the 3.0 Notifications spec. At a high level the typical flow is: + +* User creates a Notifier at `/api/v1/notifiers` +* User assigns the notifier to any of the various objects that support it (all variants of job templates as well as organizations and projects) and at the appropriate trigger level for which they want the notification (error, success, or any). For example a user may wish to assign a particular Notifier to trigger when `Job Template 1` fails. 
In that case, they will associate the notifier with the job template at `/api/v1/job_templates/n/notifiers_error`.
+
+## Notifier hierarchy
+
+Notifiers assigned at certain levels will inherit notifiers defined on parent objects, as follows:
+
+* Job Templates will use notifiers defined on them, as well as inheriting notifiers from the Project used by the Job Template and from the Organization that it is listed under (via the Project).
+* Project Updates will use notifiers defined on the project and will inherit notifiers from the Organization associated with it.
+* Inventory Updates will use notifiers defined on the Organization that the inventory is listed under.
+* Ad-hoc commands will use notifiers defined on the Organization that the inventory is associated with.
+
+## Workflow
+
+When a job succeeds or fails, the error or success handler will pull a list of relevant notifiers using the procedure defined above. It will then create a Notification object for each one, containing relevant details about the job, and then **send** it to the destination (email addresses, Slack channel(s), SMS numbers, etc.). These Notification objects are available as related resources on job types (jobs, inventory updates, project updates), and also at `/api/v1/notifications`. You may also see what notifications have been sent from a notifier by examining its related resources.
+
+Notifications can succeed or fail, but that will not cause the associated job to succeed or fail. The status of a notification can be viewed at its detail endpoint `/api/v1/notifications/<id>/`.
+
+## Testing Notifiers before using them
+
+Once a Notifier is created, its configuration can be tested via the endpoint at `/api/v1/notifiers/<id>/test`. This will emit a test notification given the configuration defined by the Notifier. These test notifications will also appear in the notifications list at `/api/v1/notifications`.
+
+# Notification Types
+
+The currently defined Notification Types are:
+
+* Email
+* Slack
+* Hipchat
+* Pagerduty
+* Twilio
+* IRC
+* Webhook
+
+Each of these has its own configuration and behavioral semantics, and testing each may need to be approached differently. The following sections give as much detail as possible.
+
+## Email
+
+The email notification type supports a wide variety of SMTP servers and supports SSL/TLS connections.
+
+### Testing considerations
+
+The following should be performed for good acceptance:
+
+* Test plain authentication
+* Test SSL and TLS authentication
+* Verify single and multiple recipients
+* Verify message subject and contents are formatted sanely. They should be plaintext but readable.
+
+### Test Service
+
+Either set up a local SMTP mail service or use a hosted one; here are some options:
+
+* postfix service on galaxy: https://galaxy.ansible.com/debops/postfix/
+* Mailtrap has a good free plan and should provide all of the features we need under that plan: https://mailtrap.io/
+
+## Slack
+
+Slack is pretty easy to configure; it just needs a token, which you can get by creating a bot in the integration settings for the Slack team.
+
+### Testing considerations
+
+The following should be performed for good acceptance:
+
+* Test single and multiple channels and good formatting of the message. Note that Slack notifications only contain the minimal information.
+
+### Test Service
+
+Any user of the Ansible Slack service can create a bot integration (which is how this notification is implemented). Remember to invite the bot to the channel first.
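+
+As a rough illustration of the create-then-test flow described earlier, the sketch below uses Python's `requests` to create a Slack notifier and then hit its test endpoint. Treat it as a sketch only: `notification_type` and `notification_configuration` match the serializer fields in this PR, but the Slack configuration keys (`token`, `channels`), the host, and the credentials are assumptions/placeholders.
+
+```
+import requests
+
+TOWER = 'https://towerhost'    # see TOWER_URL_BASE in settings
+AUTH = ('admin', 'password')   # placeholder credentials
+
+# Create the notifier; the Slack configuration keys below are assumed
+# from the prose above, not confirmed against the serializer.
+resp = requests.post(TOWER + '/api/v1/notifiers/', auth=AUTH, verify=False,
+                     json={'name': 'Slack - engineering',
+                           'description': 'Post job results to #engineering',
+                           'notification_type': 'slack',
+                           'notification_configuration': {
+                               'token': 'xoxb-1234-abcdef',  # bot token
+                               'channels': ['#engineering'],
+                           }})
+notifier = resp.json()
+
+# Emit a test notification; it should then appear at /api/v1/notifications.
+requests.post('%s/api/v1/notifiers/%s/test/' % (TOWER, notifier['id']),
+              auth=AUTH, verify=False)
+```
+
+If the second POST succeeds, the resulting Notification should also show up in the notifier's related notifications list.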
+
+## Hipchat
+
+There are several ways to integrate with Hipchat. The Tower implementation uses Hipchat "Integrations". Currently you can find this at the bottom right of the main Hipchat webview. From there you will select "Build your own Integration". After creating it, the page will list the `auth_token` that needs to be supplied to Tower. Some other relevant details on the fields accepted by Tower for the Hipchat notification type:
+
+* `color`: This will highlight the message as the given color. If set to something Hipchat doesn't expect, the notification will generate an error, but it's pretty rad. I like green personally.
+* `notify`: Selecting this will cause the bot to "notify" channel members. Normally it will just be stuck as a message in the chat channel without triggering anyone's notifications. This option will notify users of the channel, respecting their existing notification settings (browser notification, email fallback, etc.).
+* `message_from`: Along with the integration name itself, this will put another label on the notification. I reckon this would be helpful if multiple services are using the same integration, to distinguish them from each other.
+* `api_url`: The URL of the Hipchat API service. If you create a team hosted by them, it'll be something like `https://team.hipchat.com`. For a self-hosted service it'll be the HTTP URL that is accessible by Tower.
+
+### Testing considerations
+
+* Make sure all options behave as expected.
+* Test single and multiple channels.
+* Test that notification preferences are obeyed.
+* Test formatting and appearance. Note that, like Slack, Hipchat will use the minimal version of the notification.
+* Test the standalone Hipchat service for parity with the hosted solution.
+
+### Test Service
+
+Hipchat allows you to create a team with limited users and message history for free, which is easy to set up and get started with. Hipchat also offers a self-hosted server which we should test for parity... it has a 30-day trial, but there might be some other way to negotiate with them, Red Hat, or Ansible itself:
+
+https://www.hipchat.com/server
+
+## Pagerduty
+
+Pagerduty is a fairly straightforward integration. The user will create an API Key in the Pagerduty system (this will be the token that is given to Tower) and then create a "Service" which will provide an "Integration Key" that will also be given to Tower. The other options of note are:
+
+* `subdomain`: When you sign up for the Pagerduty account, you will get a unique subdomain to communicate with. For instance, if you signed up as "towertest", the web dashboard will be at towertest.pagerduty.com and you will give the Tower API "towertest" as the subdomain (not the full domain).
+* `client_name`: This will be sent along with the alert content to the Pagerduty service to help identify the service that is using the API key/service. This is helpful if multiple integrations are using the same API key and service.
+
+### Testing considerations
+
+* Make sure the alert lands on the Pagerduty service.
+* Verify that the minimal information is displayed for the notification, but also that the detail of the notification contains all fields. Pagerduty itself should understand the format in which we send the detail information.
+
+### Test Service
+
+Pagerduty allows you to sign up for a free trial with the service. We may also have an Ansible-wide Pagerduty service that we could tie into for other things.
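+
+Tying the fields above together, a Pagerduty notifier's `notification_configuration` might look roughly like the sketch below. The key names are assumptions inferred from the field descriptions, not confirmed against the serializer:
+
+```
+# Hypothetical Pagerduty configuration payload; key names are illustrative.
+pagerduty_configuration = {
+    'subdomain': 'towertest',                  # from towertest.pagerduty.com
+    'token': 'EXAMPLE_API_KEY',                # the API Key created in Pagerduty
+    'service_key': 'EXAMPLE_INTEGRATION_KEY',  # the Service's Integration Key
+    'client_name': 'Ansible Tower',            # identifies this integration
+}
+```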
+
+## Twilio
+
+Twilio is a voice and SMS automation service. Once you are signed in, you'll need to create a phone number from which the message will be sent. You'll then define a "Messaging Service" under Programmable SMS and associate the number you created before with it. Note that you may need to verify this number or some other information before you are allowed to use it to send to any numbers. The Messaging Service does not need a status callback URL, nor does it need the ability to process inbound messages.
+
+Under your individual (or sub) account settings you will have API credentials. The Account SID and AuthToken are what will be given to Tower. There are a couple of other important fields:
+
+* `from_number`: This is the number associated with the messaging service above and must be given in the form of "+15556667777".
+* `to_numbers`: This will be the list of numbers to receive the SMS; each should be a 10-digit phone number.
+
+### Testing considerations
+
+* Test notifications with single and multiple recipients.
+* Verify that the minimal information is displayed for the notification. Note that this notification type does not display the full detailed notification.
+
+### Test Service
+
+Twilio is fairly straightforward to sign up for, but I don't believe it has a free plan; a credit card will be needed to sign up, though the per-message charges are fairly minimal.
+
+## IRC
+
+The Tower IRC notification takes the form of an IRC bot that will connect, deliver its messages to channel(s) or individual user(s), and then disconnect. The Tower notification bot also supports SSL authentication. The Tower bot does not currently support Nickserv identification. If a channel or user does not exist or is not online, the Notification will not fail; the failure scenario is reserved specifically for connectivity.
+
+Connectivity information is straightforward:
+
+* `server`: The hostname or address of the IRC server.
+* `port`: The IRC server port.
+* `nickname`: The bot's nickname once it connects to the server.
+* `password`: IRC servers can require a password to connect. If the server doesn't require one, this should be an empty string.
+* `use_ssl`: Whether the bot should use SSL when connecting.
+* `targets`: A list of users and/or channels to send the notification to.
+
+### Test Considerations
+
+* Test both plain and SSL connectivity.
+* Test single and multiples of both users and channels.
+
+### Test Service
+
+There are a few modern IRC servers to choose from, but we should use a fairly full-featured service to get good test coverage. I recommend inspircd because it is actively maintained and pretty straightforward to configure.
+
+## Webhook
+
+The webhook notification type in Ansible Tower provides a simple interface for sending POSTs to a predefined web service. Tower will POST to this address using the `application/json` content type, with the data payload containing all relevant details in JSON format.
+The parameters are pretty straightforward:
+
+* `url`: The full URL that will be POSTed to.
+* `headers`: Headers in JSON form where the keys and values are strings. For example: `{"Authentication": "988881adc9fc3655077dc2d4d757d480b5ea0e11", "MessageType": "Test"}`
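+
+To see exactly what Tower delivers, a minimal receiving endpoint helps. Here is a rough sketch (Flask and port 8099 are assumptions; this is an illustrative stand-in, not the gist referenced under Test Service below):
+
+```
+from flask import Flask, request
+
+app = Flask(__name__)
+
+@app.route('/', methods=['POST'])
+def receive():
+    # Tower should POST with Content-Type: application/json, so get_json()
+    # ought to parse the payload; dump the headers so they can be verified.
+    print(dict(request.headers))
+    print(request.get_json())
+    return 'ok'
+
+if __name__ == '__main__':
+    app.run(host='0.0.0.0', port=8099)
+```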
+
+### Test Considerations
+
+* Test both plain HTTP and HTTPS services; specifically, also test HTTPS with a self-signed cert.
+* Verify that the headers and payload are present, that the payload is JSON, and that the content type is specifically `application/json`.
+
+### Test Service
+
+A very basic test can be performed by using `netcat`:
+
+```
+netcat -l 8099
+```
+
+and then sending the request to: http://\<host\>:8099
+
+Note that this won't respond correctly to the notification, so it will yield an error. I recommend using a very basic Flask application for verifying the POST request; you can see an example of mine here:
+
+https://gist.github.com/matburt/73bfbf85c2443f39d272
+
+This demonstrates how to define an endpoint and parse headers and JSON content. It doesn't show configuring Flask for HTTPS, but that is also pretty straightforward: http://flask.pocoo.org/snippets/111/
diff --git a/pytest.ini b/pytest.ini
index 90f45f0b2a..748c2919fd 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -4,3 +4,5 @@ python_paths = awx/lib/site-packages
 site_dirs = awx/lib/site-packages
 python_files = *.py
 addopts = --reuse-db
+markers =
+    ac: access control test
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index edf8f7ad74..254d3c0237 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -21,6 +21,7 @@ django-auth-ldap==1.2.6
 django-celery==3.1.17
 django-crum==0.6.1
 django-extensions==1.5.9
+git+https://github.com/chrismeyersfsu/django-jsonbfield@fix-sqlite_serialization#egg=django-jsonbfield
 django-polymorphic==0.7.2
 django-radius==1.0.0
 djangorestframework==3.3.2
@@ -48,6 +49,7 @@ idna==2.0
 importlib==1.0.3
 ip-associations-python-novaclient-ext==0.1
 ipaddress==1.0.16
+irc==13.3.1
 iso8601==0.1.11
 isodate==0.5.1
 jsonpatch==1.12
@@ -84,6 +86,7 @@ psycopg2
 pyasn1==0.1.9
 pycrypto==2.6.1
 pycparser==2.14
+pygerduty==0.32.1
 PyJWT==1.4.0
 pymongo==2.8
 pyOpenSSL==0.15.1
@@ -121,11 +124,13 @@ requestsexceptions==1.1.1
 shade==1.4.0
 simplejson==3.8.1
 six==1.9.0
+slackclient==0.16
 statsd==3.2.1
 stevedore==1.10.0
 suds==0.4
 unicodecsv==0.14.1
 warlock==1.2.0
+twilio==4.9.1
 wheel==0.24.0
 wrapt==1.10.6
 wsgiref==0.1.2
diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt
index 0e1fcaa1ba..9131465b10 100644
--- a/requirements/requirements_dev.txt
+++ b/requirements/requirements_dev.txt
@@ -7,3 +7,4 @@ pytest
 pytest-cov
 pytest-django
 pytest-pythonpath
+pytest-mock
diff --git a/requirements/requirements_jenkins.txt b/requirements/requirements_jenkins.txt
index 7ea9c8642f..b1cdafc250 100644
--- a/requirements/requirements_jenkins.txt
+++ b/requirements/requirements_jenkins.txt
@@ -13,3 +13,4 @@ pytest
 pytest-cov
 pytest-django
 pytest-pythonpath
+pytest-mock
diff --git a/tools/docker-compose/start_development.sh b/tools/docker-compose/start_development.sh
index 1ade0e0cf8..391df450dc 100755
--- a/tools/docker-compose/start_development.sh
+++ b/tools/docker-compose/start_development.sh
@@ -1,4 +1,5 @@
 #!/bin/bash
+set +x
 # Wait for the databases to come up
 ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=postgres port=5432" all
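As a footnote to the `pytest.ini` change above, the new `ac` marker would be used roughly as sketched below; the test name and body are hypothetical, only the marker and the `-m` run flag come from the change itself:

```
import pytest

@pytest.mark.ac
def test_org_admin_can_see_all_users():
    # access control assertions would go here
    assert True

# Run only the access control tests with: py.test -m ac
```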