
Merge branch 'devel' into channels-deployment

Wayne Witzel III 2016-10-14 13:33:51 -04:00
commit 1d677dad91
30 changed files with 362 additions and 71 deletions

View File

@ -6,6 +6,7 @@ recursive-include awx/ui/templates *.html
recursive-include awx/ui/static *
recursive-include awx/playbooks *.yml
recursive-include awx/lib/site-packages *
recursive-include awx/plugins *.ps1
recursive-include requirements *.txt
recursive-include config *
recursive-include docs/licenses *

View File

@ -357,12 +357,16 @@ dbshell:
sudo -u postgres psql -d awx-dev
server_noattach:
tmux new-session -d -s tower 'exec make runserver'
tmux new-session -d -s tower 'exec make uwsgi'
tmux rename-window 'Tower'
tmux select-window -t tower:0
tmux split-window -v 'exec make celeryd'
tmux new-window 'exec make receiver'
tmux new-window 'exec make daphne'
tmux select-window -t tower:1
tmux rename-window 'WebSockets'
tmux split-window -h 'exec make runworker'
tmux new-window 'exec make receiver'
tmux select-window -t tower:2
tmux rename-window 'Extra Services'
tmux split-window -h 'exec make factcacher'
@ -387,6 +391,24 @@ flower:
fi; \
$(PYTHON) manage.py celery flower --address=0.0.0.0 --port=5555 --broker=amqp://guest:guest@$(RABBITMQ_HOST):5672//
uwsgi:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
uwsgi --socket :8050 --module=awx.wsgi:application --home=/venv/tower --chdir=/tower_devel/ --vacuum --processes=5 --harakiri=60 --static-map /static=/tower_devel/awx/public/static
daphne:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
daphne -b 0.0.0.0 -p 8051 awx.asgi:channel_layer
runworker:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
$(PYTHON) manage.py runworker --only-channels websocket.*
# Run the built-in development webserver (by default on http://localhost:8013).
runserver:
@if [ "$(VENV_BASE)" ]; then \
@ -519,10 +541,10 @@ release_build:
# Build setup tarball
tar-build/$(SETUP_TAR_FILE):
@mkdir -p tar-build
@cp -a setup tar-build/$(SETUP_TAR_NAME)
@rsync -az --exclude /test setup/ tar-build/$(SETUP_TAR_NAME)
@rsync -az docs/licenses tar-build/$(SETUP_TAR_NAME)/
@cd tar-build/$(SETUP_TAR_NAME) && sed -e 's#%NAME%#$(NAME)#;s#%VERSION%#$(VERSION)#;s#%RELEASE%#$(RELEASE)#;' group_vars/all.in > group_vars/all
@cd tar-build && tar -czf $(SETUP_TAR_FILE) --exclude "*/all.in" --exclude "**/test/*" $(SETUP_TAR_NAME)/
@cd tar-build && tar -czf $(SETUP_TAR_FILE) --exclude "*/all.in" $(SETUP_TAR_NAME)/
@ln -sf $(SETUP_TAR_FILE) tar-build/$(SETUP_TAR_LINK)
tar-build/$(SETUP_TAR_CHECKSUM):
@ -559,7 +581,7 @@ setup-bundle-build:
# TODO - Somehow share implementation with setup_tarball
setup-bundle-build/$(OFFLINE_TAR_FILE):
cp -a setup setup-bundle-build/$(OFFLINE_TAR_NAME)
rsync -az --exclude /test setup/ setup-bundle-build/$(OFFLINE_TAR_NAME)
rsync -az docs/licenses setup-bundle-build/$(OFFLINE_TAR_NAME)/
cd setup-bundle-build/$(OFFLINE_TAR_NAME) && sed -e 's#%NAME%#$(NAME)#;s#%VERSION%#$(VERSION)#;s#%RELEASE%#$(RELEASE)#;' group_vars/all.in > group_vars/all
$(PYTHON) $(DEPS_SCRIPT) -d $(DIST) -r $(DIST_MAJOR) -u $(AW_REPO_URL) -s setup-bundle-build/$(OFFLINE_TAR_NAME) -v -v -v
@ -753,7 +775,7 @@ docker-auth:
# Docker Compose Development environment
docker-compose: docker-auth
TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose.yml up --no-recreate
TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose.yml up --no-recreate nginx tower
docker-compose-cluster: docker-auth
TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose-cluster.yml up
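
The new targets split the development web stack into three processes: uwsgi serves the HTTP API on 8050, daphne terminates websocket connections on 8051, and runworker consumes the websocket.* channels. Below is a minimal sketch of the Channels 1.x-style routing such a worker consumes; the consumer functions and their behavior are illustrative assumptions, not AWX's actual implementation.

    # routing.py -- illustrative Channels routing; consumer names are assumptions.
    from channels.routing import route

    def ws_connect(message):
        # nothing to do on connect in this sketch
        pass

    def ws_receive(message):
        # echo back whatever text frame the client sent (placeholder behavior)
        message.reply_channel.send({'text': message.content.get('text', '')})

    channel_routing = [
        route('websocket.connect', ws_connect),
        route('websocket.receive', ws_receive),
    ]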

View File

@ -1,5 +1,7 @@
runserver: make runserver
runworker: make runworker
daphne: make daphne
celeryd: make celeryd
receiver: make receiver
factcacher: make factcacher
flower: make flower
uwsgi: make uwsgi

View File

@ -104,6 +104,7 @@ class APIView(views.APIView):
logger.warn(status_msg)
response = super(APIView, self).finalize_response(request, response, *args, **kwargs)
time_started = getattr(self, 'time_started', None)
response['X-API-Node'] = settings.CLUSTER_HOST_ID
if time_started:
time_elapsed = time.time() - self.time_started
response['X-API-Time'] = '%0.3fs' % time_elapsed
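
finalize_response now stamps every API response with the identifier of the node that served it, which makes it easy to see which cluster member answered a given request. A quick, hypothetical check against a running dev server (the endpoint and port are assumptions, not part of the commit):

    import requests  # hypothetical check against a local dev server

    resp = requests.get('http://localhost:8013/api/v1/ping/')
    print(resp.headers.get('X-API-Node'))   # the CLUSTER_HOST_ID of the answering node
    print(resp.headers.get('X-API-Time'))   # timing header that was already present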

View File

@ -1920,13 +1920,14 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
ask_job_type_on_launch = serializers.ReadOnlyField()
ask_inventory_on_launch = serializers.ReadOnlyField()
ask_credential_on_launch = serializers.ReadOnlyField()
artifacts = serializers.SerializerMethodField()
class Meta:
model = Job
fields = ('*', 'job_template', 'passwords_needed_to_start', 'ask_variables_on_launch',
'ask_limit_on_launch', 'ask_tags_on_launch', 'ask_skip_tags_on_launch',
'ask_job_type_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch',
'allow_simultaneous',)
'allow_simultaneous', 'artifacts',)
def get_related(self, obj):
res = super(JobSerializer, self).get_related(obj)
@ -1949,6 +1950,11 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
res['relaunch'] = reverse('api:job_relaunch', args=(obj.pk,))
return res
def get_artifacts(self, obj):
if obj:
return obj.display_artifacts()
return {}
def to_internal_value(self, data):
# When creating a new job and a job template is specified, populate any
# fields not provided in data from the job template.
@ -2232,11 +2238,14 @@ class WorkflowNodeBaseSerializer(BaseSerializer):
job_tags = serializers.SerializerMethodField()
limit = serializers.SerializerMethodField()
skip_tags = serializers.SerializerMethodField()
success_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
failure_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
always_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
class Meta:
fields = ('id', 'url', 'related', 'unified_job_template',
fields = ('*', '-name', '-description', 'id', 'url', 'related',
'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',
'inventory', 'credential', 'job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags')
read_only_fields = ('success_nodes', 'failure_nodes', 'always_nodes')
def get_related(self, obj):
res = super(WorkflowNodeBaseSerializer, self).get_related(obj)
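
The artifacts field on JobSerializer follows the standard DRF SerializerMethodField pattern: a read-only field whose value comes from get_<field_name>() and which, here, delegates redaction to the model. Condensed to just that pattern (illustrative, not the full AWX serializer):

    from rest_framework import serializers

    class JobArtifactsOnlySerializer(serializers.Serializer):
        # read-only; resolved by get_artifacts() at render time
        artifacts = serializers.SerializerMethodField()

        def get_artifacts(self, obj):
            # keep the no_log redaction on the model so every consumer gets it
            return obj.display_artifacts() if obj else {}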

View File

@ -6,9 +6,10 @@ from awx import __version__ as tower_version
# Prepare the AWX environment.
from awx import prepare_env, MODE
prepare_env()
prepare_env() # NOQA
from django.core.wsgi import get_wsgi_application # NOQA
from channels.asgi import get_channel_layer
"""
ASGI config for AWX project.
@ -29,7 +30,6 @@ if MODE == 'production':
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
from channels.asgi import get_channel_layer
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
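
With the channels import moved above the version checks, the module that daphne is pointed at (awx.asgi:channel_layer in the Makefile target) roughly reduces to the following. This is a condensed sketch, not the file verbatim:

    import os
    from awx import prepare_env

    prepare_env()  # NOQA -- must run before anything imports Django settings

    from channels.asgi import get_channel_layer  # NOQA

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
    channel_layer = get_channel_layer()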

View File

@ -20,8 +20,14 @@ logger = logging.getLogger('awx.conf.fields')
class StringListField(ListField):
child = CharField()
def to_representation(self, value):
if value is None and self.allow_null:
return None
return super(StringListField, self).to_representation(value)
class URLField(CharField):
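
The effect of the new to_representation guard, as a small behavior sketch:

    from awx.conf.fields import StringListField

    field = StringListField(allow_null=True)
    assert field.to_representation(None) is None          # previously this raised a TypeError (None is not iterable)
    assert field.to_representation(['a', 'b']) == ['a', 'b']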

View File

@ -22,6 +22,8 @@ def copy_tower_settings(apps, schema_editor):
setting, created = Setting.objects.get_or_create(
key=tower_setting.key,
user=tower_setting.user,
created=tower_setting.created,
modified=tower_setting.modified,
defaults=dict(value=value),
)
if not created and setting.value != value:

View File

@ -191,6 +191,10 @@ class SettingsWrapper(UserSettingsHolder):
def _get_default(self, name):
return getattr(self.default_settings, name)
@property
def SETTINGS_MODULE(self):
return self._get_default('SETTINGS_MODULE')
def __getattr__(self, name):
value = empty
if name in self._get_supported_settings():

View File

@ -97,6 +97,8 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
settings_qs = self.get_queryset()
user = self.request.user if self.category_slug == 'user' else None
for key, value in serializer.validated_data.items():
if key == 'LICENSE':
continue
setattr(serializer.instance, key, value)
# Always encode "raw" strings as JSON.
if isinstance(value, basestring):
@ -114,7 +116,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
return Response(status=status.HTTP_204_NO_CONTENT)
def perform_destroy(self, instance):
for setting in self.get_queryset():
for setting in self.get_queryset().exclude(key='LICENSE'):
setting.delete()

View File

@ -1277,6 +1277,7 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
qs = self.model.objects.filter(
workflow_job_template__in=WorkflowJobTemplate.accessible_objects(
self.user, 'read_role'))
qs = qs.prefetch_related('success_nodes', 'failure_nodes', 'always_nodes')
return qs
def can_use_prompted_resources(self, data):
@ -1371,6 +1372,8 @@ class WorkflowJobNodeAccess(BaseAccess):
qs = self.model.objects.filter(
workflow_job__workflow_job_template__in=WorkflowJobTemplate.accessible_objects(
self.user, 'read_role'))
qs = qs.select_related('unified_job_template', 'job')
qs = qs.prefetch_related('success_nodes', 'failure_nodes', 'always_nodes')
return qs
def can_add(self, data):
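
The added select_related/prefetch_related calls exist because serializing each workflow node touches its job, its unified job template, and its success/failure/always relations; without them the node list views issue several extra queries per node. The pattern in isolation (a sketch):

    from awx.main.models import WorkflowJobNode

    qs = WorkflowJobNode.objects.all()
    qs = qs.select_related('unified_job_template', 'job')                       # FK joins, one query
    qs = qs.prefetch_related('success_nodes', 'failure_nodes', 'always_nodes')  # M2M fetched in bulk
    nodes = list(qs)  # related objects now come from the prefetch cache, not per-node queries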

View File

@ -0,0 +1,27 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import BaseCommand
from optparse import make_option
from awx.main.models import Instance
class Command(BaseCommand):
"""
Deprovision a Tower cluster node
"""
option_list = BaseCommand.option_list + (
make_option('--name', dest='name', type='string',
help='Hostname used during provisioning'),
)
def handle(self, **options):
# Get the instance.
instance = Instance.objects.filter(hostname=options.get('name'))
if instance.exists():
instance.delete()
print('Successfully removed')
else:
print('No instance found matching name {}'.format(options.get('name')))
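
Invoking the new command, either from the shell or programmatically (the hostname value is made up):

    # $ python manage.py deprovision_node --name=tower-node-2
    from django.core.management import call_command

    call_command('deprovision_node', name='tower-node-2')  # 'tower-node-2' is an illustrative hostname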

View File

@ -4,27 +4,26 @@
from awx.main.models import Instance
from django.conf import settings
from django.core.management.base import CommandError, NoArgsCommand
from optparse import make_option
from django.core.management.base import BaseCommand
class Command(NoArgsCommand):
class Command(BaseCommand):
"""
Internal tower command.
Register this instance with the database for HA tracking.
"""
option_list = NoArgsCommand.option_list + (
option_list = BaseCommand.option_list + (
make_option('--hostname', dest='hostname', type='string',
help='Hostname used during provisioning')
help='Hostname used during provisioning'),
)
def handle(self, *args, **options):
super(Command, self).handle(**options)
def handle(self, **options):
uuid = settings.SYSTEM_UUID
instance = Instance.objects.filter(hostname=options.get('hostname'))
if instance.exists():
print("Instance already registered %s" % instance_str(instance[0]))
print("Instance already registered {}".format(instance[0]))
return
instance = Instance(uuid=uuid, hostname=options.get('hostname'))
instance.save()
print('Successfully registered instance %s.' % instance_str(instance))
print('Successfully registered instance {}'.format(instance))
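
The switch from NoArgsCommand to BaseCommand matters because NoArgsCommand is deprecated and removed in later Django releases; option_list is likewise superseded by argparse-based add_arguments. For reference, the same option expressed in that style looks like this (a sketch, not part of this commit):

    from django.core.management.base import BaseCommand

    class Command(BaseCommand):
        help = 'Register this instance with the database for HA tracking.'

        def add_arguments(self, parser):
            parser.add_argument('--hostname', dest='hostname', type=str,
                                help='Hostname used during provisioning')

        def handle(self, **options):
            pass  # same body as the handle() shown above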

View File

@ -4,6 +4,7 @@
# Python
import datetime
import logging
import json
from kombu import Connection, Exchange, Queue
from kombu.mixins import ConsumerMixin
@ -80,6 +81,7 @@ class CallbackBrokerWorker(ConsumerMixin):
event_uuid = payload.get("uuid", '')
parent_event_uuid = payload.get("parent_uuid", '')
artifact_data = payload.get("artifact_data", None)
# Sanity check: Don't honor keys that we don't recognize.
for key in payload.keys():
@ -123,6 +125,23 @@ class CallbackBrokerWorker(ConsumerMixin):
except DatabaseError as e:
logger.error("Database Error Saving Job Event: {}".format(e))
if artifact_data:
try:
self.process_artifacts(artifact_data, res, payload)
except DatabaseError as e:
logger.error("Database Error Saving Job Artifacts: {}".format(e))
def process_artifacts(self, artifact_data, res, payload):
artifact_dict = json.loads(artifact_data)
if res and isinstance(res, dict):
if res.get('_ansible_no_log', False):
artifact_dict['_ansible_no_log'] = True
if artifact_data is not None:
parent_job = Job.objects.filter(pk=payload['job_id']).first()
if parent_job is not None and parent_job.artifacts != artifact_dict:
parent_job.artifacts = artifact_dict
parent_job.save(update_fields=['artifacts'])
class Command(NoArgsCommand):
'''
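
The core of the new artifact handling, isolated as a worked example (values are made up):

    import json

    artifact_data = '{"build_id": "1234"}'      # JSON string forwarded by the callback plugin
    res = {'_ansible_no_log': True}             # task result that was marked no_log

    artifact_dict = json.loads(artifact_data)
    if res and isinstance(res, dict) and res.get('_ansible_no_log', False):
        artifact_dict['_ansible_no_log'] = True  # the no_log flag travels with the artifacts

    assert artifact_dict == {'build_id': '1234', '_ansible_no_log': True}
    # The worker then writes artifact_dict onto Job.artifacts, but only if it changed.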

View File

@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('main', '0039_v310_channelgroup'),
]
operations = [
migrations.AddField(
model_name='job',
name='artifacts',
field=jsonfield.fields.JSONField(default={}, editable=False, blank=True),
),
migrations.AddField(
model_name='workflowjobnode',
name='ancestor_artifacts',
field=jsonfield.fields.JSONField(default={}, editable=False, blank=True),
),
]

View File

@ -550,6 +550,11 @@ class Job(UnifiedJob, JobOptions, JobNotificationMixin):
default={},
editable=False,
)
artifacts = JSONField(
blank=True,
default={},
editable=False,
)
@classmethod
def _get_parent_field_name(cls):
@ -775,6 +780,15 @@ class Job(UnifiedJob, JobOptions, JobNotificationMixin):
else:
return self.extra_vars
def display_artifacts(self):
'''
Hides artifacts if they are marked as no_log type artifacts.
'''
artifacts = self.artifacts
if artifacts.get('_ansible_no_log', False):
return "$hidden due to Ansible no_log flag$"
return artifacts
def _survey_search_and_replace(self, content):
# Use job template survey spec to identify password fields.
# Then lookup password fields in extra_vars and save the values
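
Behavior of the new accessor, sketched with unsaved model instances (values illustrative):

    from awx.main.models import Job

    job = Job(artifacts={'build_id': '1234'})
    assert job.display_artifacts() == {'build_id': '1234'}

    job = Job(artifacts={'build_id': '1234', '_ansible_no_log': True})
    assert job.display_artifacts() == "$hidden due to Ansible no_log flag$"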

View File

@ -33,7 +33,7 @@ from djcelery.models import TaskMeta
from awx.main.models.base import * # noqa
from awx.main.models.schedules import Schedule
from awx.main.utils import decrypt_field, _inventory_updates
from awx.main.redact import UriCleaner
from awx.main.redact import UriCleaner, REPLACE_STR
from awx.main.consumers import emit_channel_notification
__all__ = ['UnifiedJobTemplate', 'UnifiedJob']
@ -348,11 +348,10 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
create_kwargs[field_name] = getattr(self, field_name)
new_kwargs = self._update_unified_job_kwargs(**create_kwargs)
unified_job = unified_job_class(**new_kwargs)
# For JobTemplate-based jobs with surveys, save list for perma-redaction
if (hasattr(self, 'survey_spec') and getattr(self, 'survey_enabled', False) and
not getattr(unified_job, 'survey_passwords', False)):
# For JobTemplate-based jobs with surveys, add passwords to list for perma-redaction
if hasattr(self, 'survey_spec') and getattr(self, 'survey_enabled', False):
password_list = self.survey_password_variables()
hide_password_dict = {}
hide_password_dict = getattr(unified_job, 'survey_passwords', {})
for password in password_list:
hide_password_dict[password] = REPLACE_STR
unified_job.survey_passwords = hide_password_dict
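
The behavioral difference: survey password variables are now merged into whatever survey_passwords the job already carries, instead of being skipped whenever any exist. In miniature (variable names are illustrative):

    from awx.main.redact import REPLACE_STR

    password_list = ['sudo_password', 'vault_password']   # from survey_password_variables()
    hide_password_dict = {'old_secret': REPLACE_STR}       # value already on the unified job

    for password in password_list:
        hide_password_dict[password] = REPLACE_STR

    # all three keys now map to REPLACE_STR, so they are redacted in displayed extra_vars
    assert set(hide_password_dict) == {'old_secret', 'sudo_password', 'vault_password'}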

View File

@ -21,9 +21,9 @@ from awx.main.models.rbac import (
)
from awx.main.fields import ImplicitRoleField
from awx.main.models.mixins import ResourceMixin
from awx.main.redact import REPLACE_STR
import yaml
import json
from copy import copy
__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode', 'WorkflowJobTemplateNode',]
@ -124,6 +124,13 @@ class WorkflowNodeBase(CreatedModifiedModel):
data['missing'] = missing_dict
return data
def get_parent_nodes(self):
'''Returns queryset containing all parents of this node'''
success_parents = getattr(self, '%ss_success' % self.__class__.__name__.lower()).all()
failure_parents = getattr(self, '%ss_failure' % self.__class__.__name__.lower()).all()
always_parents = getattr(self, '%ss_always' % self.__class__.__name__.lower()).all()
return success_parents | failure_parents | always_parents
@classmethod
def _get_workflow_job_field_names(cls):
'''
@ -175,11 +182,22 @@ class WorkflowJobNode(WorkflowNodeBase):
default=None,
on_delete=models.CASCADE,
)
ancestor_artifacts = JSONField(
blank=True,
default={},
editable=False,
)
def get_absolute_url(self):
return reverse('api:workflow_job_node_detail', args=(self.pk,))
def get_job_kwargs(self):
'''
In advance of creating a new unified job as part of a workflow,
this method builds the attributes to use
It alters the node by saving its updated version of
ancestor_artifacts, making it available to subsequent nodes.
'''
# reject/accept prompted fields
data = {}
ujt_obj = self.unified_job_template
@ -189,19 +207,31 @@ class WorkflowJobNode(WorkflowNodeBase):
accepted_fields.pop(fd)
data.update(accepted_fields)
# TODO: decide what to do in the event of missing fields
# build ancestor artifacts, save them to node model for later
aa_dict = {}
for parent_node in self.get_parent_nodes():
aa_dict.update(parent_node.ancestor_artifacts)
if parent_node.job and hasattr(parent_node.job, 'artifacts'):
aa_dict.update(parent_node.job.artifacts)
if aa_dict:
self.ancestor_artifacts = aa_dict
self.save(update_fields=['ancestor_artifacts'])
if '_ansible_no_log' in aa_dict:
# TODO: merge Workflow Job survey passwords into this
password_dict = {}
for key in aa_dict:
if key != '_ansible_no_log':
password_dict[key] = REPLACE_STR
data['survey_passwords'] = password_dict
# process extra_vars
# TODO: still lack consensus about variable precedence
extra_vars = {}
if self.workflow_job and self.workflow_job.extra_vars:
try:
WJ_json_extra_vars = json.loads(
(self.workflow_job.extra_vars or '').strip() or '{}')
except ValueError:
try:
WJ_json_extra_vars = yaml.safe_load(self.workflow_job.extra_vars)
except yaml.YAMLError:
WJ_json_extra_vars = {}
extra_vars.update(WJ_json_extra_vars)
# TODO: merge artifacts, add ancestor_artifacts to kwargs
extra_vars.update(self.workflow_job.extra_vars_dict)
if aa_dict:
functional_aa_dict = copy(aa_dict)
functional_aa_dict.pop('_ansible_no_log', None)
extra_vars.update(functional_aa_dict)
if extra_vars:
data['extra_vars'] = extra_vars
return data
@ -384,6 +414,6 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, JobNotificationMixin, Workflow
if res:
self.status = 'running'
self.save()
self.socketio_emit_status("running")
self.websocket_emit_status("running")
return res
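
How the accumulated artifacts feed the next node's kwargs, in miniature: every artifact key except the no_log flag is queued for redaction via survey_passwords, while a copy with the flag stripped is merged into extra_vars (values illustrative):

    from copy import copy
    from awx.main.redact import REPLACE_STR

    aa_dict = {'build_id': '1234', '_ansible_no_log': True}   # merged ancestor artifacts

    password_dict = {key: REPLACE_STR for key in aa_dict if key != '_ansible_no_log'}
    functional_aa_dict = copy(aa_dict)
    functional_aa_dict.pop('_ansible_no_log', None)

    assert password_dict == {'build_id': REPLACE_STR}
    assert functional_aa_dict == {'build_id': '1234'}          # this is what lands in extra_vars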

View File

@ -893,7 +893,7 @@ class RunJob(BaseTask):
'tower_user_name': job.created_by.username,
})
if job.extra_vars_dict:
if kwargs.get('display', False) and job.job_template and job.job_template.survey_enabled:
if kwargs.get('display', False) and job.job_template:
extra_vars.update(json.loads(job.display_extra_vars()))
else:
extra_vars.update(job.extra_vars_dict)
@ -1685,7 +1685,7 @@ class RunWorkflowJob(BaseTask):
def run(self, pk, **kwargs):
#Run the job/task and capture its output.
instance = self.update_model(pk, status='running', celery_task_id=self.request.id)
instance.socketio_emit_status("running")
instance.websocket_emit_status("running")
# FIXME: Currently, the workflow job busy waits until the graph run is
# complete. Instead, the workflow job should return or never even run,
@ -1707,6 +1707,6 @@ class RunWorkflowJob(BaseTask):
instance = self.update_model(instance.pk, status='successful')
break
time.sleep(1)
instance.socketio_emit_status(instance.status)
instance.websocket_emit_status(instance.status)
# TODO: Handle cancel
'''

View File

@ -1,5 +1,6 @@
# Python
import time
import pytest
from awx.main.tests.factories import (
@ -52,3 +53,15 @@ def get_ssh_version(mocker):
@pytest.fixture
def job_template_with_survey_passwords_unit(job_template_with_survey_passwords_factory):
return job_template_with_survey_passwords_factory(persisted=False)
@pytest.fixture
def enterprise_license():
from awx.main.task_engine import TaskEnhancer
return TaskEnhancer(
company_name='AWX',
contact_name='AWX Admin',
contact_email='awx@example.com',
license_date=int(time.time() + 3600),
instance_count=10000,
license_type='enterprise',
).enhance()

View File

@ -0,0 +1,30 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
# Python
import pytest
# Django
from django.core.urlresolvers import reverse
# AWX
from awx.conf.models import Setting
@pytest.mark.django_db
def test_license_cannot_be_removed_via_system_settings(get, put, patch, delete, admin, enterprise_license):
url = reverse('api:setting_singleton_detail', args=('system',))
response = get(url, user=admin, expect=200)
assert not response.data['LICENSE']
Setting.objects.create(key='LICENSE', value=enterprise_license)
response = get(url, user=admin, expect=200)
assert response.data['LICENSE']
put(url, user=admin, data=response.data, expect=200)
response = get(url, user=admin, expect=200)
assert response.data['LICENSE']
patch(url, user=admin, data={}, expect=200)
response = get(url, user=admin, expect=200)
assert response.data['LICENSE']
delete(url, user=admin, expect=204)
response = get(url, user=admin, expect=200)
assert response.data['LICENSE']

View File

@ -3,8 +3,11 @@
import pytest
# AWX
from awx.main.models.workflow import WorkflowJob, WorkflowJobTemplateNode
from awx.main.models.workflow import WorkflowJob, WorkflowJobNode, WorkflowJobTemplateNode
from awx.main.models.jobs import Job
from awx.main.models.projects import ProjectUpdate
@pytest.mark.django_db
class TestWorkflowJob:
@pytest.fixture
def workflow_job(self, workflow_job_template_factory):
@ -21,7 +24,6 @@ class TestWorkflowJob:
return wfj
@pytest.mark.django_db
def test_inherit_job_template_workflow_nodes(self, mocker, workflow_job):
workflow_job.inherit_job_template_workflow_nodes()
@ -31,4 +33,60 @@ class TestWorkflowJob:
assert nodes[0].failure_nodes.filter(id=nodes[3].id).exists()
assert nodes[3].failure_nodes.filter(id=nodes[4].id).exists()
def test_inherit_ancestor_artifacts_from_job(self, project, mocker):
"""
Assure that nodes along the line of execution inherit artifacts
from both jobs ran, and from the accumulation of old jobs
"""
# Related resources
wfj = WorkflowJob.objects.create(name='test-wf-job')
job = Job.objects.create(name='test-job', artifacts={'b': 43})
# Workflow job nodes
job_node = WorkflowJobNode.objects.create(workflow_job=wfj, job=job,
ancestor_artifacts={'a': 42})
queued_node = WorkflowJobNode.objects.create(workflow_job=wfj)
# Connect old job -> new job
mocker.patch.object(queued_node, 'get_parent_nodes', lambda: [job_node])
assert queued_node.get_job_kwargs()['extra_vars'] == {'a': 42, 'b': 43}
assert queued_node.ancestor_artifacts == {'a': 42, 'b': 43}
def test_inherit_ancestor_artifacts_from_project_update(self, project, mocker):
"""
Test that the existence of a project update (no artifacts) does
not break the flow of ancestor_artifacts
"""
# Related resources
wfj = WorkflowJob.objects.create(name='test-wf-job')
update = ProjectUpdate.objects.create(name='test-update', project=project)
# Workflow job nodes
project_node = WorkflowJobNode.objects.create(workflow_job=wfj, job=update,
ancestor_artifacts={'a': 42, 'b': 43})
queued_node = WorkflowJobNode.objects.create(workflow_job=wfj)
# Connect project update -> new job
mocker.patch.object(queued_node, 'get_parent_nodes', lambda: [project_node])
assert queued_node.get_job_kwargs()['extra_vars'] == {'a': 42, 'b': 43}
assert queued_node.ancestor_artifacts == {'a': 42, 'b': 43}
@pytest.mark.django_db
class TestWorkflowJobTemplate:
@pytest.fixture
def wfjt(self, workflow_job_template_factory):
wfjt = workflow_job_template_factory('test').workflow_job_template
nodes = [WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt) for i in range(0, 3)]
nodes[0].success_nodes.add(nodes[1])
nodes[1].failure_nodes.add(nodes[2])
return wfjt
def test_node_parentage(self, wfjt):
# test success parent
wfjt_node = wfjt.workflow_job_template_nodes.all()[1]
parent_qs = wfjt_node.get_parent_nodes()
assert len(parent_qs) == 1
assert parent_qs[0] == wfjt.workflow_job_template_nodes.all()[0]
# test failure parent
wfjt_node = wfjt.workflow_job_template_nodes.all()[2]
parent_qs = wfjt_node.get_parent_nodes()
assert len(parent_qs) == 1
assert parent_qs[0] == wfjt.workflow_job_template_nodes.all()[1]

View File

@ -6,6 +6,7 @@ from awx.main.models.workflow import (
WorkflowJobTemplate, WorkflowJobTemplateNode, WorkflowJobInheritNodesMixin,
WorkflowJob, WorkflowJobNode
)
import mock
class TestWorkflowJobInheritNodesMixin():
class TestCreateWorkflowJobNodes():
@ -151,6 +152,7 @@ class TestWorkflowJobCreate:
unified_job_template=wfjt_node_with_prompts.unified_job_template,
workflow_job=workflow_job_unit)
@mock.patch('awx.main.models.workflow.WorkflowNodeBase.get_parent_nodes', lambda self: [])
class TestWorkflowJobNodeJobKWARGS:
"""
Tests for building the keyword arguments that go into creating and

View File

@ -184,6 +184,9 @@ class BaseCallbackModule(object):
if getattr(self, 'ad_hoc_command_id', None):
msg['ad_hoc_command_id'] = self.ad_hoc_command_id
if getattr(self, 'artifact_data', None):
msg['artifact_data'] = self.artifact_data
active_pid = os.getpid()
if self.job_callback_debug:
msg.update({
@ -416,6 +419,9 @@ class JobCallbackModule(BaseCallbackModule):
event_data['task'] = task_name
if role_name and event not in self.EVENTS_WITHOUT_TASK:
event_data['role'] = role_name
self.artifact_data = None
if 'res' in event_data and 'artifact_data' in event_data['res']:
self.artifact_data = event_data['res']['artifact_data']
super(JobCallbackModule, self)._log_event(event, **event_data)
def playbook_on_start(self):
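
What the callback plugin is looking for, sketched as the event payload it might receive (the task name and values are made up; only the presence of res['artifact_data'] matters):

    event_data = {
        'task': 'register build info',
        'res': {'changed': False, 'artifact_data': '{"build_id": "1234"}'},
    }

    artifact_data = None
    if 'res' in event_data and 'artifact_data' in event_data['res']:
        artifact_data = event_data['res']['artifact_data']
    # BaseCallbackModule._log_event() then copies artifact_data into the message sent to
    # the callback receiver, where process_artifacts() (shown earlier) stores it on the job.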

View File

@ -361,21 +361,20 @@ CELERY_QUEUES = (
Broadcast('projects'),
)
CELERY_ROUTES = {'awx.main.tasks.run_job': {'queue': 'jobs',
'routing_key': 'jobs'},
'awx.main.tasks.run_project_update': {'queue': 'projects'},
'awx.main.tasks.run_inventory_update': {'queue': 'jobs',
'routing_key': 'jobs'},
'awx.main.tasks.run_ad_hoc_command': {'queue': 'jobs',
'routing_key': 'jobs'},
'awx.main.tasks.run_system_job': {'queue': 'jobs',
'routing_key': 'jobs'},
'awx.main.scheduler.tasks.run_job_launch': {'queue': 'scheduler',
'routing_key': 'scheduler.job.launch'},
'awx.main.scheduler.tasks.run_job_complete': {'queue': 'scheduler',
'routing_key': 'scheduler.job.complete'},
'awx.main.tasks.cluster_node_heartbeat': {'queue': 'default',
'routing_key': 'cluster.heartbeat'},
}
'routing_key': 'jobs'},
'awx.main.tasks.run_project_update': {'queue': 'projects'},
'awx.main.tasks.run_inventory_update': {'queue': 'jobs',
'routing_key': 'jobs'},
'awx.main.tasks.run_ad_hoc_command': {'queue': 'jobs',
'routing_key': 'jobs'},
'awx.main.tasks.run_system_job': {'queue': 'jobs',
'routing_key': 'jobs'},
'awx.main.scheduler.tasks.run_job_launch': {'queue': 'scheduler',
'routing_key': 'scheduler.job.launch'},
'awx.main.scheduler.tasks.run_job_complete': {'queue': 'scheduler',
'routing_key': 'scheduler.job.complete'},
'awx.main.tasks.cluster_node_heartbeat': {'queue': 'default',
'routing_key': 'cluster.heartbeat'},}
CELERYBEAT_SCHEDULE = {
'tower_scheduler': {
@ -473,9 +472,6 @@ SOCIAL_AUTH_SAML_TECHNICAL_CONTACT = {}
SOCIAL_AUTH_SAML_SUPPORT_CONTACT = {}
SOCIAL_AUTH_SAML_ENABLED_IDPS = {}
SOCIAL_AUTH_ORGANIZATION_MAP = {}
SOCIAL_AUTH_TEAM_MAP = {}
# Any ANSIBLE_* settings will be passed to the subprocess environment by the
# celery task.

View File

@ -126,7 +126,8 @@ register(
register(
'SOCIAL_AUTH_ORGANIZATION_MAP',
field_class=fields.SocialOrganizationMapField,
default={},
allow_null=True,
default=None,
label=_('Social Auth Organization Map'),
help_text=SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT,
category=_('Authentication'),
@ -137,7 +138,8 @@ register(
register(
'SOCIAL_AUTH_TEAM_MAP',
field_class=fields.SocialTeamMapField,
default={},
allow_null=True,
default=None,
label=_('Social Auth Team Map'),
help_text=SOCIAL_AUTH_TEAM_MAP_HELP_TEXT,
category=_('Authentication'),

View File

@ -13,7 +13,17 @@ export default
init: function() {
var self = this,
host = window.location.host,
url = "ws://" + host + "/websocket/";
protocol,
url;
if($location.protocol() === 'http'){
protocol = 'ws';
}
if($location.protocol() === 'https'){
protocol = 'wss';
}
url = `${protocol}://${host}/websocket/`;
if (!$rootScope.sessionTimer || ($rootScope.sessionTimer && !$rootScope.sessionTimer.isExpired())) {
// We have a valid session token, so attempt socket connection
$log.debug('Socket connecting to: ' + url);
@ -83,7 +93,7 @@ export default
// The naming scheme is "ws" then a
// dash (-) and the group_name, then the job ID
// ex: 'ws-jobs-<jobId>'
str = `ws-${data.group_name}-${data.job}`
str = `ws-${data.group_name}-${data.job}`;
}
else if(data.group_name==="ad_hoc_command_events"){
// The naming scheme is "ws" then a
@ -194,7 +204,7 @@ export default
// This function is used for add a state resolve to all states,
// socket-enabled AND socket-disabled, and whether the $state
// requires a subscribe or an unsubscribe
self = this;
var self = this;
socketPromise.promise.then(function(){
if(!state.socket){
state.socket = {groups: {}};

View File

@ -1,5 +1,5 @@
git+https://github.com/chrismeyersfsu/ansiconv.git@tower_1.0.0#egg=ansiconv
amqp==1.4.5
amqp==1.4.9
anyjson==0.3.3
appdirs==1.4.0
azure==2.0.0rc2

View File

@ -11,8 +11,9 @@ services:
RABBITMQ_VHOST: /
ports:
- "8080:8080"
- "8013:8013"
- "5555:5555"
- "8050:8050"
- "8051:8051"
links:
- postgres
- memcached
@ -34,6 +35,14 @@ services:
ports:
- "15672:15672"
nginx:
image: gcr.io/ansible-tower-engineering/tower_nginx:${TAG}
ports:
- "8043:443"
- "8013:80"
links:
- tower
# Source Code Synchronization Container
# sync:
# build:

View File

@ -27,6 +27,6 @@ RUN ln -s /tower_devel/tools/docker-compose/start_development.sh /start_developm
WORKDIR /tmp
RUN SWIG_FEATURES="-cpperraswarn -includeall -D__`uname -m`__ -I/usr/include/openssl" VENV_BASE="/venv" make requirements_dev
WORKDIR /
EXPOSE 8013 8080 22
EXPOSE 8050 8051 8080 22
ENTRYPOINT ["/usr/bin/dumb-init"]
CMD /start_development.sh