
Merge pull request #6142 from jangsutsr/6113_allow_concurrent_workflow_job_runs

Allow concurrent workflow job runs
Commit 6fe133dc7f, authored by Aaron Tan on 2017-05-02 16:41:32 -04:00, committed by GitHub
6 changed files with 40 additions and 23 deletions


@@ -2510,7 +2510,7 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
class Meta:
model = WorkflowJobTemplate
-fields = ('*', 'extra_vars', 'organization', 'survey_enabled',)
+fields = ('*', 'extra_vars', 'organization', 'survey_enabled', 'allow_simultaneous',)
def get_related(self, obj):
res = super(WorkflowJobTemplateSerializer, self).get_related(obj)
@@ -2547,7 +2547,7 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
class Meta:
model = WorkflowJob
-fields = ('*', 'workflow_job_template', 'extra_vars')
+fields = ('*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous',)
def get_related(self, obj):
res = super(WorkflowJobSerializer, self).get_related(obj)
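
With this serializer change, the flag becomes readable and writable through the REST API. A minimal sketch of flipping it on a workflow job template (the host, credentials, and template ID are assumptions for illustration, not taken from this commit):

```python
# A sketch, not from this commit: assumes a reachable Tower/AWX API,
# basic-auth credentials, and an existing workflow job template with ID 42.
import requests

resp = requests.patch(
    'https://tower.example.com/api/v1/workflow_job_templates/42/',
    auth=('admin', 'password'),
    json={'allow_simultaneous': True},  # field exposed by this commit
)
resp.raise_for_status()
print(resp.json()['allow_simultaneous'])  # expect: True
```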


@@ -147,4 +147,16 @@ class Migration(migrations.Migration):
name='verbosity',
field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, b'0 (WARNING)'), (1, b'1 (INFO)'), (2, b'2 (DEBUG)')]),
),
+# Workflows
+migrations.AddField(
+    model_name='workflowjob',
+    name='allow_simultaneous',
+    field=models.BooleanField(default=False),
+),
+migrations.AddField(
+    model_name='workflowjobtemplate',
+    name='allow_simultaneous',
+    field=models.BooleanField(default=False),
+),
]
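
Because the new columns carry `default=False`, existing `workflowjob` and `workflowjobtemplate` rows are backfilled without a separate data migration. A sketch for inspecting the SQL Django would emit for it (the migration label '0035' is an assumption; this must run inside a configured AWX environment):

```python
# A sketch: prints the DDL Django would run for this migration.
# Assumes DJANGO_SETTINGS_MODULE points at the AWX settings module,
# and '0035' is an assumed (illustrative) migration label.
import django
django.setup()

from django.core.management import call_command
call_command('sqlmigrate', 'main', '0035')
```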


@@ -284,6 +284,9 @@ class WorkflowJobOptions(BaseModel):
blank=True,
default='',
))
+allow_simultaneous = models.BooleanField(
+    default=False
+)
extra_vars_dict = VarsDictProperty('extra_vars', True)
@@ -356,7 +359,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
@classmethod
def _get_unified_job_field_names(cls):
return ['name', 'description', 'extra_vars', 'labels', 'survey_passwords',
-'schedule', 'launch_type']
+'schedule', 'launch_type', 'allow_simultaneous']
@classmethod
def _get_unified_jt_copy_names(cls):
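
Adding `allow_simultaneous` to `_get_unified_job_field_names` means the flag is snapshotted from the template onto each spawned workflow job at launch time, which is what lets the scheduler read it straight off the job row later. A simplified standalone sketch of that copy step (not AWX's actual spawning code):

```python
# A sketch of the template-to-job field copy; AWX's real logic lives in its
# unified job template machinery, not in this simplified function.
def job_create_kwargs(template, field_names):
    return {name: getattr(template, name) for name in field_names}

class FakeTemplate:
    name, launch_type, allow_simultaneous = 'demo-wf', 'manual', True

kwargs = job_create_kwargs(FakeTemplate,
                           ['name', 'launch_type', 'allow_simultaneous'])
print(kwargs['allow_simultaneous'])  # True: the spawned job inherits the flag
```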


@@ -78,7 +78,7 @@ class DependencyGraph(object):
'''
JobDict
Presume that job is related to a project that is update on launch
'''
def should_update_related_project(self, job):
@@ -98,7 +98,7 @@ class DependencyGraph(object):
'''
This is a bit of fuzzy logic.
If the latest project update has a created time == job_created_time-1
then consider the project update found. This is so we don't enter an infinite loop
of updating the project when cache timeout is 0.
'''
@@ -178,6 +178,8 @@ class DependencyGraph(object):
return False
def can_workflow_job_run(self, job):
+if job['allow_simultaneous'] is True:
+    return True
return self.data[self.WORKFLOW_JOB_TEMPLATES_JOBS].get(job['workflow_job_template_id'], True)
def can_system_job_run(self):
@@ -217,4 +219,3 @@ class DependencyGraph(object):
def add_jobs(self, jobs):
map(lambda j: self.add_job(j), jobs)
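
The early return above makes the per-template mutual exclusion opt-out: a workflow job snapshot carrying `allow_simultaneous=True` skips the gate that otherwise blocks a second job from the same workflow job template. A self-contained sketch of that gating (the class below is illustrative; job snapshots are plain dicts as in the scheduler's partial models):

```python
# A sketch mirroring DependencyGraph's workflow-job gate (illustrative class).
class MiniDependencyGraph:
    def __init__(self):
        # Maps workflow_job_template_id -> False once a job from it is queued.
        self.wfjt_jobs = {}

    def mark_workflow_job(self, job):
        self.wfjt_jobs[job['workflow_job_template_id']] = False

    def can_workflow_job_run(self, job):
        if job['allow_simultaneous']:
            return True  # opt out of the one-at-a-time gate, as in this commit
        return self.wfjt_jobs.get(job['workflow_job_template_id'], True)

first = {'workflow_job_template_id': 1, 'allow_simultaneous': False}
second = dict(first)
graph = MiniDependencyGraph()
print(graph.can_workflow_job_run(first))   # True: nothing queued yet
graph.mark_workflow_job(first)
print(graph.can_workflow_job_run(second))  # False: template 1 already has a job
```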


@@ -58,7 +58,7 @@ class PartialModelDict(object):
def get_job_type_str(self):
raise RuntimeError("Inherit and implement me")
def task_impact(self):
raise RuntimeError("Inherit and implement me")
@@ -87,8 +87,8 @@ class PartialModelDict(object):
class JobDict(PartialModelDict):
FIELDS = (
'id', 'status', 'job_template_id', 'inventory_id', 'project_id',
'launch_type', 'limit', 'allow_simultaneous', 'created',
'job_type', 'celery_task_id', 'project__scm_update_on_launch',
'forks', 'start_args', 'dependent_jobs__id',
)
@@ -119,15 +119,15 @@ class JobDict(PartialModelDict):
class ProjectUpdateDict(PartialModelDict):
FIELDS = (
'id', 'status', 'project_id', 'created', 'celery_task_id',
'launch_type', 'project__scm_update_cache_timeout',
'project__scm_update_on_launch',
)
model = ProjectUpdate
def get_job_type_str(self):
return 'project_update'
def task_impact(self):
return 10
@@ -142,8 +142,8 @@ class ProjectUpdateDict(PartialModelDict):
class ProjectUpdateLatestDict(ProjectUpdateDict):
FIELDS = (
'id', 'status', 'project_id', 'created', 'finished',
'project__scm_update_cache_timeout',
'launch_type', 'project__scm_update_on_launch',
)
model = ProjectUpdate
@@ -162,7 +162,7 @@ class ProjectUpdateLatestDict(ProjectUpdateDict):
class InventoryUpdateDict(PartialModelDict):
#'inventory_source__update_on_launch',
#'inventory_source__update_cache_timeout',
FIELDS = (
'id', 'status', 'created', 'celery_task_id', 'inventory_source_id',
@@ -178,10 +178,10 @@ class InventoryUpdateDict(PartialModelDict):
class InventoryUpdateLatestDict(InventoryUpdateDict):
#'inventory_source__update_on_launch',
#'inventory_source__update_cache_timeout',
FIELDS = (
'id', 'status', 'created', 'celery_task_id', 'inventory_source_id',
'finished', 'inventory_source__update_cache_timeout', 'launch_type',
'inventory_source__update_on_launch',
)
@@ -198,7 +198,7 @@ class InventoryUpdateLatestDict(InventoryUpdateDict):
update_on_launch=True).values_list('id', flat=True)
# Find the most recent inventory update for each inventory source
for inventory_source_id in inventory_source_ids:
qs = cls.model.objects.filter(inventory_source_id=inventory_source_id,
status__in=['waiting', 'successful', 'failed'],
inventory_source__update_on_launch=True).order_by('-finished', '-started', '-created')
if qs.count() > 0:
@@ -263,7 +263,7 @@ class AdHocCommandDict(PartialModelDict):
class WorkflowJobDict(PartialModelDict):
FIELDS = (
-'id', 'created', 'status', 'workflow_job_template_id',
+'id', 'created', 'status', 'workflow_job_template_id', 'allow_simultaneous',
)
model = WorkflowJob
@@ -272,4 +272,3 @@ class WorkflowJobDict(PartialModelDict):
def task_impact(self):
return 0
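
For context, these `*Dict` classes are lightweight snapshots: each declares a `FIELDS` whitelist that the scheduler fetches instead of full ORM objects, which is why `allow_simultaneous` must appear in `WorkflowJobDict.FIELDS` for the dependency graph to see it. A standalone sketch of the pattern (an assumed simplification, not the real base class):

```python
# A sketch of the FIELDS-whitelist snapshot pattern (not the real base class).
class MiniWorkflowJobDict:
    FIELDS = ('id', 'created', 'status', 'workflow_job_template_id',
              'allow_simultaneous')

    def __init__(self, row):
        # Keep only whitelisted keys, as a .values(*FIELDS) query would.
        self.data = {key: row[key] for key in self.FIELDS}

    def __getitem__(self, key):
        return self.data[key]

row = {'id': 7, 'created': '2017-05-02', 'status': 'pending',
       'workflow_job_template_id': 1, 'allow_simultaneous': True,
       'unrelated_field': 'dropped'}
print(MiniWorkflowJobDict(row)['allow_simultaneous'])  # True
```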


@@ -11,7 +11,7 @@ Like other job resources, workflow jobs are created from workflow job templates.
The CRUD operations against a workflow job template and its corresponding workflow jobs are almost identical to those of normal job templates and related jobs. However, from an RBAC perspective, CRUD on workflow job templates/jobs is limited to superusers. That is, an organization administrator has full control over all workflow job templates/jobs in the same organization, while an organization auditor can view them. Ordinary organization members, on the other hand, have no permission over any workflow-related resources and cannot be granted any.
### Workflow Nodes
Workflow nodes are containers of workflow-spawned job resources and function as the nodes of workflow decision trees. Like workflows themselves, workflow nodes come in two types: workflow job template nodes and workflow job nodes.
Workflow job template nodes are listed and created under the endpoint `/workflow_job_templates/\d+/workflow_nodes/`, associating them with the underlying workflow job template, or directly under the endpoint `/workflow_job_template_nodes/`. The most important fields of a workflow job template node are `success_nodes`, `failure_nodes`, `always_nodes`, `unified_job_template` and `workflow_job_template`. The first three are lists of workflow job template nodes that, in union, form the set of all its child nodes; specifically, `success_nodes` are triggered when the parent node's job succeeds, `failure_nodes` are triggered when the parent node's job fails, and `always_nodes` are triggered regardless of whether the parent job succeeds or fails. The latter two reference the job template resource the node contains and the workflow job template it belongs to.
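
As an aside, a small standalone sketch of how these three child lists form a decision tree (the `Node` class and names below are illustrative, not AWX's models):

```python
# A standalone sketch of workflow-node traversal (not AWX's implementation).
class Node:
    def __init__(self, name):
        self.name = name
        self.success_nodes, self.failure_nodes, self.always_nodes = [], [], []

def next_nodes(node, job_succeeded):
    # Children triggered by the parent job's outcome, plus always_nodes.
    branch = node.success_nodes if job_succeeded else node.failure_nodes
    return branch + node.always_nodes

root, on_ok, cleanup = Node('deploy'), Node('notify'), Node('cleanup')
root.success_nodes.append(on_ok)
root.always_nodes.append(cleanup)
print([n.name for n in next_nodes(root, True)])   # ['notify', 'cleanup']
print([n.name for n in next_nodes(root, False)])  # ['cleanup']
```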
@@ -46,6 +46,8 @@ Workflow job summary:
...
```
Starting from Tower 3.2, workflow jobs support simultaneous runs just as ordinary jobs do, controlled by the `allow_simultaneous` field of the underlying workflow job template. Simultaneous workflow job runs are disabled by default, and users should be prudent in enabling them: the performance boost manifests only when a large portion of the jobs contained in a workflow themselves allow simultaneous runs. Otherwise, expect some long-running workflow jobs, since their spawned jobs can sit in the pending state for a long time.
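
As a hedged illustration of the behavior (the host, credentials, and template ID below are assumptions): with the flag enabled, two back-to-back launches of the same workflow job template should both start running, rather than the second waiting in pending.

```python
# A sketch: launch the same workflow job template twice in a row.
# The endpoint follows Tower's v1 API conventions; the host, credentials,
# and template ID are assumptions for illustration.
import requests

BASE = 'https://tower.example.com/api/v1'
AUTH = ('admin', 'password')

for attempt in range(2):
    resp = requests.post(BASE + '/workflow_job_templates/42/launch/', auth=AUTH)
    print(attempt, resp.status_code, resp.json().get('workflow_job'))
```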
### Workflow Copy and Relaunch
Other than the normal way of creating workflow job templates, it is also possible to copy existing workflow job templates. The resulting new workflow job template is mostly identical to the original, except for the `name` field, which has text appended to indicate that it is a copy.
@@ -59,7 +61,7 @@ Artifact support starts in Ansible and is carried through in Tower. The `set_sta
## Test Coverage
### CRUD-related
* Verify that CRUD operations on all workflow resources work properly. Note that workflow job nodes cannot be created or deleted independently, so verify that when a workflow job is deleted, all of its related workflow job nodes are deleted as well.
* Verify the RBAC property of workflow resources. Specifically:
* Workflow job templates are accessible only to superusers: system admins and admins of the same organization, as well as system auditors and auditors of the same organization with read permission only.
* Workflow job read and delete permissions follow from the associated workflow job template.
* Workflow job relaunch permission is the union of execute permission on the associated workflow job template and the permission to re-create all of the nodes inside the workflow job.