1
0
mirror of https://github.com/ansible/awx.git synced 2024-10-27 00:55:06 +03:00

fix access problems (#15)

* fix access problems and add  Add bulk job max settings to api

filter workflow job nodes better

This will both improve performance by limiting the queryset for the node
sublists as well as fix our access problem.

override can_read instead of modify queryset in access.py

We do this because we are not going to expose bulk jobs to the list
views, which is complicated and has poor performance implications.

Instead, we just care about individual Workflows that clients get linked
to not being broken.

fix comment

remove the get functions from the conf.py for bulk api max value

comment the api expose of the bulk job variables

reformat conf.py with make black

trailing space

add more assertion to the bulk host create test
This commit is contained in:
Elijah DeLee 2023-02-28 23:14:06 -05:00
parent 4b9ca3deee
commit 3efc7d5bc4
6 changed files with 56 additions and 7 deletions

View File

@ -3078,7 +3078,9 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
search_fields = ('unified_job_template__name', 'unified_job_template__description')
def get_queryset(self):
    """Return this workflow job template's nodes, ordered by id.

    Builds the queryset from the parent's node relationship instead of the
    global WorkflowJobTemplateNode queryset: this narrows the sublist (a
    performance win) and enforces RBAC via check_parent_access, which raises
    if self.user cannot read the parent workflow job template.
    """
    parent = self.get_parent_object()
    self.check_parent_access(parent)
    # `relationship` names the reverse relation on the parent (a DRF
    # SubListCreateAPIView convention); order_by('id') keeps paging stable.
    return getattr(parent, self.relationship).order_by('id')
class WorkflowJobTemplateJobsList(SubListAPIView):
@ -3172,7 +3174,9 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
search_fields = ('unified_job_template__name', 'unified_job_template__description')
def get_queryset(self):
    """Return this workflow job's nodes, ordered by id.

    Scopes the queryset to the parent workflow job's node relationship rather
    than the global node queryset, which both limits the sublist for
    performance and enforces access through check_parent_access (raises if
    self.user cannot read the parent).
    """
    parent = self.get_parent_object()
    self.check_parent_access(parent)
    # `relationship` is the attribute name of the reverse relation on the
    # parent; order_by('id') gives a deterministic ordering for pagination.
    return getattr(parent, self.relationship).order_by('id')
class WorkflowJobCancel(GenericCancelView):

View File

@ -1999,10 +1999,14 @@ class WorkflowJobNodeAccess(BaseAccess):
def filtered_queryset(self):
    """Workflow job nodes visible to self.user.

    A node is visible when its workflow job's template is readable by the
    user, or when the workflow job is a bulk job that the user launched or
    that belongs to an organization the user administers.
    """
    readable_templates = UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role')
    admin_orgs = Organization.objects.filter(Q(admin_role__members=self.user))
    visibility = (
        Q(workflow_job__unified_job_template__in=readable_templates)
        | Q(workflow_job__created_by_id=self.user.id, workflow_job__is_bulk_job=True)
        | Q(workflow_job__organization__in=admin_orgs, workflow_job__is_bulk_job=True)
    )
    return self.model.objects.filter(visibility)
def can_read(self, obj):
    """Allow a user to read nodes of bulk jobs they launched themselves;
    defer everything else to the base access check."""
    workflow_job = obj.workflow_job
    if not (workflow_job.is_bulk_job and workflow_job.created_by_id == self.user.id):
        return super().can_read(obj)
    return True
@check_superuser
def can_add(self, data):
if data is None: # Hide direct creation in API browser
@ -2129,10 +2133,14 @@ class WorkflowJobAccess(BaseAccess):
def filtered_queryset(self):
    """Workflow jobs visible to self.user.

    Visible when the job's template is readable by the user, or when the job
    is a bulk job either launched by the user or belonging to an organization
    the user administers.
    """
    readable_templates = UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role')
    admin_orgs = Organization.objects.filter(Q(admin_role__members=self.user))
    visibility = (
        Q(unified_job_template__in=readable_templates)
        | Q(created_by_id=self.user.id, is_bulk_job=True)
        | Q(organization__in=admin_orgs, is_bulk_job=True)
    )
    return WorkflowJob.objects.filter(visibility)
def can_read(self, obj):
    """Let a user read a bulk workflow job they launched; otherwise fall
    back to the standard access check."""
    launched_own_bulk_job = obj.is_bulk_job and obj.created_by_id == self.user.id
    return True if launched_own_bulk_job else super().can_read(obj)
def can_add(self, data):
# Old add-start system for launching jobs is being deprecated, and
# not supported for new types of resources

View File

@ -775,6 +775,27 @@ register(
help_text=_('Indicates whether the instance is part of a kubernetes-based deployment.'),
)
# TODO: The bulk job settings below are commented out because of a failing conftest import. Figure out the conftest issue and then uncomment them.
# register(
# 'BULK_JOB_MAX_LAUNCH',
# field_class=fields.IntegerField,
# default=100,
# label=_('Max jobs to allow bulk jobs to launch'),
# help_text=_('Max jobs to allow bulk jobs to launch'),
# category=_('Bulk Actions'),
# category_slug='bulk',
# )
#
# register(
# 'BULK_HOST_MAX_CREATE',
# field_class=fields.IntegerField,
# default=1000,
# label=_('Max number of hosts to allow to be created in a single bulk action'),
# help_text=_('Max number of hosts to allow to be created in a single bulk action'),
# category=_('Bulk Actions'),
# category_slug='bulk',
# )
def logging_validate(serializer, attrs):
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):

View File

@ -5,7 +5,7 @@ from uuid import uuid4
from awx.api.versioning import reverse
from awx.main.models.jobs import JobTemplate
from awx.main.models import Organization, Inventory, WorkflowJob, ExecutionEnvironment
from awx.main.models import Organization, Inventory, WorkflowJob, ExecutionEnvironment, Host
from awx.main.scheduler import TaskManager
@ -70,6 +70,7 @@ def test_bulk_host_create_rbac(organization, inventory, post, get, user):
reverse('api:bulk_host_create'), {'inventory': inventory.id, 'hosts': [{'name': f'foobar-{indx}'}]}, u, expect=201
).data
assert len(bulk_host_create_response['hosts']) == 1, f"unexpected number of hosts created for user {u}"
assert Host.objects.filter(inventory__id=inventory.id)[0].name == 'foobar-0'
for indx, u in enumerate([member, auditor, use_inv_member]):
bulk_host_create_response = post(

View File

@ -197,7 +197,8 @@ EXAMPLES = '''
name: My Bulk Job Launch
jobs:
- unified_job_template: 7
- unified_job_template: "{{ lookup('awx.awx.controller_api', 'job_templates', query_params={'name': 'Demo Job Template'}, return_ids=True, expect_one=True) }}"
- unified_job_template: "{{ lookup('awx.awx.controller_api', 'job_templates', query_params={'name': 'Demo Job Template'},
return_ids=True, expect_one=True) }}"
'''
from ..module_utils.controller_api import ControllerAPIModule

View File

@ -21,6 +21,8 @@ Following is an example of a post request at the /api/v2/bulk/job_launch
The above will launch a workflow job with 3 nodes in it.
The maximum number of jobs allowed to be launched in one bulk launch is controlled by the setting `BULK_JOB_MAX_LAUNCH`.
**Important Note: A bulk job launched by a normal user will not be visible in the jobs section of the UI, although the individual jobs within a bulk job can be seen there.**
If the job template has fields marked as prompt on launch, those can be provided for each job in the bulk job launch as well:
@ -48,6 +50,16 @@ Prompted field value can also be provided at the top level. For example:
In the above example, `inventory: 2` will get used for the job templates (11, 12 and 13) in which inventory is marked as prompt of launch.
### RBAC For Bulk Job Launch
#### Who can bulk launch?
Anyone who is logged in can view the launch point. In order to launch a unified_job_template, you need to have either `update` or `execute` depending on the type of unified job (job template, project update, etc).
#### Who can see bulk jobs that have been run?
System admins and Organization admins will see Bulk Jobs in the workflow jobs list and the unified jobs list. They can additionally see these individual workflow jobs.
Regular users can only see the individual workflow jobs that were launched by their bulk job launch. These jobs do not appear in the unified jobs list, nor do they show in the workflow jobs list. This is important because the response to a bulk job launch includes a link to the parent workflow job.
## Bulk Host Create
Provides feature in the API that allows a single web request to create multiple hosts in an inventory.
@ -61,4 +73,6 @@ Following is an example of a post request at the /api/v2/bulk/host_create:
}
The above will add 6 hosts in the inventory.
The maximum number of hosts allowed to be added is controlled by the setting `BULK_HOST_MAX_CREATE`. The default is 1,000 hosts. Additionally, nginx limits the maximum payload size, which is very likely when posting a large number of hosts in one request with variable data associated with them. The maximum payload size is 1MB unless overridden in your nginx config.