1
0
mirror of https://github.com/ansible/awx.git synced 2024-10-26 07:55:24 +03:00

Merge branch 'ansible:devel' into devel

This commit is contained in:
Neev Geffen 2024-08-30 11:34:11 +03:00 committed by GitHub
commit 8ba47ef1f1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1946 changed files with 1109 additions and 343520 deletions

View File

@ -57,16 +57,6 @@ runs:
awx-manage update_password --username=admin --password=password
EOSH
- name: Build UI
# This must be a string comparison in composite actions:
# https://github.com/actions/runner/issues/2238
if: ${{ inputs.build-ui == 'true' }}
shell: bash
run: |
docker exec -i tools_awx_1 sh <<-EOSH
make ui-devel
EOSH
- name: Get instance data
id: data
shell: bash

View File

@ -6,8 +6,6 @@ needs_triage:
- "Feature Summary"
"component:ui":
- "\\[X\\] UI"
"component:ui_next":
- "\\[X\\] UI \\(tech preview\\)"
"component:api":
- "\\[X\\] API"
"component:docs":

View File

@ -1,8 +1,5 @@
"component:api":
- any: ["awx/**/*", "!awx/ui/**"]
"component:ui":
- any: ["awx/ui/**/*"]
- any: ["awx/**/*"]
"component:docs":
- any: ["docs/**/*"]
@ -14,5 +11,4 @@
- any: ["awx_collection/**/*"]
"dependencies":
- any: ["awx/ui/package.json"]
- any: ["requirements/*"]

View File

@ -1,7 +1,7 @@
## General
- For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
- Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
- You can find the latest documentation here: https://docs.ansible.com/automation-controller/latest/html/userguide/index.html
- You can find the latest documentation here: https://ansible.readthedocs.io/projects/awx/en/latest/userguide/index.html

View File

@ -31,14 +31,11 @@ jobs:
command: /start_tests.sh test_collection_all
- name: api-schema
command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
- name: ui-lint
command: make ui-lint
- name: ui-test-screens
command: make ui-test-screens
- name: ui-test-general
command: make ui-test-general
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
show-progress: false
- name: Build awx_devel image for running checks
uses: ./.github/actions/awx_devel_image
@ -52,7 +49,9 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
show-progress: false
- uses: ./.github/actions/run_awx_devel
id: awx
@ -70,13 +69,15 @@ jobs:
DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
steps:
- name: Checkout awx
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
show-progress: false
path: awx
- name: Checkout awx-operator
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
show-progress: false\
repository: ansible/awx-operator
path: awx-operator
@ -130,7 +131,9 @@ jobs:
strategy:
fail-fast: false
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
show-progress: false
# The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
- name: Upgrade ansible-core
@ -154,7 +157,9 @@ jobs:
- name: r-z0-9
regex: ^[r-z0-9]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
show-progress: false
- uses: ./.github/actions/run_awx_devel
id: awx
@ -200,7 +205,9 @@ jobs:
strategy:
fail-fast: false
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
show-progress: false
- name: Upgrade ansible-core
run: python3 -m pip install --upgrade ansible-core

View File

@ -35,7 +35,9 @@ jobs:
exit 0
if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
show-progress: false
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
@ -60,18 +62,6 @@ jobs:
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- name: Setup node and npm for old UI build
uses: actions/setup-node@v2
with:
node-version: '16'
if: matrix.build-targets.image-name == 'awx'
- name: Prebuild old-UI for awx image (to speed up build process)
run: |
sudo apt-get install gettext
make ui-release
if: matrix.build-targets.image-name == 'awx'
- name: Setup node and npm for the new UI build
uses: actions/setup-node@v2
with:
@ -80,7 +70,7 @@ jobs:
- name: Prebuild new UI for awx image (to speed up build process)
run: |
make ui-next
make ui
if: matrix.build-targets.image-name == 'awx'
- name: Build and push AWX devel images

View File

@ -8,7 +8,9 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
show-progress: false
- name: install tox
run: pip install tox

View File

@ -30,7 +30,10 @@ jobs:
timeout-minutes: 20
name: Label Issue - Community
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
show-progress: false
- uses: actions/setup-python@v4
- name: Install python requests
run: pip install requests

View File

@ -29,7 +29,10 @@ jobs:
timeout-minutes: 20
name: Label PR - Community
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
show-progress: false
- uses: actions/setup-python@v4
- name: Install python requests
run: pip install requests

View File

@ -32,7 +32,9 @@ jobs:
echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
- name: Checkout awx
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
show-progress: false
- name: Get python version from Makefile
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

View File

@ -45,19 +45,22 @@ jobs:
exit 0
- name: Checkout awx
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
show-progress: false
path: awx
- name: Checkout awx-operator
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
show-progress: false
repository: ${{ github.repository_owner }}/awx-operator
path: awx-operator
- name: Checkout awx-logos
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
show-progress: false
repository: ansible/awx-logos
path: awx-logos
@ -86,17 +89,6 @@ jobs:
run: |
cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
- name: Setup node and npm for old UI build
uses: actions/setup-node@v2
with:
node-version: '16'
- name: Prebuild old UI for awx image (to speed up build process)
working-directory: awx
run: |
sudo apt-get install gettext
make ui-release
- name: Setup node and npm for new UI build
uses: actions/setup-node@v2
with:
@ -104,7 +96,7 @@ jobs:
- name: Prebuild new UI for awx image (to speed up build process)
working-directory: awx
run: make ui-next
run: make ui
- name: Set build env variables
run: |

View File

@ -13,7 +13,9 @@ jobs:
steps:
- name: Checkout branch
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
show-progress: false
- name: Update PR Body
env:

View File

@ -18,7 +18,9 @@ jobs:
packages: write
contents: read
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
show-progress: false
- name: Get python version from Makefile
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

23
.gitignore vendored
View File

@ -20,23 +20,10 @@ awx/projects
awx/job_output
awx/public/media
awx/public/static
awx/ui/tests/test-results.xml
awx/ui/client/src/local_settings.json
awx/main/fixtures
awx/*.log
tower/tower_warnings.log
celerybeat-schedule
awx/ui/static
awx/ui/build_test
awx/ui/client/languages
awx/ui/templates/ui/index.html
awx/ui/templates/ui/installing.html
awx/ui/node_modules/
awx/ui/src/locales/*/messages.js
awx/ui/coverage/
awx/ui/build
awx/ui/.env.local
awx/ui/instrumented
rsyslog.pid
tools/docker-compose/ansible/awx_dump.sql
tools/docker-compose/Dockerfile
@ -79,11 +66,6 @@ __pycache__
/tmp
**/npm-debug.log*
# UI build flag files
awx/ui/.deps_built
awx/ui/.release_built
awx/ui/.release_deps_built
# Testing
.cache
.coverage
@ -161,15 +143,14 @@ use_dev_supervisor.txt
.idea/*
*.unison.tmp
*.#
/awx/ui/.ui-built
/_build/
/_build_kube_dev/
/Dockerfile
/Dockerfile.dev
/Dockerfile.kube-dev
awx/ui_next/src
awx/ui_next/build
awx/ui/src
awx/ui/build
# Docs build stuff
docs/docsite/build/

View File

@ -5,8 +5,6 @@ ignore: |
awx/main/tests/data/inventory/plugins/**
# vault files
awx/main/tests/data/ansible_utils/playbooks/valid/vault.yml
awx/ui/test/e2e/tests/smoke-vars.yml
awx/ui/node_modules
tools/docker-compose/_sources
# django template files
awx/api/templates/instance_install_bundle/**

View File

@ -67,7 +67,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo
#### Frontend Development
See [the ui development documentation](awx/ui/CONTRIBUTING.md).
See [the ansible-ui development documentation](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
#### Fork and clone the AWX repo
@ -121,18 +121,18 @@ If it has someone assigned to it then that person is the person responsible for
**NOTES**
> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
> If you're planning to develop features or fixes for the UI, please review the [UI Developer doc](./awx/ui/README.md).
> If you're planning to develop features or fixes for the UI, please review the [UI Developer doc](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
### Translations
At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.
If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
## Submitting Pull Requests
@ -143,10 +143,8 @@ Here are a few things you can do to help the visibility of your change, and incr
- No issues when running linters/code checkers
- Python: black: `(container)/awx_devel$ make black`
- Javascript: `(container)/awx_devel$ make ui-lint`
- No issues from unit tests
- Python: py.test: `(container)/awx_devel$ make test`
- JavaScript: `(container)/awx_devel$ make ui-test`
- Write tests for new functionality, update/add tests for bug fixes
- Make the smallest change possible
- Write good commit messages. See [How to write a Git commit message](https://chris.beams.io/posts/git-commit/).
@ -161,7 +159,7 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.
## Reporting Issues
We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).
## Getting Help

View File

@ -4,9 +4,7 @@ recursive-include awx *.mo
recursive-include awx/static *
recursive-include awx/templates *.html
recursive-include awx/api/templates *.md *.html *.yml
recursive-include awx/ui/build *.html
recursive-include awx/ui/build *
recursive-include awx/ui_next/build *
recursive-include awx/playbooks *.yml
recursive-include awx/lib/site-packages *
recursive-include awx/plugins *.ps1
@ -17,7 +15,6 @@ recursive-include licenses *
recursive-exclude awx devonly.py*
recursive-exclude awx/api/tests *
recursive-exclude awx/main/tests *
recursive-exclude awx/ui/client *
recursive-exclude awx/settings local_settings.py*
include tools/scripts/request_tower_configuration.sh
include tools/scripts/request_tower_configuration.ps1

View File

@ -1,4 +1,4 @@
-include awx/ui_next/Makefile
-include awx/ui/Makefile
PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
SHELL := bash
@ -107,7 +107,6 @@ endif
develop refresh adduser migrate dbchange \
receiver test test_unit test_coverage coverage_html \
sdist \
ui-release ui-devel \
VERSION PYTHON_VERSION docker-compose-sources \
.git/hooks/pre-commit
@ -130,7 +129,7 @@ clean-languages:
find ./awx/locale/ -type f -regex '.*\.mo$$' -delete
## Remove temporary build files, compiled Python files.
clean: clean-ui clean-api clean-awxkit clean-dist
clean: clean-api clean-awxkit clean-dist
rm -rf awx/public
rm -rf awx/lib/site-packages
rm -rf awx/job_status
@ -439,76 +438,7 @@ bulk_data:
fi; \
$(PYTHON) tools/data_generators/rbac_dummy_data_generator.py --preset=$(DATA_GEN_PRESET)
# UI TASKS
# --------------------------------------
UI_BUILD_FLAG_FILE = awx/ui/.ui-built
clean-ui:
rm -rf node_modules
rm -rf awx/ui/node_modules
rm -rf awx/ui/build
rm -rf awx/ui/src/locales/_build
rm -rf $(UI_BUILD_FLAG_FILE)
# the collectstatic command doesn't like it if this dir doesn't exist.
mkdir -p awx/ui/build/static
awx/ui/node_modules:
NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn --force ci
$(UI_BUILD_FLAG_FILE):
$(MAKE) awx/ui/node_modules
$(PYTHON) tools/scripts/compilemessages.py
$(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
$(NPM_BIN) --prefix awx/ui --loglevel warn run build
touch $@
ui-release: $(UI_BUILD_FLAG_FILE)
ui-devel: awx/ui/node_modules
@$(MAKE) -B $(UI_BUILD_FLAG_FILE)
@if [ -d "/var/lib/awx" ] ; then \
mkdir -p /var/lib/awx/public/static/css; \
mkdir -p /var/lib/awx/public/static/js; \
mkdir -p /var/lib/awx/public/static/media; \
cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css; \
cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js; \
cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media; \
fi
ui-devel-instrumented: awx/ui/node_modules
$(NPM_BIN) --prefix awx/ui --loglevel warn run start-instrumented
ui-devel-test: awx/ui/node_modules
$(NPM_BIN) --prefix awx/ui --loglevel warn run start
ui-lint:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui lint
$(NPM_BIN) run --prefix awx/ui prettier-check
ui-test:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui test
ui-test-screens:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui pretest
$(NPM_BIN) run --prefix awx/ui test-screens --runInBand
ui-test-general:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui pretest
$(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
# NOTE: The make target ui-next is imported from awx/ui_next/Makefile
HEADLESS ?= no
ifeq ($(HEADLESS), yes)
dist/$(SDIST_TAR_FILE):
else
dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE) ui-next
endif
$(PYTHON) -m build -s
ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
@ -747,16 +677,6 @@ kind-dev-load: awx-kube-dev-build
# Translation TASKS
# --------------------------------------
## generate UI .pot file, an empty template of strings yet to be translated
pot: $(UI_BUILD_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-template --clean
$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-template --clean
## generate UI .po files for each locale (will update translated strings for `en`)
po: $(UI_BUILD_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean
$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-strings -- --clean
## generate API django .pot .po
messages:
@if [ "$(VENV_BASE)" ]; then \
@ -803,6 +723,6 @@ help/generate:
{ lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
@printf "\n"
## Display help for ui-next targets
help/ui-next:
@$(MAKE) -s help MAKEFILE_LIST="awx/ui_next/Makefile"
## Display help for ui targets
help/ui:
@$(MAKE) -s help MAKEFILE_LIST="awx/ui/Makefile"

View File

@ -35,7 +35,4 @@ We ask all of our community members and contributors to adhere to the [Ansible c
Get Involved
------------
We welcome your feedback and ideas. Here's how to reach us with feedback and questions:
- Join the [Ansible AWX channel on Matrix](https://matrix.to/#/#awx:ansible.com)
- Join the [Ansible Community Forum](https://forum.ansible.com)
We welcome your feedback and ideas. See the [AWX Communication guide](https://ansible.readthedocs.io/projects/awx/en/latest/contributor/communication.html) to learn how to join the conversation.

View File

@ -61,23 +61,6 @@ else:
from django.db import connection
def find_commands(management_dir):
# Modified version of function from django/core/management/__init__.py.
command_dir = os.path.join(management_dir, 'commands')
commands = []
try:
for f in os.listdir(command_dir):
if f.startswith('_'):
continue
elif f.endswith('.py') and f[:-3] not in commands:
commands.append(f[:-3])
elif f.endswith('.pyc') and f[:-4] not in commands: # pragma: no cover
commands.append(f[:-4])
except OSError:
pass
return commands
def oauth2_getattribute(self, attr):
# Custom method to override
# oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
@ -106,10 +89,6 @@ def prepare_env():
if not settings.DEBUG: # pragma: no cover
warnings.simplefilter('ignore', DeprecationWarning)
# Monkeypatch Django find_commands to also work with .pyc files.
import django.core.management
django.core.management.find_commands = find_commands
# Monkeypatch Oauth2 toolkit settings class to check for settings
# in django.conf settings each time, not just once during import
@ -117,35 +96,6 @@ def prepare_env():
oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute
# Use the AWX_TEST_DATABASE_* environment variables to specify the test
# database settings to use when management command is run as an external
# program via unit tests.
for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'): # pragma: no cover
if os.environ.get('AWX_TEST_DATABASE_%s' % opt, None):
settings.DATABASES['default'][opt] = os.environ['AWX_TEST_DATABASE_%s' % opt]
# Disable capturing all SQL queries in memory when in DEBUG mode.
if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.utils import CursorWrapper
BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)
# Use the default devserver addr/port defined in settings for runserver.
default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
from django.core.management.commands import runserver as core_runserver
original_handle = core_runserver.Command.handle
def handle(self, *args, **options):
if not options.get('addrport'):
options['addrport'] = '%s:%d' % (default_addr, int(default_port))
elif options.get('addrport').isdigit():
options['addrport'] = '%s:%d' % (default_addr, int(options['addrport']))
return original_handle(self, *args, **options)
core_runserver.Command.handle = handle
def manage():
# Prepare the AWX environment.

View File

@ -826,7 +826,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
auditor_role = RoleDefinition.objects.filter(name="System Auditor").first()
auditor_role = RoleDefinition.objects.filter(name="Controller System Auditor").first()
if auditor_role:
qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
return qs.distinct()

View File

@ -1038,7 +1038,9 @@ class UserSerializer(BaseSerializer):
# as the modified user then inject a session key derived from
# the updated user to prevent logout. This is the logic used by
# the Django admin's own user_change_password view.
update_session_auth_hash(self.context['request'], obj)
if self.instance and self.context['request'].user.username == obj.username:
update_session_auth_hash(self.context['request'], obj)
elif not obj.password:
obj.set_unusable_password()
obj.save(update_fields=['password'])
@ -2905,7 +2907,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
{
"role": {
"id": None,
"name": _("System Auditor"),
"name": _("Controller System Auditor"),
"description": _("Can view all aspects of the system"),
"user_capabilities": {"unattach": False},
},

View File

@ -33,7 +33,6 @@ from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import gettext_lazy as _
# Django REST Framework
from rest_framework.exceptions import APIException, PermissionDenied, ParseError, NotFound
from rest_framework.parsers import FormParser
@ -48,8 +47,8 @@ from rest_framework import status
from rest_framework_yaml.parsers import YAMLParser
from rest_framework_yaml.renderers import YAMLRenderer
# ANSIConv
import ansiconv
# ansi2html
from ansi2html import Ansi2HTMLConverter
# Python Social Auth
from social_core.backends.utils import load_backends
@ -130,7 +129,6 @@ from awx.api.views.mixin import (
from awx.api.pagination import UnifiedJobEventPagination
from awx.main.utils import set_environ
logger = logging.getLogger('awx.api.views')
@ -2394,9 +2392,12 @@ class JobTemplateList(ListCreateAPIView):
def check_permissions(self, request):
if request.method == 'POST':
can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
if not can_access:
self.permission_denied(request, message=messages)
if request.user.is_anonymous:
self.permission_denied(request)
else:
can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
if not can_access:
self.permission_denied(request, message=messages)
super(JobTemplateList, self).check_permissions(request)
@ -3121,9 +3122,12 @@ class WorkflowJobTemplateList(ListCreateAPIView):
def check_permissions(self, request):
if request.method == 'POST':
can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
if not can_access:
self.permission_denied(request, message=messages)
if request.user.is_anonymous:
self.permission_denied(request)
else:
can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
if not can_access:
self.permission_denied(request, message=messages)
super(WorkflowJobTemplateList, self).check_permissions(request)
@ -4205,7 +4209,8 @@ class UnifiedJobStdout(RetrieveAPIView):
# Remove any ANSI escape sequences containing job event data.
content = re.sub(r'\x1b\[K(?:[A-Za-z0-9+/=]+\x1b\[\d+D)+\x1b\[K', '', content)
body = ansiconv.to_html(html.escape(content))
conv = Ansi2HTMLConverter()
body = conv.convert(html.escape(content))
context = {'title': get_view_name(self.__class__), 'body': mark_safe(body), 'dark': dark_bg, 'content_only': content_only}
data = render_to_string('api/stdout.html', context).strip()

View File

@ -285,9 +285,6 @@ class ApiV2ConfigView(APIView):
pendo_state = settings.PENDO_TRACKING_STATE if settings.PENDO_TRACKING_STATE in ('off', 'anonymous', 'detailed') else 'off'
# Guarding against settings.UI_NEXT being set to a non-boolean value
ui_next_state = settings.UI_NEXT if settings.UI_NEXT in (True, False) else False
data = dict(
time_zone=settings.TIME_ZONE,
license_info=license_data,
@ -296,7 +293,6 @@ class ApiV2ConfigView(APIView):
analytics_status=pendo_state,
analytics_collectors=all_collectors(),
become_methods=PRIVILEGE_ESCALATION_METHODS,
ui_next=ui_next_state,
)
# If LDAP is enabled, user_ldap_fields will return a list of field

View File

@ -242,9 +242,10 @@ class BaseAccess(object):
return qs
def filtered_queryset(self):
# Override in subclasses
# filter objects according to user's read access
return self.model.objects.none()
if permission_registry.is_registered(self.model):
return self.model.access_qs(self.user, 'view')
else:
raise NotImplementedError('Filtered queryset for model is not written')
def can_read(self, obj):
return bool(obj and self.get_queryset().filter(pk=obj.pk).exists())
@ -606,9 +607,6 @@ class InstanceGroupAccess(BaseAccess):
model = InstanceGroup
prefetch_related = ('instances',)
def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
@check_superuser
def can_use(self, obj):
return self.user in obj.use_role
@ -654,7 +652,7 @@ class UserAccess(BaseAccess):
qs = User.objects.all()
else:
qs = (
User.objects.filter(pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members'))
User.objects.filter(pk__in=Organization.access_qs(self.user, 'view').values('member_role__members'))
| User.objects.filter(pk=self.user.id)
| User.objects.filter(is_superuser=True)
).distinct()
@ -671,7 +669,7 @@ class UserAccess(BaseAccess):
return True
if not settings.MANAGE_ORGANIZATION_AUTH:
return False
return Organization.accessible_objects(self.user, 'admin_role').exists()
return Organization.access_qs(self.user, 'change').exists()
def can_change(self, obj, data):
if data is not None and ('is_superuser' in data or 'is_system_auditor' in data):
@ -691,7 +689,7 @@ class UserAccess(BaseAccess):
"""
Returns all organizations that count `u` as a member
"""
return Organization.accessible_objects(u, 'member_role')
return Organization.access_qs(u, 'member')
def is_all_org_admin(self, u):
"""
@ -774,7 +772,7 @@ class OAuth2ApplicationAccess(BaseAccess):
prefetch_related = ('organization', 'oauth2accesstoken_set')
def filtered_queryset(self):
org_access_qs = Organization.accessible_objects(self.user, 'member_role')
org_access_qs = Organization.access_qs(self.user, 'member')
return self.model.objects.filter(organization__in=org_access_qs)
def can_change(self, obj, data):
@ -787,7 +785,7 @@ class OAuth2ApplicationAccess(BaseAccess):
if self.user.is_superuser:
return True
if not data:
return Organization.accessible_objects(self.user, 'admin_role').exists()
return Organization.access_qs(self.user, 'change').exists()
return self.check_related('organization', Organization, data, role_field='admin_role', mandatory=True)
@ -855,9 +853,6 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
# organization admin_role is not a parent of organization auditor_role
notification_attach_roles = ['admin_role', 'auditor_role']
def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
@check_superuser
def can_change(self, obj, data):
if data and data.get('default_environment'):
@ -925,9 +920,6 @@ class InventoryAccess(BaseAccess):
Prefetch('labels', queryset=Label.objects.all().order_by('name')),
)
def filtered_queryset(self, allowed=None, ad_hoc=None):
return self.model.accessible_objects(self.user, 'read_role')
@check_superuser
def can_use(self, obj):
return self.user in obj.use_role
@ -936,7 +928,7 @@ class InventoryAccess(BaseAccess):
def can_add(self, data):
# If no data is specified, just checking for generic add permission?
if not data:
return Organization.accessible_objects(self.user, 'inventory_admin_role').exists()
return Organization.access_qs(self.user, 'add_inventory').exists()
return self.check_related('organization', Organization, data, role_field='inventory_admin_role')
@check_superuser
@ -998,7 +990,7 @@ class HostAccess(BaseAccess):
def can_add(self, data):
if not data: # So the browseable API will work
return Inventory.accessible_objects(self.user, 'admin_role').exists()
return Inventory.access_qs(self.user, 'change').exists()
# Checks for admin or change permission on inventory.
if not self.check_related('inventory', Inventory, data):
@ -1060,7 +1052,7 @@ class GroupAccess(BaseAccess):
def can_add(self, data):
if not data: # So the browseable API will work
return Inventory.accessible_objects(self.user, 'admin_role').exists()
return Inventory.access_qs(self.user, 'change').exists()
if 'inventory' not in data:
return False
# Checks for admin or change permission on inventory.
@ -1102,7 +1094,7 @@ class InventorySourceAccess(NotificationAttachMixin, UnifiedCredentialsMixin, Ba
def can_add(self, data):
if not data or 'inventory' not in data:
return Inventory.accessible_objects(self.user, 'admin_role').exists()
return Inventory.access_qs(self.user, 'change').exists()
if not self.check_related('source_project', Project, data, role_field='use_role'):
return False
@ -1216,9 +1208,6 @@ class CredentialAccess(BaseAccess):
)
prefetch_related = ('admin_role', 'use_role', 'read_role', 'admin_role__parents', 'admin_role__members', 'credential_type', 'organization')
def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
@check_superuser
def can_add(self, data):
if not data: # So the browseable API will work
@ -1329,7 +1318,7 @@ class TeamAccess(BaseAccess):
@check_superuser
def can_add(self, data):
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'admin_role').exists()
return Organization.access_qs(self.user, 'view').exists()
if not settings.MANAGE_ORGANIZATION_AUTH:
return False
return self.check_related('organization', Organization, data)
@ -1400,13 +1389,15 @@ class ExecutionEnvironmentAccess(BaseAccess):
def filtered_queryset(self):
return ExecutionEnvironment.objects.filter(
Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | Q(organization__isnull=True)
Q(organization__in=Organization.access_ids_qs(self.user, 'view'))
| Q(organization__isnull=True)
| Q(id__in=ExecutionEnvironment.access_ids_qs(self.user, 'change'))
).distinct()
@check_superuser
def can_add(self, data):
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'execution_environment_admin_role').exists()
return Organization.access_qs(self.user, 'add_executionenvironment').exists()
return self.check_related('organization', Organization, data, mandatory=True, role_field='execution_environment_admin_role')
@check_superuser
@ -1419,7 +1410,13 @@ class ExecutionEnvironmentAccess(BaseAccess):
else:
if self.user not in obj.organization.execution_environment_admin_role:
raise PermissionDenied
return self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role')
if not self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role'):
return False
# Special case that check_related does not catch, org users can not remove the organization from the EE
if data and ('organization' in data or 'organization_id' in data):
if (not data.get('organization')) and (not data.get('organization_id')):
return False
return True
def can_delete(self, obj):
if obj.managed:
@ -1449,13 +1446,10 @@ class ProjectAccess(NotificationAttachMixin, BaseAccess):
prefetch_related = ('modified_by', 'created_by', 'organization', 'last_job', 'current_job')
notification_attach_roles = ['admin_role']
def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
@check_superuser
def can_add(self, data):
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'project_admin_role').exists()
return Organization.access_qs(self.user, 'add_project').exists()
if data.get('default_environment'):
ee = get_object_from_data('default_environment', ExecutionEnvironment, data)
@ -1551,9 +1545,6 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
Prefetch('last_job', queryset=UnifiedJob.objects.non_polymorphic()),
)
def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
def can_add(self, data):
"""
a user can create a job template if
@ -1566,7 +1557,7 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
"""
if not data: # So the browseable API will work
return Project.accessible_objects(self.user, 'use_role').exists()
return Project.access_qs(self.user, 'use_project').exists()
# if reference_obj is provided, determine if it can be copied
reference_obj = data.get('reference_obj', None)
@ -1757,13 +1748,13 @@ class JobAccess(BaseAccess):
def filtered_queryset(self):
qs = self.model.objects
qs_jt = qs.filter(job_template__in=JobTemplate.accessible_objects(self.user, 'read_role'))
qs_jt = qs.filter(job_template__in=JobTemplate.access_qs(self.user, 'view'))
org_access_qs = Organization.objects.filter(Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
if not org_access_qs.exists():
return qs_jt
return qs.filter(Q(job_template__in=JobTemplate.accessible_objects(self.user, 'read_role')) | Q(organization__in=org_access_qs)).distinct()
return qs.filter(Q(job_template__in=JobTemplate.access_qs(self.user, 'view')) | Q(organization__in=org_access_qs)).distinct()
def can_add(self, data, validate_license=True):
raise NotImplementedError('Direct job creation not possible in v2 API')
@ -1852,6 +1843,11 @@ class SystemJobTemplateAccess(BaseAccess):
model = SystemJobTemplate
def filtered_queryset(self):
if self.user.is_superuser or self.user.is_system_auditor:
return self.model.objects.all()
return self.model.objects.none()
@check_superuser
def can_start(self, obj, validate_license=True):
'''Only a superuser can start a job from a SystemJobTemplate'''
@ -1964,7 +1960,7 @@ class WorkflowJobTemplateNodeAccess(UnifiedCredentialsMixin, BaseAccess):
prefetch_related = ('success_nodes', 'failure_nodes', 'always_nodes', 'unified_job_template', 'workflow_job_template')
def filtered_queryset(self):
return self.model.objects.filter(workflow_job_template__in=WorkflowJobTemplate.accessible_objects(self.user, 'read_role'))
return self.model.objects.filter(workflow_job_template__in=WorkflowJobTemplate.access_qs(self.user, 'view'))
@check_superuser
def can_add(self, data):
@ -2079,9 +2075,6 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
'read_role',
)
def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
@check_superuser
def can_add(self, data):
"""
@ -2092,7 +2085,7 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
"""
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()
return Organization.access_qs(self.user, 'add_workflowjobtemplate').exists()
if not self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True):
if data.get('organization', None) is None:
@ -2652,13 +2645,13 @@ class NotificationTemplateAccess(BaseAccess):
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
return self.model.access_qs(self.user, 'view')
return self.model.objects.filter(
Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) | Q(organization__in=self.user.auditor_of_organizations)
Q(organization__in=Organization.access_qs(self.user, 'add_notificationtemplate')) | Q(organization__in=self.user.auditor_of_organizations)
).distinct()
@check_superuser
def can_add(self, data):
if not data:
return Organization.accessible_objects(self.user, 'notification_admin_role').exists()
return Organization.access_qs(self.user, 'add_notificationtemplate').exists()
return self.check_related('organization', Organization, data, role_field='notification_admin_role', mandatory=True)
@check_superuser
@ -2686,7 +2679,7 @@ class NotificationAccess(BaseAccess):
def filtered_queryset(self):
return self.model.objects.filter(
Q(notification_template__organization__in=Organization.accessible_objects(self.user, 'notification_admin_role'))
Q(notification_template__organization__in=Organization.access_qs(self.user, 'add_notificationtemplate'))
| Q(notification_template__organization__in=self.user.auditor_of_organizations)
).distinct()
@ -2802,11 +2795,7 @@ class ActivityStreamAccess(BaseAccess):
if credential_set:
q |= Q(credential__in=credential_set)
auditing_orgs = (
(Organization.accessible_objects(self.user, 'admin_role') | Organization.accessible_objects(self.user, 'auditor_role'))
.distinct()
.values_list('id', flat=True)
)
auditing_orgs = (Organization.access_qs(self.user, 'change') | Organization.access_qs(self.user, 'audit')).distinct().values_list('id', flat=True)
if auditing_orgs:
q |= (
Q(user__in=auditing_orgs.values('member_role__members'))

View File

@ -66,10 +66,8 @@ class FixedSlidingWindow:
class RelayWebsocketStatsManager:
def __init__(self, event_loop, local_hostname):
def __init__(self, local_hostname):
self._local_hostname = local_hostname
self._event_loop = event_loop
self._stats = dict()
self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME
@ -94,7 +92,10 @@ class RelayWebsocketStatsManager:
self.start()
def start(self):
self.async_task = self._event_loop.create_task(self.run_loop())
self.async_task = asyncio.get_running_loop().create_task(
self.run_loop(),
name='RelayWebsocketStatsManager.run_loop',
)
return self.async_task
@classmethod

View File

@ -843,22 +843,12 @@ register(
hidden=True,
)
register(
'UI_NEXT',
field_class=fields.BooleanField,
default=False,
label=_('Enable Preview of New User Interface'),
help_text=_('Enable preview of new user interface.'),
category=_('System'),
category_slug='system',
hidden=True,
)
register(
'SUBSCRIPTION_USAGE_MODEL',
field_class=fields.ChoiceField,
choices=[
('', _('Default model for AWX - no subscription. Deletion of host_metrics will not be considered for purposes of managed host counting')),
('', _('No subscription. Deletion of host_metrics will not be considered for purposes of managed host counting')),
(
SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS,
_('Usage based on unique managed nodes in a large historical time frame and delete functionality for no longer used managed nodes'),
@ -929,6 +919,16 @@ register(
category_slug='debug',
)
register(
'RECEPTOR_KEEP_WORK_ON_ERROR',
field_class=fields.BooleanField,
label=_('Keep receptor work on error'),
default=False,
help_text=_('Prevent receptor work from being released when an error is detected'),
category=_('Debug'),
category_slug='debug',
)
def logging_validate(serializer, attrs):
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):

View File

@ -43,6 +43,7 @@ STANDARD_INVENTORY_UPDATE_ENV = {
}
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
ACTIVE_STATES = CAN_CANCEL
ERROR_STATES = ('error',)
MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
CENSOR_VALUE = '************'
ENV_BLOCKLIST = frozenset(

View File

@ -167,7 +167,7 @@ def migrate_to_new_rbac(apps, schema_editor):
perm.delete()
managed_definitions = dict()
for role_definition in RoleDefinition.objects.filter(managed=True):
for role_definition in RoleDefinition.objects.filter(managed=True).exclude(name__in=(settings.ANSIBLE_BASE_JWT_MANAGED_ROLES)):
permissions = frozenset(role_definition.permissions.values_list('id', flat=True))
managed_definitions[permissions] = role_definition
@ -239,7 +239,7 @@ def migrate_to_new_rbac(apps, schema_editor):
# Create new replacement system auditor role
new_system_auditor, created = RoleDefinition.objects.get_or_create(
name='System Auditor',
name='Controller System Auditor',
defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True},
)
new_system_auditor.permissions.add(*list(Permission.objects.filter(codename__startswith='view')))
@ -309,6 +309,16 @@ def setup_managed_role_definitions(apps, schema_editor):
to_create['object_admin'].format(cls=cls), f'Has all permissions to a single {cls._meta.verbose_name}', ct, indiv_perms, RoleDefinition
)
)
if cls_name == 'team':
managed_role_definitions.append(
get_or_create_managed(
'Controller Team Admin',
f'Has all permissions to a single {cls._meta.verbose_name}',
ct,
indiv_perms,
RoleDefinition,
)
)
if 'org_children' in to_create and (cls_name not in ('organization', 'instancegroup', 'team')):
org_child_perms = object_perms.copy()
@ -349,6 +359,18 @@ def setup_managed_role_definitions(apps, schema_editor):
RoleDefinition,
)
)
if action == 'member' and cls_name in ('organization', 'team'):
suffix = to_create['special'].format(cls=cls, action=action.title())
rd_name = f'Controller {suffix}'
managed_role_definitions.append(
get_or_create_managed(
rd_name,
f'Has {action} permissions to a single {cls._meta.verbose_name}',
ct,
perm_list,
RoleDefinition,
)
)
if 'org_admin' in to_create:
managed_role_definitions.append(
@ -360,6 +382,15 @@ def setup_managed_role_definitions(apps, schema_editor):
RoleDefinition,
)
)
managed_role_definitions.append(
get_or_create_managed(
'Controller Organization Admin',
'Has all permissions to a single organization and all objects inside of it',
org_ct,
org_perms,
RoleDefinition,
)
)
# Special "organization action" roles
audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')]

View File

@ -202,7 +202,7 @@ User.add_to_class('created', created)
def get_system_auditor_role():
rd, created = RoleDefinition.objects.get_or_create(
name='System Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'}
name='Controller System Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'}
)
if created:
rd.permissions.add(*list(permission_registry.permission_qs.filter(codename__startswith='view')))

View File

@ -66,7 +66,3 @@ class ExecutionEnvironment(CommonModel):
if actor._meta.model_name == 'user' and (not actor.has_obj_perm(self.organization, 'view')):
raise ValidationError({'user': _('User must have view permission to Execution Environment organization')})
if actor._meta.model_name == 'team':
organization_cls = self._meta.get_field('organization').related_model
if self.organization not in organization_cls.access_qs(actor, 'view'):
raise ValidationError({'team': _('Team must have view permission to Execution Environment organization')})

View File

@ -557,12 +557,25 @@ def get_role_definition(role):
f = obj._meta.get_field(role.role_field)
action_name = f.name.rsplit("_", 1)[0]
model_print = type(obj).__name__
rd_name = f'{model_print} {action_name.title()} Compat'
perm_list = get_role_codenames(role)
defaults = {
'content_type_id': role.content_type_id,
'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility',
}
# use Controller-specific role definitions for Team/Organization and member/admin
# instead of platform role definitions
# these should exist in the system already, so just do a lookup by role definition name
if model_print in ['Team', 'Organization'] and action_name in ['member', 'admin']:
rd_name = f'Controller {model_print} {action_name.title()}'
rd = RoleDefinition.objects.filter(name=rd_name).first()
if rd:
return rd
else:
return RoleDefinition.objects.create_from_permissions(permissions=perm_list, name=rd_name, managed=True, **defaults)
else:
rd_name = f'{model_print} {action_name.title()} Compat'
with impersonate(None):
try:
rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
@ -585,6 +598,12 @@ def get_role_from_object_role(object_role):
model_name, role_name, _ = rd.name.split()
role_name = role_name.lower()
role_name += '_role'
elif rd.name.startswith('Controller') and rd.name.endswith(' Admin'):
# Controller Organization Admin and Controller Team Admin
role_name = 'admin_role'
elif rd.name.startswith('Controller') and rd.name.endswith(' Member'):
# Controller Organization Member and Controller Team Member
role_name = 'member_role'
elif rd.name.endswith(' Admin') and rd.name.count(' ') == 2:
# cases like "Organization Project Admin"
model_name, target_model_name, role_name = rd.name.split()

View File

@ -405,10 +405,11 @@ class AWXReceptorJob:
finally:
# Make sure to always release the work unit if we established it
if self.unit_id is not None and settings.RECEPTOR_RELEASE_WORK:
try:
receptor_ctl.simple_command(f"work release {self.unit_id}")
except Exception:
logger.exception(f"Error releasing work unit {self.unit_id}.")
if not (settings.RECEPTOR_KEEP_WORK_ON_ERROR and getattr(res, 'status', 'error') == 'error'):
    try:
        receptor_ctl.simple_command(f"work release {self.unit_id}")
    except Exception:
        logger.exception(f"Error releasing work unit {self.unit_id}.")
def _run_internal(self, receptor_ctl):
# Create a socketpair. Where the left side will be used for writing our payload

View File

@ -54,7 +54,7 @@ from awx.main.models import (
Job,
convert_jsonfields,
)
from awx.main.constants import ACTIVE_STATES
from awx.main.constants import ACTIVE_STATES, ERROR_STATES
from awx.main.dispatch.publish import task
from awx.main.dispatch import get_task_queuename, reaper
from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
@ -685,6 +685,8 @@ def awx_receptor_workunit_reaper():
unit_ids = [id for id in receptor_work_list]
jobs_with_unreleased_receptor_units = UnifiedJob.objects.filter(work_unit_id__in=unit_ids).exclude(status__in=ACTIVE_STATES)
if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
jobs_with_unreleased_receptor_units = jobs_with_unreleased_receptor_units.exclude(status__in=ERROR_STATES)
for job in jobs_with_unreleased_receptor_units:
logger.debug(f"{job.log_format} is not active, reaping receptor work unit {job.work_unit_id}")
receptor_ctl.simple_command(f"work cancel {job.work_unit_id}")
@ -704,7 +706,10 @@ def awx_k8s_reaper():
logger.debug("Checking for orphaned k8s pods for {}.".format(group))
pods = PodManager.list_active_jobs(group)
time_cutoff = now() - timedelta(seconds=settings.K8S_POD_REAPER_GRACE_PERIOD)
for job in UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES):
reap_job_candidates = UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES)
if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
reap_job_candidates = reap_job_candidates.exclude(status__in=ERROR_STATES)
for job in reap_job_candidates:
logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
try:
pm = PodManager(job)

View File

@ -32,18 +32,16 @@ class TestImmutableSharedFields:
def test_perform_update(self, admin_user, patch):
orgA = Organization.objects.create(name='orgA')
team = orgA.teams.create(name='teamA')
# allow patching non-shared fields
patch(
url=reverse('api:team_detail', kwargs={'pk': team.id}),
data={"description": "can change this field"},
url=reverse('api:organization_detail', kwargs={'pk': orgA.id}),
data={"max_hosts": 76},
user=admin_user,
expect=200,
)
orgB = Organization.objects.create(name='orgB')
# prevent patching shared fields
resp = patch(url=reverse('api:team_detail', kwargs={'pk': team.id}), data={"organization": orgB.id}, user=admin_user, expect=403)
assert "Cannot change shared field" in resp.data['organization']
resp = patch(url=reverse('api:organization_detail', kwargs={'pk': orgA.id}), data={"name": "orgB"}, user=admin_user, expect=403)
assert "Cannot change shared field" in resp.data['name']
@pytest.mark.parametrize(
'role',

View File

@ -33,6 +33,27 @@ def test_fail_double_create_user(post, admin):
assert response.status_code == 400
@pytest.mark.django_db
def test_creating_user_retains_session(post, admin):
'''
Creating a new user should not refresh a new session id for the current user.
'''
with mock.patch('awx.api.serializers.update_session_auth_hash') as update_session_auth_hash:
response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin)
assert response.status_code == 201
assert not update_session_auth_hash.called
@pytest.mark.django_db
def test_updating_own_password_refreshes_session(patch, admin):
'''
Updating your own password should refresh the session id.
'''
with mock.patch('awx.api.serializers.update_session_auth_hash') as update_session_auth_hash:
patch(reverse('api:user_detail', kwargs={'pk': admin.pk}), {'password': 'newpassword'}, admin, middleware=SessionMiddleware(mock.Mock()))
assert update_session_auth_hash.called
@pytest.mark.django_db
def test_create_delete_create_user(post, delete, admin):
response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock()))

View File

@ -2,7 +2,6 @@ import pytest
from django.contrib.contenttypes.models import ContentType
from django.urls import reverse as django_reverse
from django.test.utils import override_settings
from awx.api.versioning import reverse
from awx.main.models import JobTemplate, Inventory, Organization
@ -68,13 +67,17 @@ def test_assign_managed_role(admin_user, alice, rando, inventory, post, setup_ma
@pytest.mark.django_db
def test_assign_custom_delete_role(admin_user, rando, inventory, delete, patch):
# TODO: just a delete_inventory, without change_inventory
rd, _ = RoleDefinition.objects.get_or_create(
name='inventory-delete', permissions=['delete_inventory', 'view_inventory'], content_type=ContentType.objects.get_for_model(Inventory)
name='inventory-delete',
permissions=['delete_inventory', 'view_inventory', 'change_inventory'],
content_type=ContentType.objects.get_for_model(Inventory),
)
rd.give_permission(rando, inventory)
inv_id = inventory.pk
inv_url = reverse('api:inventory_detail', kwargs={'pk': inv_id})
patch(url=inv_url, data={"description": "new"}, user=rando, expect=403)
# TODO: eventually this will be valid test, for now ignore
# patch(url=inv_url, data={"description": "new"}, user=rando, expect=403)
delete(url=inv_url, user=rando, expect=202)
assert Inventory.objects.get(id=inv_id).pending_deletion
@ -144,9 +147,74 @@ def test_assign_credential_to_user_of_another_org(setup_managed_roles, credentia
@pytest.mark.django_db
@override_settings(ALLOW_LOCAL_RESOURCE_MANAGEMENT=False)
def test_team_member_role_not_assignable(team, rando, post, admin_user, setup_managed_roles):
member_rd = RoleDefinition.objects.get(name='Organization Member')
url = django_reverse('roleuserassignment-list')
r = post(url, data={'object_id': team.id, 'role_definition': member_rd.id, 'user': rando.id}, user=admin_user, expect=400)
assert 'Not managed locally' in str(r.data)
@pytest.mark.django_db
def test_adding_user_to_org_member_role(setup_managed_roles, organization, admin, bob, post, get):
'''
Adding user to organization member role via the legacy RBAC endpoints
should give them access to the organization detail
'''
url_detail = reverse('api:organization_detail', kwargs={'pk': organization.id})
get(url_detail, user=bob, expect=403)
role = organization.member_role
url = reverse('api:role_users_list', kwargs={'pk': role.id})
post(url, data={'id': bob.id}, user=admin, expect=204)
get(url_detail, user=bob, expect=200)
@pytest.mark.django_db
@pytest.mark.parametrize('actor', ['user', 'team'])
@pytest.mark.parametrize('role_name', ['Organization Admin', 'Organization Member', 'Team Admin', 'Team Member'])
def test_prevent_adding_actor_to_platform_roles(setup_managed_roles, role_name, actor, organization, team, admin, bob, post):
'''
Prevent user or team from being added to platform-level roles
'''
rd = RoleDefinition.objects.get(name=role_name)
endpoint = 'roleuserassignment-list' if actor == 'user' else 'roleteamassignment-list'
url = django_reverse(endpoint)
object_id = team.id if 'Team' in role_name else organization.id
data = {'object_id': object_id, 'role_definition': rd.id}
actor_id = bob.id if actor == 'user' else team.id
data[actor] = actor_id
r = post(url, data=data, user=admin, expect=400)
assert 'Not managed locally' in str(r.data)
@pytest.mark.django_db
@pytest.mark.parametrize('role_name', ['Controller Team Admin', 'Controller Team Member'])
def test_adding_user_to_controller_team_roles(setup_managed_roles, role_name, team, admin, bob, post, get):
'''
Allow user to be added to Controller Team Admin or Controller Team Member
'''
url_detail = reverse('api:team_detail', kwargs={'pk': team.id})
get(url_detail, user=bob, expect=403)
rd = RoleDefinition.objects.get(name=role_name)
url = django_reverse('roleuserassignment-list')
post(url, data={'object_id': team.id, 'role_definition': rd.id, 'user': bob.id}, user=admin, expect=201)
get(url_detail, user=bob, expect=200)
@pytest.mark.django_db
@pytest.mark.parametrize('role_name', ['Controller Organization Admin', 'Controller Organization Member'])
def test_adding_user_to_controller_organization_roles(setup_managed_roles, role_name, organization, admin, bob, post, get):
'''
Allow user to be added to Controller Organization Admin or Controller Organization Member
'''
url_detail = reverse('api:organization_detail', kwargs={'pk': organization.id})
get(url_detail, user=bob, expect=403)
rd = RoleDefinition.objects.get(name=role_name)
url = django_reverse('roleuserassignment-list')
post(url, data={'object_id': organization.id, 'role_definition': rd.id, 'user': bob.id}, user=admin, expect=201)
get(url_detail, user=bob, expect=200)

View File

@ -1,6 +1,6 @@
import pytest
from ansible_base.rbac.models import RoleDefinition, DABPermission
from ansible_base.rbac.models import RoleDefinition, DABPermission, RoleUserAssignment
@pytest.mark.django_db
@ -29,3 +29,34 @@ def test_org_child_add_permission(setup_managed_roles):
# special case for JobTemplate, anyone can create one with use permission to project/inventory
assert not DABPermission.objects.filter(codename='add_jobtemplate').exists()
@pytest.mark.django_db
def test_controller_specific_roles_have_correct_permissions(setup_managed_roles):
'''
Controller specific roles should have the same permissions as the platform roles
e.g. Controller Team Admin should have same permission set as Team Admin
'''
for rd_name in ['Controller Team Admin', 'Controller Team Member', 'Controller Organization Member', 'Controller Organization Admin']:
rd = RoleDefinition.objects.get(name=rd_name)
rd_platform = RoleDefinition.objects.get(name=rd_name.split('Controller ')[1])
assert set(rd.permissions.all()) == set(rd_platform.permissions.all())
@pytest.mark.django_db
@pytest.mark.parametrize('resource_name', ['Team', 'Organization'])
@pytest.mark.parametrize('action', ['Member', 'Admin'])
def test_legacy_RBAC_uses_controller_specific_roles(setup_managed_roles, resource_name, action, team, bob, organization):
'''
Assignment to legacy RBAC roles should use controller specific role definitions
e.g. Controller Team Admin, Controller Team Member, Controller Organization Member, Controller Organization Admin
'''
resource = team if resource_name == 'Team' else organization
if action == 'Member':
resource.member_role.members.add(bob)
else:
resource.admin_role.members.add(bob)
rd = RoleDefinition.objects.get(name=f'Controller {resource_name} {action}')
rd_platform = RoleDefinition.objects.get(name=f'{resource_name} {action}')
assert RoleUserAssignment.objects.filter(role_definition=rd, user=bob, object_id=resource.id).exists()
assert not RoleUserAssignment.objects.filter(role_definition=rd_platform, user=bob, object_id=resource.id).exists()

View File

@ -192,3 +192,24 @@ def test_user_auditor_rel(organization, rando, setup_managed_roles):
audit_rd = RoleDefinition.objects.get(name='Organization Audit')
audit_rd.give_permission(rando, organization)
assert list(rando.auditor_of_organizations) == [organization]
@pytest.mark.django_db
@pytest.mark.parametrize('resource_name', ['Organization', 'Team'])
@pytest.mark.parametrize('role_name', ['Member', 'Admin'])
def test_mapping_from_controller_role_definitions_to_roles(organization, team, rando, role_name, resource_name, setup_managed_roles):
"""
ensure mappings for controller roles are correct
e.g.
Controller Organization Member > organization.member_role
Controller Organization Admin > organization.admin_role
Controller Team Member > team.member_role
Controller Team Admin > team.admin_role
"""
resource = organization if resource_name == 'Organization' else team
old_role_name = f"{role_name.lower()}_role"
getattr(resource, old_role_name).members.add(rando)
assignment = RoleUserAssignment.objects.get(user=rando)
assert assignment.role_definition.name == f'Controller {resource_name} {role_name}'
old_role = get_role_from_object_role(assignment.object_role)
assert old_role.id == getattr(resource, old_role_name).id

View File

@ -1,5 +1,4 @@
import glob
import json
import os
from django.conf import settings
@ -12,122 +11,98 @@ except ImportError:
from pip._internal.req.constructors import parse_req_from_line
def test_python_and_js_licenses():
def index_licenses(path):
# Check for GPL (forbidden) and LGPL (need to ship source)
# This is not meant to be an exhaustive check.
def check_license(license_file):
with open(license_file) as f:
data = f.read()
is_lgpl = 'GNU LESSER GENERAL PUBLIC LICENSE' in data.upper()
# The LGPL refers to the GPL in-text
# Case-sensitive for GPL to match license text and not PSF license reference
is_gpl = 'GNU GENERAL PUBLIC LICENSE' in data and not is_lgpl
return (is_gpl, is_lgpl)
def check_license(license_file):
with open(license_file) as f:
data = f.read()
is_lgpl = 'GNU LESSER GENERAL PUBLIC LICENSE' in data.upper()
is_gpl = 'GNU GENERAL PUBLIC LICENSE' in data and not is_lgpl
return is_gpl, is_lgpl
def find_embedded_source_version(path, name):
files = os.listdir(path)
tgz_files = [f for f in files if f.endswith('.tar.gz')]
for tgz in tgz_files:
pkg_name = tgz.split('-')[0].split('_')[0]
if pkg_name == name:
return tgz.split('-')[1].split('.tar.gz')[0]
return None
list = {}
for txt_file in glob.glob('%s/*.txt' % path):
filename = txt_file.split('/')[-1]
name = filename[:-4].lower()
(is_gpl, is_lgpl) = check_license(txt_file)
list[name] = {
'name': name,
'filename': filename,
'gpl': is_gpl,
'source_required': (is_gpl or is_lgpl),
'source_version': find_embedded_source_version(path, name),
}
return list
def find_embedded_source_version(path, name):
files = os.listdir(path)
tgz_files = [f for f in files if f.endswith('.tar.gz')]
for tgz in tgz_files:
pkg_name = tgz.split('-')[0].split('_')[0]
if pkg_name == name:
return tgz.split('-')[1].split('.tar.gz')[0]
return None
def read_api_requirements(path):
ret = {}
skip_pbr_license_check = False
for req_file in ['requirements.txt', 'requirements_git.txt']:
fname = '%s/%s' % (path, req_file)
for reqt in parse_requirements(fname, session=''):
parsed_requirement = parse_req_from_line(reqt.requirement, None)
name = parsed_requirement.requirement.name
version = str(parsed_requirement.requirement.specifier)
if version.startswith('=='):
version = version[2:]
if parsed_requirement.link:
if str(parsed_requirement.link).startswith(('http://', 'https://')):
(name, version) = str(parsed_requirement.requirement).split('==', 1)
else:
(name, version) = parsed_requirement.link.filename.split('@', 1)
if name.endswith('.git'):
name = name[:-4]
if name == 'receptor':
name = 'receptorctl'
if name == 'ansible-runner':
skip_pbr_license_check = True
ret[name] = {'name': name, 'version': version}
if 'pbr' in ret and skip_pbr_license_check:
del ret['pbr']
return ret
def index_licenses(path):
licenses = {}
for txt_file in glob.glob(f'{path}/*.txt'):
filename = os.path.basename(txt_file)
name = filename[:-4].lower()
is_gpl, is_lgpl = check_license(txt_file)
licenses[name] = {
'name': name,
'filename': filename,
'gpl': is_gpl,
'source_required': is_gpl or is_lgpl,
'source_version': find_embedded_source_version(path, name),
}
return licenses
def read_ui_requirements(path):
def json_deps(jsondata):
ret = {}
deps = jsondata.get('dependencies', {})
for key in deps.keys():
key = key.lower()
devonly = deps[key].get('dev', False)
if not devonly:
if key not in ret.keys():
depname = key.replace('/', '-')
if depname[0] == '@':
depname = depname[1:]
ret[depname] = {'name': depname, 'version': deps[key]['version']}
ret.update(json_deps(deps[key]))
return ret
with open('%s/package-lock.json' % path) as f:
jsondata = json.load(f)
return json_deps(jsondata)
def parse_requirement(reqt):
parsed_requirement = parse_req_from_line(reqt.requirement, None)
name = parsed_requirement.requirement.name
version = str(parsed_requirement.requirement.specifier)
if version.startswith('=='):
version = version[2:]
if parsed_requirement.link:
if str(parsed_requirement.link).startswith(('http://', 'https://')):
name, version = str(parsed_requirement.requirement).split('==', 1)
else:
name, version = parsed_requirement.link.filename.split('@', 1)
if name.endswith('.git'):
name = name[:-4]
if name == 'receptor':
name = 'receptorctl'
return name, version
def remediate_licenses_and_requirements(licenses, requirements):
errors = []
items = list(licenses.keys())
items.sort()
for item in items:
if item not in [r.lower() for r in requirements.keys()] and item != 'awx':
errors.append(" license file %s does not correspond to an existing requirement; it should be removed." % (licenses[item]['filename'],))
continue
# uWSGI has a linking exception
if licenses[item]['gpl'] and item != 'uwsgi':
errors.append(" license for %s is GPL. This software cannot be used." % (item,))
if licenses[item]['source_required']:
version = requirements[item]['version']
if version != licenses[item]['source_version']:
errors.append(" embedded source for %s is %s instead of the required version %s" % (item, licenses[item]['source_version'], version))
elif licenses[item]['source_version']:
errors.append(" embedded source version %s for %s is included despite not being needed" % (licenses[item]['source_version'], item))
items = list(requirements.keys())
items.sort()
for item in items:
if item.lower() not in licenses.keys():
errors.append(" license for requirement %s is missing" % (item,))
return errors
base_dir = settings.BASE_DIR
api_licenses = index_licenses('%s/../licenses' % base_dir)
ui_licenses = index_licenses('%s/../licenses/ui' % base_dir)
api_requirements = read_api_requirements('%s/../requirements' % base_dir)
ui_requirements = read_ui_requirements('%s/ui' % base_dir)
def read_api_requirements(path):
requirements = {}
skip_pbr_license_check = False
for req_file in ['requirements.txt', 'requirements_git.txt']:
fname = f'{path}/{req_file}'
for reqt in parse_requirements(fname, session=''):
name, version = parse_requirement(reqt)
if name == 'ansible-runner':
skip_pbr_license_check = True
requirements[name] = {'name': name, 'version': version}
if 'pbr' in requirements and skip_pbr_license_check:
del requirements['pbr']
return requirements
def remediate_licenses_and_requirements(licenses, requirements):
    # NOTE(review): this span is the post-change (f-string) copy of the same
    # function rendered in a diff view. The two `errors +=` lines directly
    # below look like interleaved context from a different function — they
    # recurse into this function with names (`ui_licenses`, `api_licenses`,
    # ...) that are not defined in this scope. Confirm against the actual
    # file before relying on this text.
    errors = []
    errors += remediate_licenses_and_requirements(ui_licenses, ui_requirements)
    errors += remediate_licenses_and_requirements(api_licenses, api_requirements)
    # Forward check: every indexed license must correspond to a requirement
    # (the 'awx' license file itself is exempt).
    for item in sorted(licenses.keys()):
        if item not in [r.lower() for r in requirements.keys()] and item != 'awx':
            errors.append(f" license file {licenses[item]['filename']} does not correspond to an existing requirement; it should be removed.")
            continue
        # uWSGI is exempted from the GPL check (linking exception).
        if licenses[item]['gpl'] and item != 'uwsgi':
            errors.append(f" license for {item} is GPL. This software cannot be used.")
        # Embedded source must be present at exactly the pinned version, and
        # absent when not required.
        if licenses[item]['source_required']:
            version = requirements[item]['version']
            if version != licenses[item]['source_version']:
                errors.append(f" embedded source for {item} is {licenses[item]['source_version']} instead of the required version {version}")
        elif licenses[item]['source_version']:
            errors.append(f" embedded source version {licenses[item]['source_version']} for {item} is included despite not being needed")
    # Reverse check: every requirement must have a license file.
    for item in sorted(requirements.keys()):
        if item.lower() not in licenses.keys():
            errors.append(f" license for requirement {item} is missing")
    return errors
def test_python_licenses():
    """Verify the bundled license files stay consistent with the Python requirements."""
    root = settings.BASE_DIR
    problems = remediate_licenses_and_requirements(
        index_licenses(f'{root}/../licenses'),
        read_api_requirements(f'{root}/../requirements'),
    )
    if problems:
        raise Exception('Included licenses not consistent with requirements:\n%s' % '\n'.join(problems))

View File

@ -73,11 +73,16 @@ class TestMigrationSmoke:
def test_migrate_DAB_RBAC(self, migrator):
old_state = migrator.apply_initial_migration(('main', '0190_alter_inventorysource_source_and_more'))
Organization = old_state.apps.get_model('main', 'Organization')
Team = old_state.apps.get_model('main', 'Team')
User = old_state.apps.get_model('auth', 'User')
org = Organization.objects.create(name='arbitrary-org', created=now(), modified=now())
user = User.objects.create(username='random-user')
org.read_role.members.add(user)
org.member_role.members.add(user)
team = Team.objects.create(name='arbitrary-team', organization=org, created=now(), modified=now())
team.member_role.members.add(user)
new_state = migrator.apply_tested_migration(
('main', '0192_custom_roles'),
@ -85,6 +90,8 @@ class TestMigrationSmoke:
RoleUserAssignment = new_state.apps.get_model('dab_rbac', 'RoleUserAssignment')
assert RoleUserAssignment.objects.filter(user=user.id, object_id=org.id).exists()
assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Controller Organization Member', object_id=org.id).exists()
assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Controller Team Member', object_id=team.id).exists()
# Regression testing for bug that comes from current vs past models mismatch
RoleDefinition = new_state.apps.get_model('dab_rbac', 'RoleDefinition')

View File

@ -63,6 +63,11 @@ def check_user_capabilities(get, setup_managed_roles):
# ___ begin tests ___
@pytest.mark.django_db
def test_any_user_can_view_global_ee(control_plane_execution_environment, rando):
assert ExecutionEnvironmentAccess(rando).can_read(control_plane_execution_environment)
@pytest.mark.django_db
def test_managed_ee_not_assignable(control_plane_execution_environment, ee_rd, rando, admin_user, post):
url = django_reverse('roleuserassignment-list')
@ -78,27 +83,22 @@ def test_org_member_required_for_assignment(org_ee, ee_rd, rando, admin_user, po
@pytest.mark.django_db
def test_team_view_permission_required(org_ee, ee_rd, rando, admin_user, post):
def test_team_can_have_permission(org_ee, ee_rd, rando, admin_user, post):
org2 = Organization.objects.create(name='a different team')
team = Team.objects.create(name='a team', organization=org2)
team.member_role.members.add(rando)
assert org_ee not in ExecutionEnvironmentAccess(rando).get_queryset() # user can not view the EE
url = django_reverse('roleteamassignment-list')
r = post(url, {'role_definition': ee_rd.pk, 'team': team.id, 'object_id': org_ee.pk}, user=admin_user, expect=400)
assert 'Team must have view permission to Execution Environment organization' in str(r.data)
org_view_rd = RoleDefinition.objects.create_from_permissions(
name='organization viewer role', permissions=['view_organization'], content_type=ContentType.objects.get_for_model(Organization)
)
org_view_rd.give_permission(team, org_ee.organization)
assert org_ee in ExecutionEnvironmentAccess(rando).get_queryset() # user can view the EE now
url = django_reverse('roleteamassignment-list')
# can give object roles to the team now
post(url, {'role_definition': ee_rd.pk, 'team': team.id, 'object_id': org_ee.pk}, user=admin_user, expect=201)
assert rando.has_obj_perm(org_ee, 'change')
assert org_ee in ExecutionEnvironmentAccess(rando).get_queryset() # user can view the EE now
@pytest.mark.django_db
def test_give_object_permission_to_ee(org_ee, ee_rd, org_member, check_user_capabilities):
def test_give_object_permission_to_ee(setup_managed_roles, org_ee, ee_rd, org_member, check_user_capabilities):
access = ExecutionEnvironmentAccess(org_member)
assert access.can_read(org_ee) # by virtue of being an org member
assert not access.can_change(org_ee, {'name': 'new'})
@ -130,7 +130,7 @@ def test_need_related_organization_access(org_ee, ee_rd, org_member):
@pytest.mark.django_db
@pytest.mark.parametrize('style', ['new', 'old'])
def test_give_org_permission_to_ee(org_ee, organization, org_member, check_user_capabilities, style, org_ee_rd):
def test_give_org_permission_to_ee(setup_managed_roles, org_ee, organization, org_member, check_user_capabilities, style, org_ee_rd):
access = ExecutionEnvironmentAccess(org_member)
assert not access.can_change(org_ee, {'name': 'new'})
check_user_capabilities(org_member, org_ee, {'edit': False, 'delete': False, 'copy': False})
@ -143,3 +143,6 @@ def test_give_org_permission_to_ee(org_ee, organization, org_member, check_user_
assert access.can_change(org_ee, {'name': 'new', 'organization': organization.id})
check_user_capabilities(org_member, org_ee, {'edit': True, 'delete': True, 'copy': True})
# Extra check, user can not remove the EE from the organization
assert not access.can_change(org_ee, {'name': 'new', 'organization': None})

View File

@ -2,7 +2,7 @@ import pytest
from rest_framework.exceptions import PermissionDenied
from awx.main.access import JobAccess, JobLaunchConfigAccess, AdHocCommandAccess, InventoryUpdateAccess, ProjectUpdateAccess
from awx.main.access import JobAccess, JobLaunchConfigAccess, AdHocCommandAccess, InventoryUpdateAccess, ProjectUpdateAccess, SystemJobTemplateAccess
from awx.main.models import (
Job,
JobLaunchConfig,
@ -350,3 +350,21 @@ class TestLaunchConfigAccess:
assert access.can_use(config)
assert rando.can_access(JobLaunchConfig, 'use', config)
@pytest.mark.django_db
class TestSystemJobTemplateAccess:
    """RBAC checks for system job templates via SystemJobTemplateAccess.

    Each test builds an access object for a user fixture and asserts the
    can_read / can_start pair for that privilege level.
    """

    def test_system_job_template_auditor(self, system_auditor, system_job_template):
        # A system auditor can view the template but may not launch it.
        access = SystemJobTemplateAccess(system_auditor)
        assert access.can_read(system_job_template)
        assert not access.can_start(system_job_template)

    def test_system_job_template_rando(self, rando, system_job_template):
        # An unprivileged user can neither view nor launch.
        access = SystemJobTemplateAccess(rando)
        assert not access.can_read(system_job_template)
        assert not access.can_start(system_job_template)

    def test_system_job_template_superuser(self, admin_user, system_job_template):
        # A superuser can both view and launch.
        access = SystemJobTemplateAccess(admin_user)
        assert access.can_read(system_job_template)
        assert access.can_start(system_job_template)

View File

@ -5,7 +5,7 @@ from django.contrib.auth.models import User
from django.forms.models import model_to_dict
from rest_framework.exceptions import ParseError
from awx.main.access import BaseAccess, check_superuser, JobTemplateAccess, WorkflowJobTemplateAccess, SystemJobTemplateAccess, vars_are_encrypted
from awx.main.access import BaseAccess, check_superuser, JobTemplateAccess, WorkflowJobTemplateAccess, vars_are_encrypted
from awx.main.models import (
Credential,
@ -239,14 +239,3 @@ def test_user_capabilities_method():
foo = object()
foo_capabilities = foo_access.get_user_capabilities(foo, ['edit', 'copy'])
assert foo_capabilities == {'edit': 'bar', 'copy': 'foo'}
def test_system_job_template_can_start(mocker):
    """can_start requires superuser; system-auditor status alone is not enough."""
    # Mocked user: a system auditor who is not a superuser.
    user = mocker.MagicMock(spec=User, id=1, is_system_auditor=True, is_superuser=False)
    assert user.is_system_auditor
    access = SystemJobTemplateAccess(user)
    # Auditor without superuser may not start (template argument unused here,
    # so None suffices — presumably can_start only checks user flags; confirm).
    assert not access.can_start(None)
    user.is_superuser = True
    access = SystemJobTemplateAccess(user)
    # With superuser set, starting is permitted.
    assert access.can_start(None)

View File

@ -1259,7 +1259,6 @@ class TestJobCredentials(TestJobExecution):
extra_vars = parse_extra_vars(args, private_data_dir)
assert extra_vars["turbo_button"] == "True"
return ['successful', 0]
def test_custom_environment_injectors_with_nested_extra_vars(self, private_data_dir, job, mock_me):
task = jobs.RunJob()

View File

@ -1,5 +1,7 @@
import re
from functools import reduce
from django.core.exceptions import FieldDoesNotExist
from pyparsing import (
infixNotation,
opAssoc,
@ -353,7 +355,7 @@ class SmartFilter(object):
try:
res = boolExpr.parseString('(' + filter_string + ')')
except ParseException:
except (ParseException, FieldDoesNotExist):
raise RuntimeError(u"Invalid query %s" % filter_string_raw)
if len(res) > 0:

View File

@ -450,7 +450,12 @@ class Licenser(object):
if first_host:
automated_since = int(first_host.first_automation.timestamp())
else:
automated_since = int(Instance.objects.order_by('id').first().created.timestamp())
try:
automated_since = int(Instance.objects.order_by('id').first().created.timestamp())
except AttributeError:
# In the odd scenario that create_preload_data was not run, there are no hosts
# Then we CAN end up here before any instance has registered
automated_since = int(time.time())
instance_count = int(attrs.get('instance_count', 0))
attrs['current_instances'] = current_instances
attrs['automated_instances'] = automated_instances

View File

@ -8,7 +8,7 @@ import ipaddress
import aiohttp
from aiohttp import client_exceptions
import aioredis
import redis
from channels.layers import get_channel_layer
@ -47,7 +47,6 @@ class WebsocketRelayConnection:
verify_ssl: bool = settings.BROADCAST_WEBSOCKET_VERIFY_CERT,
):
self.name = name
self.event_loop = asyncio.get_event_loop()
self.stats = stats
self.remote_host = remote_host
self.remote_port = remote_port
@ -110,7 +109,10 @@ class WebsocketRelayConnection:
self.stats.record_connection_lost()
def start(self):
self.async_task = self.event_loop.create_task(self.connect())
self.async_task = asyncio.get_running_loop().create_task(
self.connect(),
name=f"WebsocketRelayConnection.connect.{self.name}",
)
return self.async_task
def cancel(self):
@ -121,7 +123,10 @@ class WebsocketRelayConnection:
# metrics messages
# the "metrics" group is not subscribed to in the typical fashion, so we
# just explicitly create it
producer = self.event_loop.create_task(self.run_producer("metrics", websocket, "metrics"))
producer = asyncio.get_running_loop().create_task(
self.run_producer("metrics", websocket, "metrics"),
name="WebsocketRelayConnection.run_producer.metrics",
)
self.producers["metrics"] = {"task": producer, "subscriptions": {"metrics"}}
async for msg in websocket:
self.stats.record_message_received()
@ -143,7 +148,10 @@ class WebsocketRelayConnection:
name = f"{self.remote_host}-{group}"
origin_channel = payload['origin_channel']
if not self.producers.get(name):
producer = self.event_loop.create_task(self.run_producer(name, websocket, group))
producer = asyncio.get_running_loop().create_task(
self.run_producer(name, websocket, group),
name=f"WebsocketRelayConnection.run_producer.{name}",
)
self.producers[name] = {"task": producer, "subscriptions": {origin_channel}}
logger.debug(f"Producer {name} started.")
else:
@ -191,7 +199,7 @@ class WebsocketRelayConnection:
return
continue
except aioredis.errors.ConnectionClosedError:
except redis.exceptions.ConnectionError:
logger.info(f"Producer {name} lost connection to Redis, shutting down.")
return
@ -297,9 +305,7 @@ class WebSocketRelayManager(object):
pass
async def run(self):
event_loop = asyncio.get_running_loop()
self.stats_mgr = RelayWebsocketStatsManager(event_loop, self.local_hostname)
self.stats_mgr = RelayWebsocketStatsManager(self.local_hostname)
self.stats_mgr.start()
database_conf = deepcopy(settings.DATABASES['default'])
@ -323,7 +329,10 @@ class WebSocketRelayManager(object):
)
await async_conn.set_autocommit(True)
on_ws_heartbeat_task = event_loop.create_task(self.on_ws_heartbeat(async_conn))
on_ws_heartbeat_task = asyncio.get_running_loop().create_task(
self.on_ws_heartbeat(async_conn),
name="WebSocketRelayManager.on_ws_heartbeat",
)
# Establishes a websocket connection to /websocket/relay on all API servers
while True:

View File

@ -91,8 +91,7 @@ USE_L10N = True
USE_TZ = True
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'ui', 'build', 'static'),
os.path.join(BASE_DIR, 'ui_next', 'build'),
os.path.join(BASE_DIR, 'ui', 'build'),
os.path.join(BASE_DIR, 'static'),
]
@ -323,9 +322,8 @@ TEMPLATES = [
},
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
os.path.join(BASE_DIR, 'ui', 'build'),
os.path.join(BASE_DIR, 'ui', 'public'),
os.path.join(BASE_DIR, 'ui_next', 'build', 'awx'),
os.path.join(BASE_DIR, 'ui', 'build', 'awx'),
],
},
]
@ -662,6 +660,9 @@ AWX_AUTO_DEPROVISION_INSTANCES = False
# e.g. organizations, teams, and users
ALLOW_LOCAL_RESOURCE_MANAGEMENT = True
# If True, allow users to be assigned to roles that were created via JWT
ALLOW_LOCAL_ASSIGNING_JWT_ROLES = False
# Enable Pendo on the UI, possible values are 'off', 'anonymous', and 'detailed'
# Note: This setting may be overridden by database settings.
PENDO_TRACKING_STATE = "off"
@ -1009,12 +1010,14 @@ AWX_RUNNER_KEEPALIVE_SECONDS = 0
# Delete completed work units in receptor
RECEPTOR_RELEASE_WORK = True
RECPETOR_KEEP_WORK_ON_ERROR = False
# K8S only. Use receptor_log_level on AWX spec to set this properly
RECEPTOR_LOG_LEVEL = 'info'
MIDDLEWARE = [
'django_guid.middleware.guid_middleware',
'ansible_base.lib.middleware.logging.log_request.LogTracebackMiddleware',
'awx.main.middleware.SettingsCacheMiddleware',
'awx.main.middleware.TimingMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
@ -1089,8 +1092,6 @@ AWX_MOUNT_ISOLATED_PATHS_ON_K8S = False
# This is overridden downstream via /etc/tower/conf.d/cluster_host_id.py
CLUSTER_HOST_ID = socket.gethostname()
UI_NEXT = True
# License compliance for total host count. Possible values:
# - '': No model - Subscription not counted from Host Metrics
# - 'unique_managed_hosts': Compliant = automated - deleted hosts (using /api/v2/host_metrics/)

View File

View File

@ -1,11 +0,0 @@
jest.*.js
webpack.*.js
etc
coverage
build
node_modules
dist
images
instrumented
*test*.js

View File

@ -1,168 +0,0 @@
{
"parser": "@babel/eslint-parser",
"ignorePatterns": ["./node_modules/"],
"parserOptions": {
"requireConfigFile": false,
"ecmaVersion": 6,
"sourceType": "module",
"ecmaFeatures": {
"jsx": true,
"modules": true
},
"babelOptions": {
"presets": ["@babel/preset-react"]
}
},
"plugins": ["react-hooks", "jsx-a11y", "i18next", "@babel"],
"extends": [
"airbnb",
"prettier",
"plugin:jsx-a11y/strict",
"plugin:i18next/recommended"
],
"settings": {
"react": {
"version": "detect"
},
"import/resolver": {
"node": {
"paths": ["src"]
}
}
},
"env": {
"browser": true,
"node": true,
"jest": true
},
"globals": {
"window": true
},
"rules": {
"i18next/no-literal-string": [
2,
{
"markupOnly": true,
"ignoreAttribute": [
"data-testid",
"dateFieldName",
"timeFieldName",
"to",
"streamType",
"path",
"component",
"variant",
"key",
"position",
"promptName",
"color",
"promptId",
"headingLevel",
"size",
"target",
"autoComplete",
"trigger",
"from",
"name",
"fieldId",
"css",
"gutter",
"dataCy",
"tooltipMaxWidth",
"mode",
"aria-labelledby",
"aria-hidden",
"aria-controls",
"aria-pressed",
"sortKey",
"ouiaId",
"credentialTypeNamespace",
"link",
"value",
"credentialTypeKind",
"linkTo",
"scrollToAlignment",
"displayKey",
"sortedColumnKey",
"maxHeight",
"maxWidth",
"role",
"aria-haspopup",
"dropDirection",
"resizeOrientation",
"src",
"theme",
"gridColumns",
"rows",
"href",
"modifier",
"data-cy",
"fieldName",
"splitButtonVariant",
"pageKey",
"textId",
"rel"
],
"ignore": [
"Ansible",
"Tower",
"JSON",
"YAML",
"lg",
"hh:mm AM/PM",
"Twilio"
],
"ignoreComponent": [
"AboutModal",
"code",
"Omit",
"PotentialLink",
"TypeRedirect",
"Radio",
"RunOnRadio",
"NodeTypeLetter",
"SelectableItem",
"Dash",
"Plural"
],
"ignoreCallee": ["describe"]
}
],
"camelcase": "off",
"arrow-parens": "off",
"comma-dangle": "off",
// https://github.com/benmosher/eslint-plugin-import/issues/479#issuecomment-252500896
"import/no-extraneous-dependencies": "off",
"max-len": [
"error",
{
"code": 100,
"ignoreStrings": true,
"ignoreTemplateLiterals": true
}
],
"no-continue": "off",
"no-debugger": "off",
"no-mixed-operators": "off",
"no-param-reassign": "off",
"no-plusplus": "off",
"no-underscore-dangle": "off",
"no-use-before-define": "off",
"no-multiple-empty-lines": ["error", { "max": 1 }],
"object-curly-newline": "off",
"no-trailing-spaces": ["error"],
"no-unused-expressions": ["error", { "allowShortCircuit": true }],
"react/jsx-props-no-spreading": ["off"],
"react/prefer-stateless-function": "off",
"react/prop-types": "off",
"react/sort-comp": ["error", {}],
"jsx-a11y/label-has-for": "off",
"jsx-a11y/label-has-associated-control": "off",
"react-hooks/rules-of-hooks": "error",
"react-hooks/exhaustive-deps": "warn",
"react/jsx-filename-extension": "off",
"no-restricted-exports": "off",
"react/function-component-definition": "off",
"prefer-regex-literals": "off"
}
}

View File

@ -1,17 +0,0 @@
{"catalogs":[{
"path": "<rootDir>/locales/{locale}/messages",
"include": ["<rootDir>"],
"exclude": ["**/node_modules/**"]
}],
"compileNamespace": "cjs",
"extractBabelOptions": {},
"compilerBabelOptions": {},
"fallbackLocales": { "default": "en"},
"format": "po",
"locales": ["en","es","fr","ko","nl","zh","ja","zu"],
"orderBy": "messageId",
"pseudoLocale": "zu",
"rootDir": "./src",
"runtimeConfigModule": ["@lingui/core", "i18n"],
"sourceLocale": "en"
}

View File

@ -1 +0,0 @@
engine-strict = true

View File

@ -1,2 +0,0 @@
build
src/locales

View File

@ -1,8 +0,0 @@
{
"printWidth": 80,
"tabWidth": 2,
"semi": true,
"singleQuote": true,
"trailingComma": "es5",
"bracketSpacing": true
}

View File

@ -1,363 +0,0 @@
# Ansible AWX UI With PatternFly
Hi there! We're excited to have you as a contributor.
Have questions about this document or anything not covered here? Feel free to reach out to any of the contributors of this repository.
## Table of contents
- [Ansible AWX UI With PatternFly](#ansible-awx-ui-with-patternfly)
- [Table of contents](#table-of-contents)
- [Things to know prior to submitting code](#things-to-know-prior-to-submitting-code)
- [Setting up your development environment](#setting-up-your-development-environment)
- [Prerequisites](#prerequisites)
- [Node and npm](#node-and-npm)
- [Build the User Interface](#build-the-user-interface)
- [Accessing the AWX web interface](#accessing-the-awx-web-interface)
- [AWX REST API Interaction](#awx-rest-api-interaction)
- [Handling API Errors](#handling-api-errors)
- [Forms](#forms)
- [Working with React](#working-with-react)
- [App structure](#app-structure)
- [Patterns](#patterns)
- [Bootstrapping the application (root src/ files)](#bootstrapping-the-application-root-src-files)
- [Naming files](#naming-files)
- [Naming components that use the context api](#naming-components-that-use-the-context-api)
- [Class constructors vs Class properties](#class-constructors-vs-class-properties)
- [Binding](#binding)
- [Typechecking with PropTypes](#typechecking-with-proptypes)
- [Custom Hooks](#custom-hooks)
- [Naming Functions](#naming-functions)
- [Default State Initialization](#default-state-initialization)
- [Testing components that use contexts](#testing-components-that-use-contexts)
- [Internationalization](#internationalization)
- [Marking strings for translation and replacement in the UI](#marking-strings-for-translation-and-replacement-in-the-ui)
- [Setting up .po files to give to translation team](#setting-up-po-files-to-give-to-translation-team)
- [Marking an issue to be translated](#marking-an-issue-to-be-translated)
## Things to know prior to submitting code
- All code submissions are done through pull requests against the `devel` branch.
- If collaborating with someone else on the same branch, please use `--force-with-lease` instead of `--force` when pushing up code. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt
- We use a [code formatter](https://prettier.io/). Before adding a new commit or opening a PR, please apply the formatter using `npm run prettier`
- We adopt the following code style guide:
- functions should adopt camelCase
- constructors/classes should adopt PascalCase
- constants to be exported should adopt UPPERCASE
- For strings, we adopt the `sentence capitalization` since it is a [Patternfly style guide](https://www.patternfly.org/v4/ux-writing/capitalization).
## Setting up your development environment
The UI is built using [ReactJS](https://reactjs.org/docs/getting-started.html) and [Patternfly](https://www.patternfly.org/).
### Prerequisites
#### Node and npm
The AWX UI requires the following:
- Node >= 16.13.1 LTS
- NPM 8.x
Run the following to install all the dependencies:
```bash
(host) $ npm install
```
#### Build the User Interface
Run the following to build the AWX UI:
```bash
(host) $ npm run start
```
## Accessing the AWX web interface
You can now log into the AWX web interface at [https://127.0.0.1:3001](https://127.0.0.1:3001).
## AWX REST API Interaction
This interface is built on top of the AWX REST API. If a component needs to interact with the API then the model that corresponds to that base endpoint will need to be imported from the api module.
Example:
`import { OrganizationsAPI, UsersAPI } from '../../../api';`
All models extend a `Base` class which provides an interface to the standard HTTP methods (GET, POST, PUT etc). Methods that are specific to that endpoint should be added directly to model's class.
**Mixins** - For related endpoints that apply to several different models a mixin should be used. Mixins are classes with a number of methods and can be used to avoid adding the same methods to a number of different models. A good example of this is the Notifications mixin. This mixin provides generic methods for reading notification templates and toggling them on and off.
Note that mixins can be chained. See the example below.
Example of a model using multiple mixins:
```javascript
import NotificationsMixin from '../mixins/Notifications.mixin';
import InstanceGroupsMixin from '../mixins/InstanceGroups.mixin';
class Organizations extends InstanceGroupsMixin(NotificationsMixin(Base)) {
...
}
export default Organizations;
```
**Testing** - The easiest way to mock the api module in tests is to use jest's [automatic mock](https://jestjs.io/docs/en/es6-class-mocks#automatic-mock). This syntax will replace the class with a mock constructor and mock out all methods to return undefined by default. If necessary, you can still override these mocks for specific tests. See the example below.
Example of mocking a specific method for every test in a suite:
```javascript
import { OrganizationsAPI } from '../../../../src/api';
// Mocks out all available methods. Comparable to:
// OrganizationsAPI.readAccessList = jest.fn();
// but for every available method
jest.mock('../../../../src/api');
// Return a specific mock value for the readAccessList method
beforeEach(() => {
OrganizationsAPI.readAccessList.mockReturnValue({ foo: 'bar' });
});
// Reset mocks
afterEach(() => {
jest.clearAllMocks();
});
...
```
**Test Attributes** -
It should be noted that the `dataCy` prop, as well as its equivalent attribute `data-cy`, are used as flags for any UI test that wants to avoid relying on brittle CSS selectors such as `nth-of-type()`.
## Handling API Errors
API requests can and will fail occasionally so they should include explicit error handling. The three _main_ categories of errors from our perspective are: content loading errors, form submission errors, and other errors. The patterns currently in place for these are described below:
- **content loading errors** - These are any errors that occur when fetching data to initialize a page or populate a list. For these, we conditionally render a _content error component_ in place of the unresolved content.
- **form submission errors** - If an error is encountered when submitting a form, we display the error message on the form. For field-specific validation errors, we display the error message beneath the specific field(s). For general errors, we display the error message at the bottom of the form near the action buttons. An error that happens when requesting data to populate a form is not a form submission error, it is still a content error and is handled as such (see above).
- **other errors** - Most errors will fall into the first two categories, but for miscellaneous actions like toggling notifications, deleting a list item, etc. we display an alert modal to notify the user that their requested action couldn't be performed.
## Forms
Our forms should have a known, consistent, and fully-resolved starting state before it is possible for a user, keyboard-mouse, screen reader, or automated test to interact with them. If multiple network calls are needed to populate a form, resolve them all before displaying the form or showing a content error. When multiple requests are needed to create or update the resources represented by a form, resolve them all before transitioning the ui to a success or failure state.
## Working with React
### App structure
All source code lives in the `/src` directory and all tests are colocated with the components that they test.
Inside these folders, the internal structure is:
- **/api** - All classes used to interact with API's are found here. See [AWX REST API Interaction](#awx-rest-api-interaction) for more information.
- **/components** - All generic components that are meant to be used in multiple contexts throughout awx. Things like buttons, tabs go here.
- **/contexts** - Components which utilize react's context api.
- **/hooks** - Custom react [hooks](https://reactjs.org/docs/hooks-custom.html)
- **/locales** - [Internationalization](#internationalization) config and source files.
- **/screens** - Based on the various routes of awx.
- **/shared** - Components that are meant to be used specifically by a particular route, but might be shareable across pages of that route. For example, a form component which is used on both add and edit screens.
- **/util** - Stateless helper functions that aren't tied to react.
### Patterns
- A **screen** shouldn't import from another screen. If a component _needs_ to be shared between two or more screens, it is a generic and should be moved to `src/components`.
#### Bootstrapping the application (root src/ files)
In the root of `/src`, there are a few files which are used to initialize the react app. These are
- **index.js**
- Connects react app to root dom node.
- Sets up root route structure, navigation grouping and login modal
- Calls base context providers
- Imports .scss styles.
- **app.js**
- Sets standard page layout, about modal, and root dialog modal.
- **RootProvider.js**
- Sets up all context providers.
- Initializes i18n and router
### Naming files
Ideally, files should be named the same as the component they export, and tests with `.test` appended. In other words, `<FooBar>` would be defined in `FooBar.js`, and its tests would be defined in `FooBar.test.js`.
#### Naming components that use the context api
**File naming** - Since contexts export both consumer and provider (and potentially in withContext function form), the file can be simplified to be named after the consumer export. In other words, the file containing the `Network` context components would be named `Network.js`.
**Component naming and conventions** - In order to provide a consistent interface with react-router and [lingui](https://lingui.js.org/), as well as make their usage easier and less verbose, context components follow these conventions:
- Providers are wrapped in a component in the `FooProvider` format.
- The value prop of the provider should be pulled from state. This is recommended by the react docs, [here](https://reactjs.org/docs/context.html#caveats).
- The provider should also be able to accept its value by prop for testing.
- Any sort of code related to grabbing data to put on the context should be done in this component.
- Consumers are wrapped in a component in the `Foo` format.
- If it makes sense, consumers can be exported as a function in the `withFoo()` format. If a component is wrapped in this function, its context values are available on the component as props.
### Class constructors vs Class properties
It is good practice to use constructor-bound instance methods rather than methods as class properties. Arrow-function class properties are lexically scoped and bound to each Component instance rather than to the class itself, which makes it hard to test a Component's methods without instantiating the Component and calling the method on that instance in our tests.
BAD:
```javascript
class MyComponent extends React.Component {
constructor(props) {
super(props);
}
myEventHandler = () => {
// do a thing
};
}
```
GOOD:
```javascript
class MyComponent extends React.Component {
constructor(props) {
super(props);
this.myEventHandler = this.myEventHandler.bind(this);
}
myEventHandler() {
// do a thing
}
}
```
### Binding
It is good practice to bind our class methods within our class constructor method for the following reasons:
1. Avoid defining the method every time `render()` is called.
2. [Performance advantages](https://stackoverflow.com/a/44844916).
3. Ease of [testing](https://github.com/airbnb/enzyme/issues/365).
### Typechecking with PropTypes
Shared components should have their prop values typechecked. This will help catch bugs when components get refactored/renamed.
```javascript
About.propTypes = {
ansible_version: PropTypes.string,
isOpen: PropTypes.bool,
onClose: PropTypes.func.isRequired,
version: PropTypes.string,
};
About.defaultProps = {
ansible_version: null,
isOpen: false,
version: null,
};
```
### Custom Hooks
There are currently a few custom hooks:
1. [useRequest](https://github.com/ansible/awx/blob/devel/awx/ui/src/util/useRequest.js#L21) encapsulates main actions related to requests.
2. [useDismissableError](https://github.com/ansible/awx/blob/devel/awx/ui/src/util/useRequest.js#L71) provides controls for "dismissing" an error message.
3. [useDeleteItems](https://github.com/ansible/awx/blob/devel/awx/ui/src/util/useRequest.js#L98) handles deletion of items from a paginated item list.
4. [useSelected](https://github.com/ansible/awx/blob/devel/awx/ui/src/util/useSelected.js#L14) provides a way to read and update a selected list.
### Naming Functions
Here are the guidelines for how to name functions.
| Naming Convention | Description |
| ----------------- | --------------------------------------------------------------------------------- |
| `handle<x>` | Use for methods that process events |
| `on<x>` | Use for component prop names |
| `toggle<x>` | Use for methods that flip one value to the opposite value |
| `show<x>` | Use for methods that always set a value to show or add an element |
| `hide<x>` | Use for methods that always set a value to hide or remove an element |
| `create<x>` | Use for methods that make API `POST` requests |
| `read<x>` | Use for methods that make API `GET` requests |
| `update<x>` | Use for methods that make API `PATCH` requests |
| `destroy<x>` | Use for methods that make API `DESTROY` requests |
| `replace<x>` | Use for methods that make API `PUT` requests |
| `disassociate<x>` | Use for methods that pass `{ disassociate: true }` as a data param to an endpoint |
| `associate<x>` | Use for methods that pass a resource id as a data param to an endpoint |
| `can<x>` | Use for props dealing with RBAC to denote whether a user has access to something |
### Default State Initialization
When declaring empty initial states, prefer the following instead of leaving them undefined:
```javascript
this.state = {
somethingA: null,
somethingB: [],
somethingC: 0,
somethingD: {},
somethingE: '',
};
```
### Testing components that use contexts
We have several React contexts that wrap much of the app, including those from react-router, lingui, and some of our own. When testing a component that depends on one or more of these, you can use the `mountWithContexts()` helper function found in `testUtils/enzymeHelpers.js`. This can be used just like Enzyme's `mount()` function, except it will wrap the component tree with the necessary context providers and basic stub data.
If you want to stub the value of a context, or assert actions taken on it, you can customize a contexts value by passing a second parameter to `mountWithContexts`. For example, this provides a custom value for the `Config` context:
```javascript
const config = {
custom_virtualenvs: ['foo', 'bar'],
};
mountWithContexts(<OrganizationForm />, {
context: { config },
});
```
Now that these custom virtual environments are available in this `OrganizationForm` test we can assert that the component that displays
them is rendering properly.
The object containing context values looks for five known contexts, identified by the keys `linguiPublisher`, `router`, `config`, `network`, and `dialog` — the latter three each referring to the contexts defined in `src/contexts`. You can pass `false` for any of these values, and the corresponding context will be omitted from your test. For example, this will mount your component without the dialog context:
```javascript
mountWithContexts(<Organization />, {
context: {
dialog: false,
}
});
```
## Internationalization
Internationalization leans on the [lingui](https://github.com/lingui/js-lingui) project. [Official documentation here](https://lingui.js.org/). We use this library to mark our strings for translation. If you want to see this in action you'll need to take the following steps:
### Marking strings for translation and replacement in the UI
The lingui library provides various React helpers for dealing with both marking strings for translation, and replacing strings that have been translated. For consistency and ease of use, we have consolidated on one pattern for the codebase. To set strings to be translated in the UI:
- import the t template tag function from the @lingui/macro package.
- wrap your string using the following format: `` t`String to be translated` ``
**Note:** If you have a variable string with text that needs translating, you must wrap it in `` t`${variable} string` `` where it is defined. Then you must run `npm run extract-strings` to generate new `.po` files and submit those files along with your pull request.
**Note:** We try to avoid the `I18n` consumer, or `i18nMark` function lingui gives us access to in this repo. i18nMark does not actually replace the string in the UI (leading to the potential for untranslated bugs), and the other helpers are redundant. Settling on a consistent, single pattern helps us ease the mental overhead of the need to understand the ins and outs of the lingui API.
**Note:** Pluralization can be complicated, so it is best to let lingui handle cases where we have a string that may need to be pluralized based on the number of items, or count. In that case lingui provides a `<Plural>` component and a `plural()` function. When adding or updating strings in a `<Plural/>` tag you must run `npm run extract-strings` and submit the new `.po` files with your pull request. See documentation [here](https://lingui.js.org/guides/plurals.html?highlight=pluralization).
You can learn more about lingui and its React helpers at [this link](https://lingui.js.org/tutorials/react-patterns.html).
### Setting up .po files to give to translation team
1. Make sure that the languages you intend to translate are set correctly in the `.linguirc` configuration file.
2. `npm run extract-strings` to create .po files for each language specified. The .po files will be placed in src/locales. When updating strings that are used by `<Plural>` or `plural()` you will need to run this command to get the strings to render properly. This command will create `.po` files for each of the supported languages that will need to be committed with your PR.
3. Open up the .po file for the language you want to test and add some translations. In production we would pass this .po file off to the translation team.
4. Once you've edited your .po file (or we've gotten a .po file back from the translation team) run `npm run compile-strings`. This command takes the .po files and turns them into a minified JSON object and can be seen in the `messages.js` file in each locale directory. These files get loaded at the App root level (see: App.js).
5. Change the language in your browser and reload the page. You should see your specified translations in place of English strings.
### Marking an issue to be translated
1. Issues marked with `component:I10n` should not be closed after the issue was fixed.
2. Remove the label `state:needs_devel`.
3. Add the label `state:pending_translations`. At this point, the translations will be batch translated by a maintainer, creating relevant entries in the PO files. Then after those translations have been merged, the issue can be closed.

View File

@ -1,19 +0,0 @@
# CI image for the AWX UI development server (per the README: for CI, test
# systems, zuul, etc. — not for local development).
FROM node:16.13.1
# npm configuration file to copy into the image; overridable at build time.
ARG NPMRC_FILE=.npmrc
ENV NPMRC_FILE=${NPMRC_FILE}
# URL of the AWX API server the UI dev server talks to (see README TARGET usage).
ARG TARGET='https://awx:8043'
ENV TARGET=${TARGET}
# Run npm/tooling in CI mode (non-interactive).
ENV CI=true
WORKDIR /ui
# Config files and dependency manifests are added before `npm install` so the
# install layer is cached independently of source changes.
ADD .eslintignore .eslintignore
ADD .eslintrc.json .eslintrc.json
ADD .linguirc .linguirc
ADD jsconfig.json jsconfig.json
ADD public public
ADD package.json package.json
ADD package-lock.json package-lock.json
COPY ${NPMRC_FILE} .npmrc
RUN npm install
# Source is added last; the README's run command bind-mounts ./src over /ui/src.
ADD src src
# Port the dev server listens on.
EXPOSE 3001
CMD [ "npm", "start" ]

116
awx/ui/Makefile Normal file
View File

@ -0,0 +1,116 @@
## UI_DIR: Relative path to the directory containing this Makefile
UI_DIR := $(patsubst %/,%,$(dir $(lastword $(MAKEFILE_LIST))))

## Path to your local clone of the UI repo
# NOTE: you will not be able to build within the docker-compose development environment if you use this option
UI_LOCAL ?=

## Git repo and branch to the UI repo
UI_GIT_REPO ?= https://github.com/ansible/ansible-ui.git
UI_GIT_BRANCH ?= main

## Product name to display on the UI used in UI build process
PRODUCT ?= AWX

.PHONY: ui
## Default build target of ui Makefile, builds ui/build
ui: ui/build

.PHONY: ui/build
## Build ui/build
ui/build: $(UI_DIR)/build

## True build target for ui: build from source, then copy the result into place.
$(UI_DIR)/build:
	@$(MAKE) $(UI_DIR)/src/build/awx
	@echo "=== Copying $(UI_DIR)/src/build to $(UI_DIR)/build ==="
	@rm -rf $(UI_DIR)/build
	@cp -r $(UI_DIR)/src/build $(UI_DIR)
	@echo "=== Done building $(UI_DIR)/build ==="

.PHONY: ui/src/build
## Build ui/src/build
ui/src/build: $(UI_DIR)/src/build/awx

## True target for ui/src/build. Build ui from source.
# index.html is renamed to index_awx.html so it can be served as a distinct template.
$(UI_DIR)/src/build/awx: $(UI_DIR)/src $(UI_DIR)/src/node_modules/webpack
	@echo "=== Building ui ==="
	@cd $(UI_DIR)/src && PRODUCT="$(PRODUCT)" PUBLIC_PATH=/static/awx/ ROUTE_PREFIX=/ npm run build:awx
	@mv $(UI_DIR)/src/build/awx/index.html $(UI_DIR)/src/build/awx/index_awx.html

.PHONY: ui/src
## Clone or link src of UI to ui/src, will re-clone/link/update if necessary.
ui/src: $(UI_DIR)/src

# TODO: Rewrite this big bash script in a more readable way.
## True target for ui/src.
# Precedence: UI_LOCAL (symlink to a local clone) wins over UI_GIT_REPO (clone/update).
# BUGFIX: the update path previously ran a bare `git fetch && git pull`, which
# operated on the repository containing this Makefile (make's working directory)
# instead of the UI clone; `git -C $(UI_DIR)/src` pins the operations to the clone.
$(UI_DIR)/src:
	@echo "=== Setting up $(UI_DIR)/src ==="
	@if [ ! -z "$(UI_LOCAL)" ]; then \
	    if [ -d $(UI_DIR)/src ]; then \
	        if [ "$$(readlink $(UI_DIR)/src)" = "$(UI_LOCAL)" ]; then \
	            echo "SKIP: ui/src. $(UI_DIR)/src already linked to $(UI_LOCAL)."; \
	        else \
	            echo "=== Linking $(UI_DIR)/src to $(UI_LOCAL) ==="; \
	            rm -rf $(UI_DIR)/src; \
	            ln -s $(UI_LOCAL) $(UI_DIR)/src; \
	        fi; \
	    else \
	        echo "=== Linking $(UI_DIR)/src to $(UI_LOCAL) ==="; \
	        ln -s $(UI_LOCAL) $(UI_DIR)/src; \
	    fi; \
	elif [ ! -z "$(UI_GIT_REPO)" ]; then \
	    if [ -d $(UI_DIR)/src ]; then \
	        GIT_REMOTE_ORIGIN=$$(cd $(UI_DIR)/src && git remote get-url origin); \
	        GIT_REMOTE_BRANCH=$$(cd $(UI_DIR)/src && git rev-parse --abbrev-ref HEAD); \
	        if [ "$$GIT_REMOTE_ORIGIN" = "$(UI_GIT_REPO)" ] && [ "$$GIT_REMOTE_BRANCH" = "$(UI_GIT_BRANCH)" ]; then \
	            echo "=== Updating $(UI_DIR)/src from $(UI_GIT_BRANCH) of $(UI_GIT_REPO) ==="; \
	            git -C $(UI_DIR)/src fetch && git -C $(UI_DIR)/src pull; \
	        else \
	            echo "=== Cloning $(UI_DIR)/src from $(UI_GIT_BRANCH) of $(UI_GIT_REPO) ==="; \
	            rm -rf $(UI_DIR)/src; \
	            git clone --depth 1 --branch $(UI_GIT_BRANCH) $(UI_GIT_REPO) $(UI_DIR)/src || true; \
	        fi; \
	    else \
	        echo "=== Cloning $(UI_DIR)/src from $(UI_GIT_BRANCH) of $(UI_GIT_REPO) ==="; \
	        git clone --depth 1 --branch $(UI_GIT_BRANCH) $(UI_GIT_REPO) $(UI_DIR)/src || true; \
	    fi; \
	else \
	    echo "FAILED: ui/src. UI_LOCAL and UI_GIT_REPO are not set."; \
	    exit 1; \
	fi

.PHONY: ui/src/webpack
## Install webpack.
ui/src/webpack: $(UI_DIR)/src/node_modules/webpack

## True target for ui/src/webpack.
$(UI_DIR)/src/node_modules/webpack:
	@echo "=== Installing webpack ==="
	@cd $(UI_DIR)/src && npm install webpack

.PHONY: clean/ui
## Clean ui
clean/ui: clean/ui/build clean/ui/src

.PHONY: clean/ui/src
## Clean ui src
clean/ui/src:
	rm -rf $(UI_DIR)/src

.PHONY: clean/ui/build
## Clean ui build
clean/ui/build:
	rm -rf $(UI_DIR)/build

.PHONY: $(UI_DIR)/clean
## Alias for clean/ui, so we can run `make clean` directly in ui
$(UI_DIR)/clean: clean/ui

.PHONY: $(UI_DIR)/clean/src
## Alias for clean/ui/src, so we can run `make clean/src` directly in ui
$(UI_DIR)/clean/src: clean/ui/src

.PHONY: $(UI_DIR)/clean/build
## Alias for clean/ui/build, so we can run `make clean/build` directly in ui
$(UI_DIR)/clean/build: clean/ui/build

View File

@ -1,115 +1,47 @@
# AWX-UI
# Instruction to build ui directly from this directory
## Requirements
- node >= 16.13.1, npm >= 8.x, make, git
## Set src of the ui repo
## Development
The API development server will need to be running. See [CONTRIBUTING.md](../../CONTRIBUTING.md).
### Via GIT
```shell
# install
npm --prefix=awx/ui install
# Start the ui development server. While running, the ui will be reachable
# at https://127.0.0.1:3001 and updated automatically when code changes.
npm --prefix=awx/ui start
```bash
export UI_GIT_REPO=https://<git repo>
```
### Build for the Development Containers
If you just want to build a ui for the container-based awx development
environment and do not need to work on the ui code, use these make targets:
or
```shell
# The ui will be reachable at https://localhost:8043 or
# http://localhost:8013
make ui-devel
# clean up
make clean-ui
```bash
export UI_GIT_REPO=git@<git repo>
```
### Using an External Server
If you normally run awx on an external host/server (in this example, `awx.local`),
you'll need use the `TARGET` environment variable when starting the ui development
server:
optionally set branch (default is main)
```shell
TARGET='https://awx.local:8043' npm --prefix awx/ui start
```bash
export UI_GIT_BRANCH=main
```
## Testing
```shell
# run code formatting check
npm --prefix awx/ui run prettier-check
### Via symlink to existing clone
# run lint checks
npm --prefix awx/ui run lint
NOTE: UI_LOCAL has higher precedence than UI_GIT_REPO; if UI_LOCAL is set, UI_GIT_REPO will be ignored.
# run all unit tests
npm --prefix awx/ui run test
# run a single test (in this case the login page test):
npm --prefix awx/ui test -- src/screens/Login/Login.test.jsx
# start the test watcher and run tests on files that you've changed
npm --prefix awx/ui run test-watch
# start the tests and get the coverage report after the tests have completed
npm --prefix awx/ui run test -- --coverage
```
#### Note:
- Once the test watcher is up and running you can hit `a` to run all the tests.
- All commands are run on your host machine and not in the api development containers.
## Updating Dependencies
It is not uncommon to run the ui development tooling outside of the awx development
container. That said, dependencies should always be modified from within the
container to ensure consistency.
```shell
# make sure the awx development container is running and open a shell
docker exec -it tools_awx_1 bash
# start with a fresh install of the current dependencies
(tools_awx_1)$ make clean-ui && npm --prefix=awx/ui ci
# add an exact development dependency
(tools_awx_1)$ npm --prefix awx/ui install --save-dev --save-exact dev-package@1.2.3
# add an exact production dependency
(tools_awx_1)$ npm --prefix awx/ui install --save --save-exact prod-package@1.23
# remove a development dependency
(tools_awx_1)$ npm --prefix awx/ui uninstall --save-dev dev-package
# remove a production dependency
(tools_awx_1)$ npm --prefix awx/ui uninstall --save prod-package
# exit the container
(tools_awx_1)$ exit
# add the updated package.json and package-lock.json files to scm
git add awx/ui/package.json awx/ui/package-lock.json
```
#### Note:
- Building the ui can use up a lot of resources. If you're running docker for mac or similar
virtualization, the default memory limit may not be enough and you should increase it.
## Building for Production
```shell
# built files are placed in awx/ui/build
npm --prefix awx/ui run build
```bash
export UI_LOCAL=/path/to/your/ui
```
## CI Container
## Build
To run:
```shell
cd awx/awx/ui
docker build -t awx-ui .
docker run --name tools_ui_1 --network _sources_default --link 'tools_awx_1:awx' -e TARGET="https://awx:8043" -p '3001:3001' --rm -v $(pwd)/src:/ui/src awx-ui
```bash
make ui
```
**Note:** This is for CI, test systems, zuul, etc. For local development, see [usage](https://github.com/ansible/awx/blob/devel/awx/ui/README.md#Development)
## Rebuild
```bash
make -B ui
```
## Clean
```bash
make clean/ui
```

View File

@ -1,416 +0,0 @@
# Simple Search
## UX Considerations
Historically, the code that powers search in the AngularJS version of the AWX UI is very complex and prone to bugs. In order to reduce that complexity, we've made some UX decisions to help make the code easier to maintain.
**ALL query params namespaced and in url bar**
This includes lists that aren't necessarily hyperlinked, like lookup lists. The reason behind this is so we can treat the url bar as the source of truth for queries always. Any params that have both a key AND value that is in the defaultParams section of the qs config are stripped out of the search string (see "Encoding for UI vs. API" for more info on this point)
**Django fuzzy search (`?search=`) is not accessible outside of "advanced search"**
In current smart search typing a term with no key utilizes `?search=` i.e. for "foo" tag, `?search=foo` is given. `?search=` looks on a static list of field name "guesses" (such as name, description, etc.), as well as specific fields as defined for each endpoint (for example, the events endpoint looks for a "stdout" field as well). Due to the fact a key will always be present on the left-hand of simple search, it doesn't make sense to use `?search=` as the default.
We may allow passing of `?search=` through our future advanced search interface. Some details that were gathered in planning phases about `?search=` that might be helpful in the future:
- `?search=` tags are OR'd together (union is returned).
- `?search=foo&name=bar` returns items that have a name field of bar (not case insensitive) AND some text field with foo on it
- `?search=foo&search=bar&name=baz` returns (foo in name OR foo in description OR ...) AND (bar in name OR bar in description OR ...) AND (baz in name)
- similarly `?related__search=` looks on the static list of "guesses" for models related to the endpoint. The specific fields are not "searched" for `?related__search=`.
- `?related__search=` not currently used in awx ui
**A note on clicking a tag to putting it back into the search bar**
This was brought up as a nice to have when we were discussing our initial implementation of search in the new application. Since there isn't a way we would be able to know if the user created the tag from the simple or advanced search interface, we wouldn't know where to put it back. This breaks our idea of using the query params as the exclusive source of truth, so we've decided against implementing it for now.
## Tasklist
### DONE
- DONE update handleSearch to follow handleSort param
- DONE update qsConfig columns to utilize isSearchable bool (just like isSortable bool)
- DONE enter keydown in text search bar to search
- DONE get decoded params and write test
- DONE make list header component
- DONE make filter component
- DONE make filters show up for empty list
- DONE make clear all button
- DONE styling of FilterTags component
- DONE clear out text input after tag has been made
- DONE deal with duplicate key tags being added/removed in qs util file
- DONE deal with widgetry changing between one dropdown option to the left of search and many
- DONE bug: figure out why ?name=org returning just org not “org 2”
- DONE update contrib file to have the first section with updated text as is in this pr description.
- DONE rebase with latest awx-pf changes
- DONE styling of search bar
- DONE make filter and list header tests
- DONE change api paramsSerializer to handle duplicate key stuff
- DONE update qs update function to be smaller, simple param functions, as opposed to one big one with a lot of params
- DONE add search filter removal test for qs.
- DONE remove button for search tags of duplicate keys are broken, fix that
### TODO pre-holiday break
- Update COLUMNS to SORT_COLUMNS and SEARCH_COLUMNS
- Update to using new PF Toolbar component (currently an experimental component)
- Change the right-hand input based on the type of key selected on the left-hand side. In addition to text input, for our MVP we will support:
- number input
- select input (multiple-choice configured from UI or Options)
- Update the following lists to have the following keys:
**Jobs list** (signed off earlier in chat)
- Name (which is also the name of the job template) - search is ?name=jt
- Job ID - search is ?id=13
- Label name - search is ?labels\_\_name=foo
- Job type (dropdown on right with the different types) ?type = job
- Created by (username) - search is ?created_by\_\_username=admin
- Status - search (dropdown on right with different statuses) is ?status=successful
Instances of jobs list include:
- Jobs list
- Host completed jobs list
- JT completed jobs list
**Organization list**
- Name - search is ?name=org
- ? Team name (of a team in the org) - search is ?teams\_\_name=ansible
- ? Username (of a user in the org) - search is ?users\_\_username=johndoe
Instances of orgs list include:
- Orgs list
- User orgs list
- Lookup on Project
- Lookup on Credential
- Lookup on Inventory
- User access add wizard list
- Team access add wizard list
**Instance Groups list**
- Name - search is ?name=ig
- ? is_container_group boolean choice (doesn't work right now in API but will soon) - search is ?is_container_group=true
- ? credential name - search is ?credentials\_\_name=kubey
Instance of instance groups list include:
- Lookup on Org
- Lookup on JT
- Lookup on Inventory
**Users list**
- Username - search is ?username=johndoe
- First Name - search is ?first_name=John
- Last Name - search is ?last_name=Doe
- ? (if not superfluous, would not include on Team users list) Team Name - search is ?teams\_\_name=team_of_john_does (note API issue: User has no field named "teams")
- ? (only for access or permissions list) Role Name - search is ?roles\_\_name=Admin (note API issue: Role has no field "name")
- ? (if not superfluous, would not include on Organization users list) Org Name - search is ?organizations\_\_name=org_of_john_does
Instance of user lists include:
- User list
- Org user list
- Access list for Org, JT, Project, Credential, Inventory, User and Team
- Access list for JT
- Access list Project
- Access list for Credential
- Access list for Inventory
- Access list for User
- Access list for Team
- Team add users list
- Users list in access wizard (to add new roles for a particular list) for Org
- Users list in access wizard (to add new roles for a particular list) for JT
- Users list in access wizard (to add new roles for a particular list) for Project
- Users list in access wizard (to add new roles for a particular list) for Credential
- Users list in access wizard (to add new roles for a particular list) for Inventory
**Teams list**
- Name - search is ?name=teamname
- ? Username (of a user in the team) - search is ?users\_\_username=johndoe
- ? (if not superfluous, would not include on Organizations teams list) Org Name - search is ?organizations\_\_name=org_of_john_does
Instance of team lists include:
- Team list
- Org team list
- User team list
- Team list in access wizard (to add new roles for a particular list) for Org
- Team list in access wizard (to add new roles for a particular list) for JT
- Team list in access wizard (to add new roles for a particular list) for Project
- Team list in access wizard (to add new roles for a particular list) for Credential
- Team list in access wizard (to add new roles for a particular list) for Inventory
**Credentials list**
- Name
- ? Type (dropdown on right with different types)
- ? Created by (username)
- ? Modified by (username)
Instance of credential lists include:
- Credential list
- Lookup for JT
- Lookup for Project
- User access add wizard list
- Team access add wizard list
**Projects list**
- Name - search is ?name=proj
- ? Type (dropdown on right with different types) - search is scm_type=git
- ? SCM URL - search is ?scm_url=github.com/ansible/test-playbooks
- ? Created by (username) - search is ?created_by\_\_username=admin
- ? Modified by (username) - search is ?modified_by\_\_username=admin
Instance of project lists include:
- Project list
- Lookup for JT
- User access add wizard list
- Team access add wizard list
**Templates list**
- Name - search is ?name=cleanup
- ? Type (dropdown on right with different types) - search is ?type=playbook_run
- ? Playbook name - search is ?job_template\_\_playbook=debug.yml
- ? Created by (username) - search is ?created_by\_\_username=admin
- ? Modified by (username) - search is ?modified_by\_\_username=admin
Instance of template lists include:
- Template list
- Project Templates list
**Inventories list**
- Name - search is ?name=inv
- ? Created by (username) - search is ?created_by\_\_username=admin
- ? Modified by (username) - search is ?modified_by\_\_username=admin
Instance of inventory lists include:
- Inventory list
- Lookup for JT
- User access add wizard list
- Team access add wizard list
**Groups list**
- Name - search is ?name=group_name
- ? Created by (username) - search is ?created_by\_\_username=admin
- ? Modified by (username) - search is ?modified_by\_\_username=admin
Instance of group lists include:
- Group list
**Hosts list**
- Name - search is ?name=hostname
- ? Created by (username) - search is ?created_by\_\_username=admin
- ? Modified by (username) - search is ?modified_by\_\_username=admin
Instance of host lists include:
- Host list
**Notifications list**
- Name - search is ?name=notification_template_name
- ? Type (dropdown on right with different types) - search is ?type=slack
- ? Created by (username) - search is ?created_by\_\_username=admin
- ? Modified by (username) - search is ?modified_by\_\_username=admin
Instance of notification lists include:
- Org notification list
- JT notification list
- Project notification list
### TODO backlog
- Change the right-hand input based on the type of key selected on the left-hand side. We will eventually want to support:
- lookup input (selection of particular resources, based on API list endpoints)
- date picker input
- Update the following lists to have the following keys:
- Update all \_\_name and \_\_username related field search-based keys to be type-ahead lookup based searches
## Code Details
### Search component
The component looks like this:
```
<Search
qsConfig={qsConfig}
columns={columns}
onSearch={onSearch}
/>
```
**qsConfig** is used to get namespace so that multiple lists can be on the page. When tags are modified they append namespace to query params. The qsConfig is also used to get "type" of fields in order to correctly parse values as int or date as it is translating.
**columns** are passed as an array, as defined in the screen where the list is located. You pass a bool `isDefault` to indicate that should be the key that shows up in the left-hand dropdown as default in the UI. If you don't pass any columns, a default of `isDefault=true` will be added to a name column, which is nearly universally shared throughout the models of awx.
There is a type attribute that can be `'string'`, `'number'` or `'choice'` (and in the future, `'date'` and `'lookup'`), which will change the type of input on the right-hand side of the search bar. For a key that has a set number of choices, you will pass a choices attribute, which is an array in the format choices: [{label: 'Foo', value: 'foo'}]
**onSearch** calls the `mergeParams` qs util in order to add new tags to the queryset. mergeParams is used so that we can support duplicate keys (see mergeParams vs. replaceParams for more info).
### ListHeader component
`DataListToolbar`, `EmptyListControls`, and `FilterTags` components were created or moved to a new sub-component of `PaginatedDataList`, `ListHeader`. This allowed us to consolidate the logic between both lists with data (which need to show search, sort, any search tags currently active, and actions) as well as empty lists (which need to show search tags currently active so they can be removed, potentially getting you back to a "list-has-data" state, as well as a subset of options still valid, such as "add").
The ability to search and remove filters, as well as sort the list is handled through callbacks which are passed from functions defined in `ListHeader`. These are the following:
- `handleSort(key, direction)` - use key and direction of sort to change the order_by value in the queryset
- `handleSearch(key, value)` - use key and value to push a new value to the param
- `handleRemove(key, value)` - use key and value to remove a value to the param
- `handleRemoveAll()` - remove all non-default params
All of these functions act on the react-router history using the `pushHistoryState` function. This causes the query params in the url to update, which in turn triggers change handlers that will re-fetch data for the lists.
**a note on sort_columns and search_columns**
We have split out column configuration into separate search and sort column array props--these are passed to the search and sort columns. Both accept an isDefault prop for one of the items in the array to be the default option selected when going to the page. Sort column items can pass an isNumeric boolean in order to change the iconography of the sort UI element. Search column items can pass type and if applicable choices, in order to configure the right-hand side of the search bar.
### FilterTags component
Similar to the way the list grabs data based on changes to the react-router params, the `FilterTags` component updates when new params are added. This component is a fairly straight-forward map (only slightly complex, because it needed to do a nested map over any values with duplicate keys that were represented by an inner-array). Both key and value are displayed for the tag.
### qs utility
The qs (queryset) utility is used to make the search speak the language of the REST API. The main functions of the utilities are to:
- add, replace and remove filters
- translate filters as url params (for linking and maintaining state), in-memory representation (as JS objects), and params that Django REST Framework understands.
More info in the below sections:
#### Encoding for UI vs. API
For the UI url params, we want to only encode those params that aren't defaults, as the default behavior was defined through configuration and we don't need these in the url as a source of truth. For the API, we need to pass these params so that they are taken into account when the response is built.
#### mergeParams vs. replaceParams
**mergeParams** is used to support putting values with the same key
From a UX perspective, we wanted to be able to support searching on the same key multiple times (i.e. searching for things like `?foo=bar&foo=baz`). We do this by creating an array of all values. i.e.:
```
{
foo: ['bar', 'baz']
}
```
Concatenating terms in this way gives you the intersection of both terms (i.e. foo must be "bar" and "baz"). This is helpful for the most-common type of searching, substring (`__icontains`) searches. This will increase filtering, allowing the user to drill-down into the list as terms are added.
**replaceParams** is used to support sorting, setting page_size, etc. These params only allow one choice, and we need to replace a particular key's value if one is passed.
#### Working with REST API
The REST API is coupled with the qs util through the `paramsSerializer`, due to the fact we need axios to support the array for duplicate key values in the object representation of the params to pass to the get request. This is done where axios is configured in the Base.js file, so all requests and request types should support our array syntax for duplicate keys automatically.
# Advanced Search - this section is a mess, update eventually
**a note on typing in a smart search query**
In order to not support a special "language" or "syntax" for crafting the query like we have now (and is the cause of a large amount of bugs), we will not support the old way of typing in a filter like in the current implementation of search.
Since all search bars are represented in the url, for users who want to input a string to filter results in a single step, typing directly in the url to achieve the filter is acceptable.
# Advanced search notes
Current thinking is Advanced Search will be post-3.6, or at least late 3.6 after awx features and "simple search" with the left dropdown and right input for the above phase 1 lists.
That being said, we want to plan it out so we make sure the infrastructure of how we set up adding/removing tags, what shows up in the url bar, etc. all doesn't have to be redone.
Users will get to advanced search with a button to the right of search bar. When selected type-ahead key thing opens, left dropdown of search bar goes away, and x is given to get back to regular search (this is in the mockups)
It is okay to only make this typing representation available initially (i.e. they start doing stuff with the type-ahead and the phases, no more typing in to make a query that way).
when you click through or type in the search bar for the various phases of crafting the query ("not", "related resource project", "related resource key name", "value foo") which might be represented in the top bar as a series of tags that can be added and removed before submitting the tag.
We will try to form options data from a static file. Because options data is static, we may be able to generate and store as a static file of some sort (that we can use for managing smart search). Alan had ideas around this. If we do this it will mean we don't have to make a ton of requests as we craft smart search filters. It sounds like the cli may start using something similar.
## Smart search flow
Smart search will be able to craft the tag through various states. Note that the phases don't necessarily need to be completed in sequential order.
PHASE 1: prefix operators
**TODO: Double check there's no reason we need to include or\_\_ and chain\_\_ and can just do not\_\_**
- not\_\_
- or\_\_
- chain\_\_
how these work:
To exclude results matching certain criteria, prefix the field parameter with not\_\_:
?not\_\_field=value
By default, all query string filters are AND'ed together, so only the results matching all filters will be returned. To combine results matching any one of multiple criteria, prefix each query string parameter with or\_\_:
?or\_\_field=value&or\_\_field=othervalue
?or\_\_not\_\_field=value&or\_\_field=othervalue
(Added in Ansible Controller 1.4.5) The default AND filtering applies all filters simultaneously to each related object being filtered across database relationships. The chain filter instead applies filters separately for each related object. To use, prefix the query string parameter with chain\_\_:
?chain\_\_related\_\_field=value&chain\_\_related\_\_field2=othervalue
?chain\_\_not\_\_related\_\_field=value&chain\_\_related\_\_field2=othervalue
If the first query above were written as ?related\_\_field=value&related\_\_field2=othervalue, it would return only the primary objects where the same related object satisfied both conditions. As written using the chain filter, it would return the intersection of primary objects matching each condition.
PHASE 2: related fields, given by array, where \_\_search is appended to them, i.e.
```
"related_search_fields": [
"credentials__search",
"labels__search",
"created_by__search",
"modified_by__search",
"notification_templates__search",
"custom_inventory_scripts__search",
"notification_templates_error__search",
"notification_templates_success__search",
"notification_templates_any__search",
"teams__search",
"projects__search",
"inventories__search",
"applications__search",
"workflows__search",
"instance_groups__search"
],
```
PHASE 3: keys, give by object key names for data.actions.GET - type is given for each key which we could use to help craft the value
PHASE 4: after key postfix operators can be
**TODO: will need to figure out which ones we support**
- exact: Exact match (default lookup if not specified).
- iexact: Case-insensitive version of exact.
- contains: Field contains value.
- icontains: Case-insensitive version of contains.
- startswith: Field starts with value.
- istartswith: Case-insensitive version of startswith.
- endswith: Field ends with value.
- iendswith: Case-insensitive version of endswith.
- regex: Field matches the given regular expression.
- iregex: Case-insensitive version of regex.
- gt: Greater than comparison.
- gte: Greater than or equal to comparison.
- lt: Less than comparison.
- lte: Less than or equal to comparison.
- isnull: Check whether the given field or related object is null; expects a boolean value.
- in: Check whether the given field's value is present in the list provided; expects a list of items.
PHASE 5: The value. Based on options, we can give hints or validation based on type of value (like number fields don't accept "foo" or whatever)

View File

@ -1,27 +0,0 @@
# Application Architecture
## Local Storage Integration
The `useStorage` hook integrates with the browser's localStorage api.
It accepts a localStorage key as its only argument and returns a state
variable and setter function for that state variable. The hook enables
bidirectional data transfer between tabs via an event listener that
is registered with the Web Storage api.
![Sequence Diagram for useStorage](images/useStorage.png)
The `useStorage` hook currently lives in the `AppContainer` component. It
can be relocated to a more general location if and when the need
ever arises.
## Session Expiration
Session timeout state is communicated to the client in the HTTP(S)
response headers. Every HTTP(S) response is intercepted to read the
session expiration time before being passed into the rest of the
application. A timeout date is computed from the intercepted HTTP(S)
headers and is pushed into local storage, where it can be read using
standard Web Storage apis or other utilities, such as `useStorage`.
![Sequence Diagram for session expiration](images/sessionExpiration.png)

View File

@ -1,64 +0,0 @@
This document is meant to provide some guidance into the functionality of Job Output and its features.
## Overview of the feature/screen. Summary of what it does/is
Job output is a feature that allows users to see how their job is doing as it is being run.
This feature displays data sent to the UI via websockets that are connected to several
different endpoints in the API.
The job output has 2 different states that result in different functionality. One state
is when the job is actively running. There is limited functionality because of how the
job events are processed when they reach the UI. While the job is running, and
output is coming into the UI, the following features turn off:
1. [Search](#Search)- The ability to search the output of a job.
2. [Expand/Collapse](#Expand/Collapse)- The ability to expand and collapse job events, tasks, plays, or even the
job itself. The only part of the job output that is not collapsible is the playbook summary (only jobs that
are executed from a Job Template have Expand/Collapse functionality).
The following features are enabled:
1. Follow/unfollow - `Follow` indicates you are streaming the output on the screen
as it comes into the UI. If you see some output that you want to examine closer while the job is running
scroll to it, and click `Unfollow`, and the output will stop streaming onto the screen. This feature is only
enabled when the job is running and is not complete. If the user scrolls up in the output the UI will unfollow.
2. Page up and page down buttons- Use these buttons to navigate quickly up and down the output.
![Running job](images/JobOutput-running.png)
After the job is complete, the Follow/Unfollow button is disabled, and Expand/Collapse and Search become enabled.
![Finished job](images/JobOutput-complete.png)
Not all job types are created equal. Some jobs have a concept of parent-child events. Job events can be inside a Task,
a Task can be inside a Play, and a Play inside a Playbook. Leveraging this concept to enable Expand/Collapse for these
job types, allows you to collapse and hide the children of a particular line of output. This parent-child event
relationship only exists on jobs executed from a job template. All other types of jobs do not
have this event concept, and therefore, do not have Expand/Collapse functionality. By default all job
events are expanded.
## How output works generally.
1. Explain the different state components
2. Page up and page down and whats happening in the background.
## Different type of job events, and how they relate to the state object
1. Tasks
2. plays
3. events
## Non-standard cases
1. When an event comes into the output that has a parent, but the parent hasn't arrived yet.
2. When an event with children arrives in output, but the children are not yet present.
## Expand/Collapse
### Expand collapse a single event - how it works and how it changes the state object
### Expand collapse all - how it works and how it changes the state object
## Search
1. During job run
2. After job run

Binary file not shown.

Before

Width:  |  Height:  |  Size: 22 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 26 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 70 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

View File

@ -1,5 +0,0 @@
{
"compilerOptions": {
"baseUrl": "src"
}
}

39515
awx/ui/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,121 +0,0 @@
{
"name": "ui",
"homepage": ".",
"private": true,
"engines": {
"node": ">=16.13.1"
},
"dependencies": {
"@lingui/react": "3.14.0",
"@patternfly/patternfly": "4.224.2",
"@patternfly/react-core": "4.276.8",
"@patternfly/react-icons": "4.93.6",
"@patternfly/react-table": "4.113.0",
"ace-builds": "^1.10.1",
"ansi-to-html": "0.7.2",
"axios": "^1.6.7",
"d3": "7.6.1",
"dagre": "^0.8.4",
"dompurify": "2.4.0",
"formik": "2.2.9",
"has-ansi": "5.0.1",
"html-entities": "2.3.2",
"js-yaml": "4.1.0",
"luxon": "^3.1.1",
"prop-types": "^15.8.1",
"react": "17.0.2",
"react-ace": "^10.1.0",
"react-dom": "17.0.2",
"react-error-boundary": "^3.1.4",
"react-router-dom": "^5.3.3",
"react-virtualized": "^9.21.1",
"rrule": "2.7.1",
"styled-components": "5.3.6"
},
"devDependencies": {
"@babel/core": "^7.22.9",
"@babel/eslint-parser": "^7.22.9",
"@babel/eslint-plugin": "^7.22.10",
"@babel/plugin-syntax-jsx": "^7.22.5",
"@babel/polyfill": "^7.12.1",
"@babel/preset-react": "^7.22.5",
"@cypress/instrument-cra": "^1.4.0",
"@lingui/cli": "^3.7.1",
"@lingui/loader": "3.15.0",
"@lingui/macro": "^3.7.1",
"@nteract/mockument": "^1.0.4",
"@testing-library/dom": "^8.18.1",
"@testing-library/jest-dom": "^5.16.2",
"@testing-library/react": "^12.1.5",
"@testing-library/user-event": "14.4.3",
"@wojtekmaj/enzyme-adapter-react-17": "0.6.5",
"babel-plugin-macros": "3.1.0",
"enzyme": "^3.10.0",
"enzyme-adapter-react-16": "^1.14.0",
"enzyme-to-json": "^3.3.5",
"eslint": "^8.7.0",
"eslint-config-airbnb": "19.0.4",
"eslint-config-prettier": "8.3.0",
"eslint-import-resolver-webpack": "0.13.2",
"eslint-plugin-i18next": "5.2.1",
"eslint-plugin-import": "2.25.4",
"eslint-plugin-jsx-a11y": "6.5.1",
"eslint-plugin-react": "7.28.0",
"eslint-plugin-react-hooks": "4.3.0",
"http-proxy-middleware": "^1.0.3",
"jest-websocket-mock": "^2.0.2",
"mock-socket": "^9.1.3",
"prettier": "2.3.2",
"react-scripts": "5.0.1"
},
"scripts": {
"prelint": "lingui compile",
"prestart": "lingui compile",
"prestart-instrumented": "lingui compile",
"pretest": "lingui compile",
"pretest-watch": "lingui compile",
"start": "GENERATE_SOURCEMAP=false ESLINT_NO_DEV_ERRORS=true PORT=3001 HTTPS=true DANGEROUSLY_DISABLE_HOST_CHECK=true react-scripts start",
"start-instrumented": "ESLINT_NO_DEV_ERRORS=true DEBUG=instrument-cra PORT=3001 HTTPS=true DANGEROUSLY_DISABLE_HOST_CHECK=true react-scripts -r @cypress/instrument-cra start",
"build": "INLINE_RUNTIME_CHUNK=false react-scripts build",
"test": "TZ='UTC' react-scripts test --watchAll=false",
"test-screens": "TZ='UTC' react-scripts test screens --watchAll=false",
"test-general": "TZ='UTC' react-scripts test --testPathIgnorePatterns='<rootDir>/src/screens/' --watchAll=false",
"test-watch": "TZ='UTC' react-scripts test",
"eject": "react-scripts eject",
"lint": "eslint --ext .js --ext .jsx .",
"extract-strings": "lingui extract",
"extract-template": "lingui extract-template",
"compile-strings": "lingui compile",
"prettier": "prettier --write \"src/**/*.{js,jsx,scss}\"",
"prettier-check": "prettier --check \"src/**/*.{js,jsx,scss}\""
},
"browserslist": {
"production": [
">0.2%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
},
"jest": {
"snapshotSerializers": [
"enzyme-to-json/serializer"
],
"collectCoverageFrom": [
"src/**/*.{js,jsx}",
"testUtils/**/*.{js,jsx}"
],
"coveragePathIgnorePatterns": [
"<rootDir>/src/locales",
"index.js"
],
"transformIgnorePatterns": [
"<rootDir>/node_modules/(?!d3)/",
"<rootDir>/node_modules/(?!has-ansi)/"
]
}
}

View File

@ -1,8 +1,9 @@
<html>
<head><title>UI Next Missing</title></head>
<!DOCTYPE html>
<html lang="en" xml:lang="en">
<head><title>UI Missing</title></head>
<body style="background-color: black; color: white;">
<div style="display: flex; justify-content: center; align-items: center; text-align: center; min-height: 100vh;">
<h1>Oops... Looks like the UI Next wasn't properly built</h1>
<h1>Oops... Looks like the UI wasn't properly built</h1>
</div>
</body>
</html>

View File

@ -1,49 +0,0 @@
<!-- There's multiple layers of templating in this file:
* "< ... >" with % symbols are ejs templates used by react-scripts at build time. These
templates are mainly used to check whether or not we're building a ui for production
versus one that will be sent from the ui dev server. Since this type of template is
applied at build time, it can be used to conditionally render the others.
* "% ... %" are templates used by the react-scripts dev server when serving the ui from
port 3001. These are applied at runtime and only work for development mode.
* "{ ... }" with % symbols and "{{ ... }}" are django templates that only run for
production builds (e.g port 8043) when serving the ui from a webserver.
-->
<% if (process.env.NODE_ENV === 'production') { %>
{% load static %}
<% } %>
<!DOCTYPE html>
<html lang="en">
<head>
<% if (process.env.NODE_ENV === 'production') { %>
<script nonce="{{ csp_nonce }}" type="text/javascript">
window.NONCE_ID = '{{ csp_nonce }}';
</script>
<meta
http-equiv="Content-Security-Policy"
content="default-src 'self'; connect-src 'self' ws: wss:; style-src 'self' 'unsafe-inline'; script-src 'self' 'nonce-{{ csp_nonce }}' *.pendo.io; img-src 'self' *.pendo.io data:; worker-src 'self' blob: ;"
/>
<link rel="shortcut icon" href="{% static 'media/favicon.ico' %}" />
<% } else { %>
<link rel="shortcut icon" href="%PUBLIC_URL%/static/media/favicon.ico" />
<% } %>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<meta
name="description"
content="AWX"
/>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<% if (process.env.NODE_ENV === 'production') { %>
<style nonce="{{ csp_nonce }}">.app{height: 100%;}</style><div id="app" class="app"></div>
<% } else { %>
<div id="app" style="height: 100%"></div>
<% } %>
</body>
</html>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,3 +0,0 @@
/* eslint-disable */
// https://d3js.org/d3-collection/ v1.0.7 Copyright 2018 Mike Bostock
!function(n,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t(n.d3=n.d3||{})}(this,function(n){"use strict";function t(){}function e(n,e){var r=new t;if(n instanceof t)n.each(function(n,t){r.set(t,n)});else if(Array.isArray(n)){var i,u=-1,o=n.length;if(null==e)for(;++u<o;)r.set(u,n[u]);else for(;++u<o;)r.set(e(i=n[u],u,n),i)}else if(n)for(var s in n)r.set(s,n[s]);return r}function r(){return{}}function i(n,t,e){n[t]=e}function u(){return e()}function o(n,t,e){n.set(t,e)}function s(){}t.prototype=e.prototype={constructor:t,has:function(n){return"$"+n in this},get:function(n){return this["$"+n]},set:function(n,t){return this["$"+n]=t,this},remove:function(n){var t="$"+n;return t in this&&delete this[t]},clear:function(){for(var n in this)"$"===n[0]&&delete this[n]},keys:function(){var n=[];for(var t in this)"$"===t[0]&&n.push(t.slice(1));return n},values:function(){var n=[];for(var t in this)"$"===t[0]&&n.push(this[t]);return n},entries:function(){var n=[];for(var t in this)"$"===t[0]&&n.push({key:t.slice(1),value:this[t]});return n},size:function(){var n=0;for(var t in this)"$"===t[0]&&++n;return n},empty:function(){for(var n in this)if("$"===n[0])return!1;return!0},each:function(n){for(var t in this)"$"===t[0]&&n(this[t],t.slice(1),this)}};var f=e.prototype;function c(n,t){var e=new s;if(n instanceof s)n.each(function(n){e.add(n)});else if(n){var r=-1,i=n.length;if(null==t)for(;++r<i;)e.add(n[r]);else for(;++r<i;)e.add(t(n[r],r,n))}return e}s.prototype=c.prototype={constructor:s,has:f.has,add:function(n){return this["$"+(n+="")]=n,this},remove:f.remove,clear:f.clear,values:f.keys,size:f.size,empty:f.empty,each:f.each},n.nest=function(){var n,t,s,f=[],c=[];function a(r,i,u,o){if(i>=f.length)return null!=n&&r.sort(n),null!=t?t(r):r;for(var s,c,h,l=-1,v=r.length,p=f[i++],y=e(),d=u();++l<v;)(h=y.get(s=p(c=r[l])+""))?h.push(c):y.set(s,[c]);return 
y.each(function(n,t){o(d,t,a(n,i,u,o))}),d}return s={object:function(n){return a(n,0,r,i)},map:function(n){return a(n,0,u,o)},entries:function(n){return function n(e,r){if(++r>f.length)return e;var i,u=c[r-1];return null!=t&&r>=f.length?i=e.entries():(i=[],e.each(function(t,e){i.push({key:e,values:n(t,r)})})),null!=u?i.sort(function(n,t){return u(n.key,t.key)}):i}(a(n,0,u,o),0)},key:function(n){return f.push(n),s},sortKeys:function(n){return c[f.length-1]=n,s},sortValues:function(t){return n=t,s},rollup:function(n){return t=n,s}}},n.set=c,n.map=e,n.keys=function(n){var t=[];for(var e in n)t.push(e);return t},n.values=function(n){var t=[];for(var e in n)t.push(n[e]);return t},n.entries=function(n){var t=[];for(var e in n)t.push({key:e,value:n[e]});return t},Object.defineProperty(n,"__esModule",{value:!0})});

View File

@ -1,3 +0,0 @@
/* eslint-disable */
// https://d3js.org/d3-dispatch/ v1.0.6 Copyright 2019 Mike Bostock
!function(n,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((n=n||self).d3=n.d3||{})}(this,function(n){"use strict";var e={value:function(){}};function t(){for(var n,e=0,t=arguments.length,o={};e<t;++e){if(!(n=arguments[e]+"")||n in o||/[\s.]/.test(n))throw new Error("illegal type: "+n);o[n]=[]}return new r(o)}function r(n){this._=n}function o(n,e){return n.trim().split(/^|\s+/).map(function(n){var t="",r=n.indexOf(".");if(r>=0&&(t=n.slice(r+1),n=n.slice(0,r)),n&&!e.hasOwnProperty(n))throw new Error("unknown type: "+n);return{type:n,name:t}})}function i(n,e){for(var t,r=0,o=n.length;r<o;++r)if((t=n[r]).name===e)return t.value}function f(n,t,r){for(var o=0,i=n.length;o<i;++o)if(n[o].name===t){n[o]=e,n=n.slice(0,o).concat(n.slice(o+1));break}return null!=r&&n.push({name:t,value:r}),n}r.prototype=t.prototype={constructor:r,on:function(n,e){var t,r=this._,l=o(n+"",r),u=-1,a=l.length;if(!(arguments.length<2)){if(null!=e&&"function"!=typeof e)throw new Error("invalid callback: "+e);for(;++u<a;)if(t=(n=l[u]).type)r[t]=f(r[t],n.name,e);else if(null==e)for(t in r)r[t]=f(r[t],n.name,null);return this}for(;++u<a;)if((t=(n=l[u]).type)&&(t=i(r[t],n.name)))return t},copy:function(){var n={},e=this._;for(var t in e)n[t]=e[t].slice();return new r(n)},call:function(n,e){if((t=arguments.length-2)>0)for(var t,r,o=new Array(t),i=0;i<t;++i)o[i]=arguments[i+2];if(!this._.hasOwnProperty(n))throw new Error("unknown type: "+n);for(i=0,t=(r=this._[n]).length;i<t;++i)r[i].value.apply(e,o)},apply:function(n,e,t){if(!this._.hasOwnProperty(n))throw new Error("unknown type: "+n);for(var r=this._[n],o=0,i=r.length;o<i;++o)r[o].value.apply(e,t)}},n.dispatch=t,Object.defineProperty(n,"__esModule",{value:!0})});

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,3 +0,0 @@
/* eslint-disable */
// https://d3js.org/d3-timer/ v1.0.10 Copyright 2019 Mike Bostock
!function(t,n){"object"==typeof exports&&"undefined"!=typeof module?n(exports):"function"==typeof define&&define.amd?define(["exports"],n):n((t=t||self).d3=t.d3||{})}(this,function(t){"use strict";var n,e,o=0,i=0,r=0,u=1e3,l=0,c=0,f=0,a="object"==typeof performance&&performance.now?performance:Date,s="object"==typeof window&&window.requestAnimationFrame?window.requestAnimationFrame.bind(window):function(t){setTimeout(t,17)};function _(){return c||(s(m),c=a.now()+f)}function m(){c=0}function p(){this._call=this._time=this._next=null}function w(t,n,e){var o=new p;return o.restart(t,n,e),o}function d(){_(),++o;for(var t,e=n;e;)(t=c-e._time)>=0&&e._call.call(null,t),e=e._next;--o}function h(){c=(l=a.now())+f,o=i=0;try{d()}finally{o=0,function(){var t,o,i=n,r=1/0;for(;i;)i._call?(r>i._time&&(r=i._time),t=i,i=i._next):(o=i._next,i._next=null,i=t?t._next=o:n=o);e=t,v(r)}(),c=0}}function y(){var t=a.now(),n=t-l;n>u&&(f-=n,l=t)}function v(t){o||(i&&(i=clearTimeout(i)),t-c>24?(t<1/0&&(i=setTimeout(h,t-a.now()-f)),r&&(r=clearInterval(r))):(r||(l=a.now(),r=setInterval(y,u)),o=1,s(h)))}p.prototype=w.prototype={constructor:p,restart:function(t,o,i){if("function"!=typeof t)throw new TypeError("callback is not a function");i=(null==i?_():+i)+(null==o?0:+o),this._next||e===this||(e?e._next=this:n=this,e=this),this._call=t,this._time=i,v()},stop:function(){this._call&&(this._call=null,this._time=1/0,v())}},t.interval=function(t,n,e){var o=new p,i=n;return null==n?(o.restart(t,n,e),o):(n=+n,e=null==e?_():+e,o.restart(function r(u){u+=i,o.restart(r,i+=n,e),t(u)},n,e),o)},t.now=_,t.timeout=function(t,n,e){var o=new p;return n=null==n?0:+n,o.restart(function(e){o.stop(),t(e+n)},n,e),o},t.timer=w,t.timerFlush=d,Object.defineProperty(t,"__esModule",{value:!0})});

View File

@ -1,197 +0,0 @@
import React, { useEffect } from 'react';
import {
useRouteMatch,
useLocation,
HashRouter,
Route,
Switch,
Redirect,
useHistory,
} from 'react-router-dom';
import { ErrorBoundary } from 'react-error-boundary';
import { I18nProvider } from '@lingui/react';
import { i18n } from '@lingui/core';
import { Card, PageSection } from '@patternfly/react-core';
import {
ConfigProvider,
useAuthorizedPath,
useUserProfile,
} from 'contexts/Config';
import { SessionProvider, useSession } from 'contexts/Session';
import AppContainer from 'components/AppContainer';
import Background from 'components/Background';
import ContentError from 'components/ContentError';
import NotFound from 'screens/NotFound';
import Login from 'screens/Login';
import { isAuthenticated } from 'util/auth';
import { getLanguageWithoutRegionCode } from 'util/language';
import Metrics from 'screens/Metrics';
import SubscriptionEdit from 'screens/Setting/Subscription/SubscriptionEdit';
import useTitle from 'hooks/useTitle';
import { dynamicActivate, locales } from './i18nLoader';
import getRouteConfig from './routeConfig';
import { SESSION_REDIRECT_URL } from './constants';
function ErrorFallback({ error }) {
return (
<PageSection>
<Card>
<ContentError error={error} />
</Card>
</PageSection>
);
}
const RenderAppContainer = () => {
const userProfile = useUserProfile();
const navRouteConfig = getRouteConfig(userProfile);
return (
<AppContainer navRouteConfig={navRouteConfig}>
<AuthorizedRoutes routeConfig={navRouteConfig} />
</AppContainer>
);
};
// Renders the routed screens for an authenticated user. When the user is not
// authorized for the full application (per useAuthorizedPath), the UI is
// locked down so only the subscription management screen is reachable.
const AuthorizedRoutes = ({ routeConfig }) => {
  const isAuthorized = useAuthorizedPath();
  const match = useRouteMatch();
  if (!isAuthorized) {
    // Unauthorized: every path redirects to /subscription_management.
    return (
      <Switch>
        <ProtectedRoute
          key="/subscription_management"
          path="/subscription_management"
        >
          <PageSection>
            <Card>
              <SubscriptionEdit />
            </Card>
          </PageSection>
        </ProtectedRoute>
        <Route path="*">
          <Redirect to="/subscription_management" />
        </Route>
      </Switch>
    );
  }
  // Authorized: flatten the grouped route config into one ProtectedRoute per
  // screen, then append /metrics and the catch-all NotFound route last so
  // they do not shadow configured paths.
  return (
    <Switch>
      {routeConfig
        .flatMap(({ routes }) => routes)
        .map(({ path, screen: Screen }) => (
          <ProtectedRoute key={path} path={path}>
            <Screen match={match} />
          </ProtectedRoute>
        ))
        .concat(
          <ProtectedRoute key="metrics" path="/metrics">
            <Metrics />
          </ProtectedRoute>,
          <ProtectedRoute key="not-found" path="*">
            <NotFound />
          </ProtectedRoute>
        )}
    </Switch>
  );
};
// Route wrapper that only renders its children for an authenticated session.
// Unauthenticated users are redirected to /login, or hard-redirected to an
// external login page when loginRedirectOverride is configured.
export function ProtectedRoute({ children, ...rest }) {
  const {
    authRedirectTo,
    isUserBeingLoggedOut,
    loginRedirectOverride,
    setAuthRedirectTo,
  } = useSession();
  const location = useLocation();
  // Record the current path on every render so a later login can return the
  // user here; a pending '/logout' target is reset to the root path.
  useEffect(() => {
    setAuthRedirectTo(
      authRedirectTo === '/logout'
        ? '/'
        : `${location.pathname}${location.search}`
    );
  });
  if (isAuthenticated(document.cookie)) {
    // Wrap children in an error boundary so a crashing screen renders the
    // ErrorFallback card instead of taking down the whole app.
    return (
      <Route {...rest}>
        <ErrorBoundary FallbackComponent={ErrorFallback}>
          {children}
        </ErrorBoundary>
      </Route>
    );
  }
  if (
    loginRedirectOverride &&
    !window.location.href.includes('/login') &&
    !isUserBeingLoggedOut
  ) {
    // External (e.g. SSO) login page: leave the SPA entirely.
    window.location.replace(loginRedirectOverride);
    return null;
  }
  return <Redirect to="/login" />;
}
function App() {
const history = useHistory();
const { hash, search, pathname } = useLocation();
const searchParams = Object.fromEntries(new URLSearchParams(search));
const pseudolocalization =
searchParams.pseudolocalization === 'true' || false;
let language =
searchParams.lang || getLanguageWithoutRegionCode(navigator) || 'en';
if (!Object.keys(locales).includes(language)) {
// If there isn't a string catalog available for the browser's
// preferred language, default to one that has strings.
language = 'en';
}
useEffect(() => {
dynamicActivate(language, pseudolocalization);
}, [language, pseudolocalization]);
useTitle();
const redirectURL = window.sessionStorage.getItem(SESSION_REDIRECT_URL);
if (redirectURL) {
window.sessionStorage.removeItem(SESSION_REDIRECT_URL);
if (redirectURL !== '/' || redirectURL !== '/home')
history.replace(redirectURL);
}
return (
<I18nProvider i18n={i18n}>
<Background>
<SessionProvider>
<Switch>
<Route exact strict path="/*/">
<Redirect to={`${pathname.slice(0, -1)}${search}${hash}`} />
</Route>
<Route path="/login">
<Login isAuthenticated={isAuthenticated} />
</Route>
<Route exact path="/">
<Redirect to="/home" />
</Route>
<ProtectedRoute>
<ConfigProvider>
<RenderAppContainer />
</ConfigProvider>
</ProtectedRoute>
</Switch>
</SessionProvider>
</Background>
</I18nProvider>
);
}
// Root export: mount App inside a HashRouter so routing state lives in the
// URL hash and works without any server-side route handling.
export default () => (
  <HashRouter>
    <App />
  </HashRouter>
);

View File

@ -1,71 +0,0 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { RootAPI } from 'api';
import * as SessionContext from 'contexts/Session';
import { shallow } from 'enzyme';
import { mountWithContexts } from '../testUtils/enzymeHelpers';
import App, { ProtectedRoute } from './App';
jest.mock('./api');
jest.mock('util/webWorker', () => jest.fn());
// Unit tests for the root <App /> component and ProtectedRoute redirect
// behavior; the session context is mocked so no real auth round-trip occurs.
describe('<App />', () => {
  beforeEach(() => {
    // Asset variables are fetched at startup; stub the brand name response.
    RootAPI.readAssetVariables.mockResolvedValue({
      data: {
        BRAND_NAME: 'AWX',
      },
    });
  });
  test('renders ok', async () => {
    const contextValues = {
      setAuthRedirectTo: jest.fn(),
      isSessionExpired: false,
      isUserBeingLoggedOut: false,
      loginRedirectOverride: null,
    };
    jest
      .spyOn(SessionContext, 'useSession')
      .mockImplementation(() => contextValues);
    let wrapper;
    await act(async () => {
      wrapper = shallow(<App />);
    });
    expect(wrapper.length).toBe(1);
    jest.clearAllMocks();
  });
  test('redirect to login override', async () => {
    // Replace window.location so the hard redirect can be observed.
    const { location } = window;
    delete window.location;
    window.location = {
      replace: jest.fn(),
      href: '/',
    };
    expect(window.location.replace).not.toHaveBeenCalled();
    const contextValues = {
      setAuthRedirectTo: jest.fn(),
      isSessionExpired: false,
      isUserBeingLoggedOut: false,
      loginRedirectOverride: '/sso/test',
    };
    jest
      .spyOn(SessionContext, 'useSession')
      .mockImplementation(() => contextValues);
    await act(async () => {
      mountWithContexts(
        <ProtectedRoute>
          <div>foo</div>
        </ProtectedRoute>
      );
    });
    expect(window.location.replace).toHaveBeenCalled();
    // Restore the real window.location for subsequent tests.
    window.location = location;
  });
});

View File

@ -1,70 +0,0 @@
/* eslint-disable default-param-last */
import axios from 'axios';
import { encodeQueryString } from 'util/qs';
import debounce from 'util/debounce';
import { SESSION_TIMEOUT_KEY } from '../constants';
// Debounced localStorage writer: persists the value and fires a synthetic
// 'storage' event so listeners in this tab also react (the native event only
// fires in other tabs).
const updateStorage = debounce((key, val) => {
  window.localStorage.setItem(key, val);
  window.dispatchEvent(new Event('storage'));
}, 500);
// Shared axios instance configured for Django CSRF protection and the
// project's query-string encoding.
const defaultHttp = axios.create({
  xsrfCookieName: 'csrftoken',
  xsrfHeaderName: 'X-CSRFToken',
  paramsSerializer(params) {
    return encodeQueryString(params);
  },
});
// Responses may carry a 'session-timeout' header (seconds until expiry);
// convert it to an absolute timestamp and publish it via localStorage so the
// session-expiration logic can pick it up.
defaultHttp.interceptors.response.use((response) => {
  const timeout = response?.headers['session-timeout'];
  if (timeout) {
    const timeoutDate = new Date().getTime() + timeout * 1000;
    updateStorage(SESSION_TIMEOUT_KEY, String(timeoutDate));
  }
  return response;
});
// Thin REST resource wrapper. Each method maps one CRUD operation onto the
// endpoint rooted at `baseUrl`, delegating to the injected axios-like client
// and returning its promise unchanged.
class Base {
  // http: client exposing get/post/put/patch/delete/options.
  // baseURL: collection endpoint ending in a slash, e.g. '/api/v2/hosts/'.
  constructor(http = defaultHttp, baseURL) {
    this.http = http;
    this.baseUrl = baseURL;
  }

  // POST a new resource to the collection endpoint.
  create(data) {
    return this.http.post(this.baseUrl, data);
  }

  // DELETE a single resource by id.
  destroy(id) {
    const url = `${this.baseUrl}${id}/`;
    return this.http.delete(url);
  }

  // GET the collection, forwarding any query params.
  read(params) {
    return this.http.get(this.baseUrl, { params });
  }

  // GET a single resource by id.
  readDetail(id) {
    const url = `${this.baseUrl}${id}/`;
    return this.http.get(url);
  }

  // OPTIONS request against the collection endpoint.
  readOptions() {
    return this.http.options(this.baseUrl);
  }

  // PUT (full replace) a single resource.
  replace(id, data) {
    const url = `${this.baseUrl}${id}/`;
    return this.http.put(url, data);
  }

  // PATCH (partial update) a single resource.
  update(id, data) {
    const url = `${this.baseUrl}${id}/`;
    return this.http.patch(url, data);
  }

  // POST to the resource's copy endpoint to duplicate it.
  copy(id, data) {
    const url = `${this.baseUrl}${id}/copy/`;
    return this.http.post(url, data);
  }
}
export default Base;

View File

@ -1,106 +0,0 @@
import Base from './Base';
// Unit tests for the Base API model: each CRUD helper should call the
// matching HTTP verb on the injected client with the expected URL/payload.
describe('Base', () => {
  const mockBaseURL = '/api/v2/organizations/';
  let BaseAPI;
  let mockHttp;
  beforeEach(() => {
    // Every verb resolves an empty promise; only call shapes are asserted.
    const createPromise = () => Promise.resolve();
    mockHttp = {
      delete: jest.fn(createPromise),
      get: jest.fn(createPromise),
      options: jest.fn(createPromise),
      patch: jest.fn(createPromise),
      post: jest.fn(createPromise),
      put: jest.fn(createPromise),
    };
    BaseAPI = new Base(mockHttp, mockBaseURL);
  });
  afterEach(() => {
    jest.resetAllMocks();
  });
  test('create calls http method with expected data', async () => {
    const data = { name: 'test ' };
    await BaseAPI.create(data);
    expect(mockHttp.post).toHaveBeenCalledTimes(1);
    expect(mockHttp.post.mock.calls[0][1]).toEqual(data);
  });
  test('destroy calls http method with expected data', async () => {
    const resourceId = 1;
    await BaseAPI.destroy(resourceId);
    expect(mockHttp.delete).toHaveBeenCalledTimes(1);
    expect(mockHttp.delete.mock.calls[0][0]).toEqual(
      `${mockBaseURL}${resourceId}/`
    );
  });
  test('read calls http method with expected data', async () => {
    // Covers params, no params, and duplicate-key (array) params.
    const testParams = { foo: 'bar' };
    const testParamsDuplicates = { foo: ['bar', 'baz'] };
    await BaseAPI.read(testParams);
    await BaseAPI.read();
    await BaseAPI.read(testParamsDuplicates);
    expect(mockHttp.get).toHaveBeenCalledTimes(3);
    expect(mockHttp.get.mock.calls[0][0]).toEqual(`${mockBaseURL}`);
    expect(mockHttp.get.mock.calls[0][1]).toEqual({ params: { foo: 'bar' } });
    expect(mockHttp.get.mock.calls[1][0]).toEqual(`${mockBaseURL}`);
    expect(mockHttp.get.mock.calls[1][1]).toEqual({ params: undefined });
    expect(mockHttp.get.mock.calls[2][0]).toEqual(`${mockBaseURL}`);
    expect(mockHttp.get.mock.calls[2][1]).toEqual({
      params: { foo: ['bar', 'baz'] },
    });
  });
  test('readDetail calls http method with expected data', async () => {
    const resourceId = 1;
    await BaseAPI.readDetail(resourceId);
    expect(mockHttp.get).toHaveBeenCalledTimes(1);
    expect(mockHttp.get.mock.calls[0][0]).toEqual(
      `${mockBaseURL}${resourceId}/`
    );
  });
  test('readOptions calls http method with expected data', async () => {
    await BaseAPI.readOptions();
    expect(mockHttp.options).toHaveBeenCalledTimes(1);
    expect(mockHttp.options.mock.calls[0][0]).toEqual(`${mockBaseURL}`);
  });
  test('replace calls http method with expected data', async () => {
    const resourceId = 1;
    const data = { name: 'test ' };
    await BaseAPI.replace(resourceId, data);
    expect(mockHttp.put).toHaveBeenCalledTimes(1);
    expect(mockHttp.put.mock.calls[0][0]).toEqual(
      `${mockBaseURL}${resourceId}/`
    );
    expect(mockHttp.put.mock.calls[0][1]).toEqual(data);
  });
  test('update calls http method with expected data', async () => {
    const resourceId = 1;
    const data = { name: 'test ' };
    await BaseAPI.update(resourceId, data);
    expect(mockHttp.patch).toHaveBeenCalledTimes(1);
    expect(mockHttp.patch.mock.calls[0][0]).toEqual(
      `${mockBaseURL}${resourceId}/`
    );
    expect(mockHttp.patch.mock.calls[0][1]).toEqual(data);
  });
});

View File

@ -1,154 +0,0 @@
import ActivityStream from './models/ActivityStream';
import AdHocCommands from './models/AdHocCommands';
import Applications from './models/Applications';
import Auth from './models/Auth';
import Config from './models/Config';
import CredentialInputSources from './models/CredentialInputSources';
import CredentialTypes from './models/CredentialTypes';
import Credentials from './models/Credentials';
import ConstructedInventories from './models/ConstructedInventories';
import Dashboard from './models/Dashboard';
import ExecutionEnvironments from './models/ExecutionEnvironments';
import Groups from './models/Groups';
import Hosts from './models/Hosts';
import InstanceGroups from './models/InstanceGroups';
import Instances from './models/Instances';
import Inventories from './models/Inventories';
import InventoryScripts from './models/InventoryScripts';
import InventorySources from './models/InventorySources';
import InventoryUpdates from './models/InventoryUpdates';
import JobTemplates from './models/JobTemplates';
import Jobs from './models/Jobs';
import JobEvents from './models/JobEvents';
import Labels from './models/Labels';
import Me from './models/Me';
import Mesh from './models/Mesh';
import Metrics from './models/Metrics';
import NotificationTemplates from './models/NotificationTemplates';
import Notifications from './models/Notifications';
import Organizations from './models/Organizations';
import ProjectUpdates from './models/ProjectUpdates';
import Projects from './models/Projects';
import ReceptorAddresses from './models/Receptor';
import Roles from './models/Roles';
import Root from './models/Root';
import Schedules from './models/Schedules';
import Settings from './models/Settings';
import SubscriptionUsage from './models/SubscriptionUsage';
import SystemJobs from './models/SystemJobs';
import SystemJobTemplates from './models/SystemJobTemplates';
import Teams from './models/Teams';
import Tokens from './models/Tokens';
import UnifiedJobTemplates from './models/UnifiedJobTemplates';
import UnifiedJobs from './models/UnifiedJobs';
import Users from './models/Users';
import WorkflowApprovals from './models/WorkflowApprovals';
import WorkflowApprovalTemplates from './models/WorkflowApprovalTemplates';
import WorkflowJobTemplateNodes from './models/WorkflowJobTemplateNodes';
import WorkflowJobTemplates from './models/WorkflowJobTemplates';
import WorkflowJobs from './models/WorkflowJobs';
import HostMetrics from './models/HostMetrics';
// Shared singleton instances of each API model. Consumers import these
// rather than constructing their own clients, so all requests go through the
// same configured HTTP instance.
const ActivityStreamAPI = new ActivityStream();
const AdHocCommandsAPI = new AdHocCommands();
const ApplicationsAPI = new Applications();
const AuthAPI = new Auth();
const ConfigAPI = new Config();
const CredentialInputSourcesAPI = new CredentialInputSources();
const CredentialTypesAPI = new CredentialTypes();
const CredentialsAPI = new Credentials();
const ConstructedInventoriesAPI = new ConstructedInventories();
const DashboardAPI = new Dashboard();
const ExecutionEnvironmentsAPI = new ExecutionEnvironments();
const GroupsAPI = new Groups();
const HostsAPI = new Hosts();
const InstanceGroupsAPI = new InstanceGroups();
const InstancesAPI = new Instances();
const InventoriesAPI = new Inventories();
const InventoryScriptsAPI = new InventoryScripts();
const InventorySourcesAPI = new InventorySources();
const InventoryUpdatesAPI = new InventoryUpdates();
const JobTemplatesAPI = new JobTemplates();
const JobsAPI = new Jobs();
const JobEventsAPI = new JobEvents();
const LabelsAPI = new Labels();
const MeAPI = new Me();
const MeshAPI = new Mesh();
const MetricsAPI = new Metrics();
const NotificationTemplatesAPI = new NotificationTemplates();
const NotificationsAPI = new Notifications();
const OrganizationsAPI = new Organizations();
const ProjectUpdatesAPI = new ProjectUpdates();
const ProjectsAPI = new Projects();
const ReceptorAPI = new ReceptorAddresses();
const RolesAPI = new Roles();
const RootAPI = new Root();
const SchedulesAPI = new Schedules();
const SettingsAPI = new Settings();
const SubscriptionUsageAPI = new SubscriptionUsage();
const SystemJobsAPI = new SystemJobs();
const SystemJobTemplatesAPI = new SystemJobTemplates();
const TeamsAPI = new Teams();
const TokensAPI = new Tokens();
const UnifiedJobTemplatesAPI = new UnifiedJobTemplates();
const UnifiedJobsAPI = new UnifiedJobs();
const UsersAPI = new Users();
const WorkflowApprovalsAPI = new WorkflowApprovals();
const WorkflowApprovalTemplatesAPI = new WorkflowApprovalTemplates();
const WorkflowJobTemplateNodesAPI = new WorkflowJobTemplateNodes();
const WorkflowJobTemplatesAPI = new WorkflowJobTemplates();
const WorkflowJobsAPI = new WorkflowJobs();
const HostMetricsAPI = new HostMetrics();
// Single export surface for all API singletons; consumers import the
// specific *API instances they need from this module.
export {
  ActivityStreamAPI,
  AdHocCommandsAPI,
  ApplicationsAPI,
  AuthAPI,
  ConfigAPI,
  CredentialInputSourcesAPI,
  CredentialTypesAPI,
  CredentialsAPI,
  ConstructedInventoriesAPI,
  DashboardAPI,
  ExecutionEnvironmentsAPI,
  GroupsAPI,
  HostsAPI,
  InstanceGroupsAPI,
  InstancesAPI,
  InventoriesAPI,
  InventoryScriptsAPI,
  InventorySourcesAPI,
  InventoryUpdatesAPI,
  JobTemplatesAPI,
  JobsAPI,
  JobEventsAPI,
  LabelsAPI,
  MeAPI,
  MeshAPI,
  MetricsAPI,
  NotificationTemplatesAPI,
  NotificationsAPI,
  OrganizationsAPI,
  ProjectUpdatesAPI,
  ProjectsAPI,
  ReceptorAPI,
  RolesAPI,
  RootAPI,
  SchedulesAPI,
  SettingsAPI,
  SubscriptionUsageAPI,
  SystemJobsAPI,
  SystemJobTemplatesAPI,
  TeamsAPI,
  TokensAPI,
  UnifiedJobTemplatesAPI,
  UnifiedJobsAPI,
  UsersAPI,
  WorkflowApprovalsAPI,
  WorkflowApprovalTemplatesAPI,
  WorkflowJobTemplateNodesAPI,
  WorkflowJobTemplatesAPI,
  WorkflowJobsAPI,
  HostMetricsAPI,
};

View File

@ -1,44 +0,0 @@
// True when both arrays contain elements with the same `id`s in the same
// order. Only the `id` field is compared; other properties are ignored.
function isEqual(listA, listB) {
  if (listA.length !== listB.length) {
    return false;
  }
  for (let i = 0; i < listA.length; i += 1) {
    if (listA[i].id !== listB[i].id) {
      return false;
    }
  }
  return true;
}
// Mixin adding instance-group association endpoints to an API model.
// Expects the host class to provide `this.http` and `this.baseUrl`.
const InstanceGroupsMixin = (parent) =>
  class extends parent {
    // GET the instance groups related to a resource.
    readInstanceGroups(resourceId, params) {
      const url = `${this.baseUrl}${resourceId}/instance_groups/`;
      return this.http.get(url, { params });
    }

    // Attach a single instance group to the resource.
    associateInstanceGroup(resourceId, instanceGroupId) {
      const url = `${this.baseUrl}${resourceId}/instance_groups/`;
      return this.http.post(url, { id: instanceGroupId });
    }

    // Detach a single instance group from the resource.
    disassociateInstanceGroup(resourceId, instanceGroupId) {
      const url = `${this.baseUrl}${resourceId}/instance_groups/`;
      return this.http.post(url, { id: instanceGroupId, disassociate: true });
    }

    // Re-associate groups so the server-side ordering matches `current`.
    // No-op when `current` already matches `original` (compared by id).
    // Requests are awaited one at a time to preserve ordering and avoid a
    // race condition on the association endpoint.
    async orderInstanceGroups(resourceId, current, original) {
      if (isEqual(current, original)) {
        return;
      }
      /* eslint-disable no-await-in-loop, no-restricted-syntax */
      for (const group of original) {
        await this.disassociateInstanceGroup(resourceId, group.id);
      }
      for (const group of current) {
        await this.associateInstanceGroup(resourceId, group.id);
      }
      /* eslint-enable no-await-in-loop, no-restricted-syntax */
    }
  };

export default InstanceGroupsMixin;

View File

@ -1,49 +0,0 @@
// Mixin adding label read/associate/disassociate endpoints to an API model.
// Expects the host class to provide `this.http` and `this.baseUrl`.
const LabelsMixin = (parent) =>
  class extends parent {
    // GET one page of labels for the resource.
    readLabels(id, params) {
      return this.http.get(`${this.baseUrl}${id}/labels/`, { params });
    }

    // GET every label for the resource, walking all pages (200 per page).
    // Resolves with an axios-like shape: { data: { results: [...] } }.
    readAllLabels(id) {
      const url = `${this.baseUrl}${id}/labels/`;
      const collect = async (page, accumulated) => {
        const { data } = await this.http.get(url, {
          params: { page, page_size: 200 },
        });
        const merged = accumulated.concat(data.results);
        if (data?.next) {
          return collect(page + 1, merged);
        }
        return { data: { results: merged } };
      };
      return collect(1, []);
    }

    // POST a label (by name) onto the resource, creating it in the given
    // organization if necessary.
    associateLabel(id, label, orgId) {
      return this.http.post(`${this.baseUrl}${id}/labels/`, {
        name: label.name,
        organization: orgId,
      });
    }

    // POST a disassociation request for an existing label (by id).
    disassociateLabel(id, label) {
      return this.http.post(`${this.baseUrl}${id}/labels/`, {
        id: label.id,
        disassociate: true,
      });
    }
  };

export default LabelsMixin;

View File

@ -1,12 +0,0 @@
// Mixin adding the `/update/` launch endpoints to an API model.
// Expects the host class to provide `this.http` and `this.baseUrl`.
const LaunchUpdateMixin = (parent) =>
  class extends parent {
    // POST to start an update of the resource.
    launchUpdate(id, data) {
      const endpoint = `${this.baseUrl}${id}/update/`;
      return this.http.post(endpoint, data);
    }

    // GET launch configuration/permission info for the update endpoint.
    readLaunchUpdate(id) {
      const endpoint = `${this.baseUrl}${id}/update/`;
      return this.http.get(endpoint);
    }
  };

export default LaunchUpdateMixin;

View File

@ -1,170 +0,0 @@
// Mixin adding notification-template endpoints (read/associate/disassociate
// for the "started", "success", and "error" trigger types) to an API model.
// Expects the host class to provide `this.http` and `this.baseUrl`.
//
// NOTE(review): the "approvals" branches below call
// (dis)associateNotificationTemplatesApprovals, which is not defined in this
// mixin — presumably a subclass supplies it; confirm before relying on the
// 'approvals' notificationType.
const NotificationsMixin = (parent) =>
  class extends parent {
    // OPTIONS metadata for the notification templates list endpoint.
    readOptionsNotificationTemplates(id) {
      return this.http.options(`${this.baseUrl}${id}/notification_templates/`);
    }

    // GET the notification templates related to a resource.
    // Fix: wrap `params` in a request-config object ({ params }) so they are
    // sent as query parameters, matching every other read* method in this
    // mixin. Previously the bare object was passed as the axios config, so
    // any filters/pagination were silently dropped.
    readNotificationTemplates(id, params) {
      return this.http.get(`${this.baseUrl}${id}/notification_templates/`, {
        params,
      });
    }

    // GET templates fired when a job starts.
    readNotificationTemplatesStarted(id, params) {
      return this.http.get(
        `${this.baseUrl}${id}/notification_templates_started/`,
        { params }
      );
    }

    // GET templates fired on job success.
    readNotificationTemplatesSuccess(id, params) {
      return this.http.get(
        `${this.baseUrl}${id}/notification_templates_success/`,
        { params }
      );
    }

    // GET templates fired on job error.
    readNotificationTemplatesError(id, params) {
      return this.http.get(
        `${this.baseUrl}${id}/notification_templates_error/`,
        { params }
      );
    }

    // Turn "on" the started-trigger for a notification template.
    associateNotificationTemplatesStarted(resourceId, notificationId) {
      return this.http.post(
        `${this.baseUrl}${resourceId}/notification_templates_started/`,
        { id: notificationId }
      );
    }

    // Turn "off" the started-trigger for a notification template.
    disassociateNotificationTemplatesStarted(resourceId, notificationId) {
      return this.http.post(
        `${this.baseUrl}${resourceId}/notification_templates_started/`,
        { id: notificationId, disassociate: true }
      );
    }

    // Turn "on" the success-trigger for a notification template.
    associateNotificationTemplatesSuccess(resourceId, notificationId) {
      return this.http.post(
        `${this.baseUrl}${resourceId}/notification_templates_success/`,
        { id: notificationId }
      );
    }

    // Turn "off" the success-trigger for a notification template.
    disassociateNotificationTemplatesSuccess(resourceId, notificationId) {
      return this.http.post(
        `${this.baseUrl}${resourceId}/notification_templates_success/`,
        { id: notificationId, disassociate: true }
      );
    }

    // Turn "on" the error-trigger for a notification template.
    associateNotificationTemplatesError(resourceId, notificationId) {
      return this.http.post(
        `${this.baseUrl}${resourceId}/notification_templates_error/`,
        { id: notificationId }
      );
    }

    // Turn "off" the error-trigger for a notification template.
    disassociateNotificationTemplatesError(resourceId, notificationId) {
      return this.http.post(
        `${this.baseUrl}${resourceId}/notification_templates_error/`,
        { id: notificationId, disassociate: true }
      );
    }

    /**
     * Helper that dispatches to the type-specific associate method, i.e.
     * sets the "on" status of a related notification.
     *
     * @param[resourceId] - id of the base resource
     * @param[notificationId] - id of the notification
     * @param[notificationType] - one of 'approvals', 'started', 'success',
     *   'error'; anything else throws.
     */
    associateNotificationTemplate(
      resourceId,
      notificationId,
      notificationType
    ) {
      if (notificationType === 'approvals') {
        return this.associateNotificationTemplatesApprovals(
          resourceId,
          notificationId
        );
      }
      if (notificationType === 'started') {
        return this.associateNotificationTemplatesStarted(
          resourceId,
          notificationId
        );
      }
      if (notificationType === 'success') {
        return this.associateNotificationTemplatesSuccess(
          resourceId,
          notificationId
        );
      }
      if (notificationType === 'error') {
        return this.associateNotificationTemplatesError(
          resourceId,
          notificationId
        );
      }
      throw new Error(
        `Unsupported notificationType for association: ${notificationType}`
      );
    }

    /**
     * Helper that dispatches to the type-specific disassociate method, i.e.
     * sets the "off" status of a related notification.
     *
     * @param[resourceId] - id of the base resource
     * @param[notificationId] - id of the notification
     * @param[notificationType] - one of 'approvals', 'started', 'success',
     *   'error'; anything else throws.
     */
    disassociateNotificationTemplate(
      resourceId,
      notificationId,
      notificationType
    ) {
      if (notificationType === 'approvals') {
        return this.disassociateNotificationTemplatesApprovals(
          resourceId,
          notificationId
        );
      }
      if (notificationType === 'started') {
        return this.disassociateNotificationTemplatesStarted(
          resourceId,
          notificationId
        );
      }
      if (notificationType === 'success') {
        return this.disassociateNotificationTemplatesSuccess(
          resourceId,
          notificationId
        );
      }
      if (notificationType === 'error') {
        return this.disassociateNotificationTemplatesError(
          resourceId,
          notificationId
        );
      }
      throw new Error(
        `Unsupported notificationType for disassociation: ${notificationType}`
      );
    }
  };

export default NotificationsMixin;

View File

@ -1,48 +0,0 @@
// Mixin for API models whose resources are runnable jobs: exposes
// cancel/update/relaunch endpoints plus job-event listing.
// Expects the host class to provide `this.http` and `this.baseUrl`.
const Runnable = (parent) =>
  class extends parent {
    // Path segment for the events endpoint; subclasses may override it.
    jobEventSlug = '/events/';

    // POST a cancellation request for the running job.
    cancel(id) {
      return this.http.post(`${this.baseUrl}${id}/cancel/`);
    }

    // POST to start an update of the resource.
    launchUpdate(id, data) {
      return this.http.post(`${this.baseUrl}${id}/update/`, data);
    }

    // GET launch configuration/permission info for the update endpoint.
    readLaunchUpdate(id) {
      return this.http.get(`${this.baseUrl}${id}/update/`);
    }

    // GET the job's events, optionally filtered/paginated by `params`.
    readEvents(id, params = {}) {
      return this.http.get(`${this.baseUrl}${id}${this.jobEventSlug}`, {
        params,
      });
    }

    // OPTIONS metadata for the events endpoint.
    readEventOptions(id) {
      return this.http.options(`${this.baseUrl}${id}${this.jobEventSlug}`);
    }

    // GET relaunch configuration/permission info.
    readRelaunch(id) {
      return this.http.get(`${this.baseUrl}${id}/relaunch/`);
    }

    // POST to relaunch the job with the given payload.
    relaunch(id, data) {
      return this.http.post(`${this.baseUrl}${id}/relaunch/`, data);
    }
  };

export default Runnable;

View File

@ -1,16 +0,0 @@
// Mixin adding schedule endpoints to an API model.
// Expects the host class to provide `this.http` and `this.baseUrl`.
const SchedulesMixin = (parent) =>
  class extends parent {
    // POST a new schedule for the resource.
    createSchedule(id, data) {
      const endpoint = `${this.baseUrl}${id}/schedules/`;
      return this.http.post(endpoint, data);
    }

    // GET the schedules related to the resource.
    readSchedules(id, params) {
      const endpoint = `${this.baseUrl}${id}/schedules/`;
      return this.http.get(endpoint, { params });
    }

    // OPTIONS metadata for the schedules list endpoint.
    readScheduleOptions(id) {
      const endpoint = `${this.baseUrl}${id}/schedules/`;
      return this.http.options(endpoint);
    }
  };

export default SchedulesMixin;

View File

@ -1,10 +0,0 @@
import Base from '../Base';

// API model for the activity stream endpoint; inherits all request
// behavior from Base.
class ActivityStream extends Base {
  constructor(http) {
    super(http);
    this.baseUrl = 'api/v2/activity_stream/';
  }
}

export default ActivityStream;

View File

@ -1,15 +0,0 @@
import Base from '../Base';
import RunnableMixin from '../mixins/Runnable.mixin';

// API model for ad hoc commands; RunnableMixin supplies the
// cancel/relaunch/update/events endpoints.
class AdHocCommands extends RunnableMixin(Base) {
  constructor(http) {
    super(http);
    this.baseUrl = 'api/v2/ad_hoc_commands/';
  }

  // GET the credentials associated with an ad hoc command.
  readCredentials(id) {
    return this.http.get(`${this.baseUrl}${id}/credentials/`);
  }
}

export default AdHocCommands;

View File

@ -1,20 +0,0 @@
import Base from '../Base';

// API model for the applications endpoint.
class Applications extends Base {
  constructor(http) {
    super(http);
    this.baseUrl = 'api/v2/applications/';
  }

  // GET the tokens issued for an application.
  readTokens(appId, params) {
    return this.http.get(`${this.baseUrl}${appId}/tokens/`, {
      params,
    });
  }

  // OPTIONS metadata for an application's tokens endpoint.
  readTokenOptions(appId) {
    return this.http.options(`${this.baseUrl}${appId}/tokens/`);
  }
}

export default Applications;

View File

@ -1,10 +0,0 @@
import Base from '../Base';

// API model for the auth endpoint; inherits all request behavior from Base.
class Auth extends Base {
  constructor(http) {
    super(http);
    this.baseUrl = 'api/v2/auth/';
  }
}

export default Auth;

View File

@ -1,22 +0,0 @@
import Base from '../Base';

// API model for the global configuration endpoint.
class Config extends Base {
  constructor(http) {
    super(http);
    this.baseUrl = 'api/v2/config/';
    // Bound so `read` can be handed around as a callback without losing
    // `this`. NOTE(review): `read` is presumably defined on Base — confirm.
    this.read = this.read.bind(this);
  }

  // POST subscription credentials and list the available subscriptions.
  readSubscriptions(username, password) {
    return this.http.post(`${this.baseUrl}subscriptions/`, {
      subscriptions_username: username,
      subscriptions_password: password,
    });
  }

  // POST to attach a subscription to this installation.
  attach(data) {
    return this.http.post(`${this.baseUrl}attach/`, data);
  }
}

export default Config;

View File

@ -1,25 +0,0 @@
import Base from '../Base';
import InstanceGroupsMixin from '../mixins/InstanceGroups.mixin';

// API model for the constructed-inventories endpoint, with instance-group
// association helpers mixed in.
class ConstructedInventories extends InstanceGroupsMixin(Base) {
  constructor(http) {
    super(http);
    this.baseUrl = 'api/v2/constructed_inventories/';
  }

  // OPTIONS a single constructed inventory and return the action metadata
  // for the given HTTP `method` (e.g. 'PUT'). Throws when the method is
  // absent from the response, which signals insufficient user access.
  async readConstructedInventoryOptions(id, method) {
    const {
      data: { actions },
    } = await this.http.options(`${this.baseUrl}${id}/`);
    if (actions[method]) {
      return actions[method];
    }
    throw new Error(
      `You have insufficient access to this Constructed Inventory.
      Please contact your system administrator if there is an issue with your access.`
    );
  }
}

export default ConstructedInventories;

View File

@ -1,51 +0,0 @@
import ConstructedInventories from './ConstructedInventories';

// Unit tests for the ConstructedInventories API model, using a mocked
// http layer (jest).
describe('ConstructedInventoriesAPI', () => {
  const constructedInventoryId = 1;
  const constructedInventoryMethod = 'PUT';
  let ConstructedInventoriesAPI;
  let mockHttp;

  beforeEach(() => {
    // OPTIONS response advertising only the PUT action.
    const optionsPromise = () =>
      Promise.resolve({
        data: {
          actions: {
            PUT: {},
          },
        },
      });
    mockHttp = {
      options: jest.fn(optionsPromise),
    };
    ConstructedInventoriesAPI = new ConstructedInventories(mockHttp);
  });

  afterEach(() => {
    jest.resetAllMocks();
  });

  test('readConstructedInventoryOptions calls options with the expected params', async () => {
    await ConstructedInventoriesAPI.readConstructedInventoryOptions(
      constructedInventoryId,
      constructedInventoryMethod
    );
    expect(mockHttp.options).toHaveBeenCalledTimes(1);
    expect(mockHttp.options).toHaveBeenCalledWith(
      `api/v2/constructed_inventories/${constructedInventoryId}/`
    );
  });

  test('readConstructedInventory should throw an error if action method is missing', async () => {
    // Fix: the original try/catch passed vacuously when no error was
    // thrown; `rejects.toThrow` fails the test unless the promise rejects
    // with the expected message.
    await expect(
      ConstructedInventoriesAPI.readConstructedInventoryOptions(
        constructedInventoryId,
        'POST'
      )
    ).rejects.toThrow(
      'You have insufficient access to this Constructed Inventory.'
    );
  });
});

View File

@ -1,10 +0,0 @@
import Base from '../Base';

// API model for the credential input sources endpoint; inherits all
// request behavior from Base.
class CredentialInputSources extends Base {
  constructor(http) {
    super(http);
    this.baseUrl = 'api/v2/credential_input_sources/';
  }
}

export default CredentialInputSources;

Some files were not shown because too many files have changed in this diff Show More