1
0
mirror of https://github.com/ansible/awx.git synced 2024-10-31 06:51:10 +03:00

add functional API tests for deprecated job event stdout composition

see: https://github.com/ansible/awx/issues/200
This commit is contained in:
Ryan Petrello 2017-12-15 13:40:37 -05:00
parent 1369f72885
commit c4d901bf2c
No known key found for this signature in database
GPG Key ID: F2AA5F2122351777
4 changed files with 310 additions and 20 deletions

View File

@ -4624,7 +4624,7 @@ class UnifiedJobStdout(RetrieveAPIView):
return super(UnifiedJobStdout, self).retrieve(request, *args, **kwargs)
except StdoutMaxBytesExceeded as e:
response_message = _(
"Standard Output too large to display {text_size} bytes), "
"Standard Output too large to display ({text_size} bytes), "
"only download supported for sizes over {supported_size} bytes").format(
text_size=e.total, supported_size=e.supported
)

View File

@ -2,6 +2,7 @@
# All Rights Reserved.
# Python
from cStringIO import StringIO
import json
import logging
import os
@ -906,21 +907,59 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
related.save()
def result_stdout_raw_handle(self, enforce_max_bytes=True):
"""Return a file-like object containing the standard out of the
job's result.
"""
if not os.path.exists(settings.JOBOUTPUT_ROOT):
os.makedirs(settings.JOBOUTPUT_ROOT)
fd = tempfile.NamedTemporaryFile(
prefix='{}-{}-'.format(self.model_to_str(), self.pk),
suffix='.out',
dir=settings.JOBOUTPUT_ROOT
)
This method returns a file-like object ready to be read which contains
all stdout for the UnifiedJob.
If the size of the file is greater than
`settings.STDOUT_MAX_BYTES_DISPLAY`, a StdoutMaxBytesExceeded exception
will be raised.
"""
max_supported = settings.STDOUT_MAX_BYTES_DISPLAY
if enforce_max_bytes:
# If enforce_max_bytes is True, we're not grabbing the whole file,
# just the first <settings.STDOUT_MAX_BYTES_DISPLAY> bytes;
# in this scenario, it's probably safe to use a StringIO.
fd = StringIO()
else:
# If enforce_max_bytes = False, that means they're downloading
# the entire file. To avoid ballooning memory, let's write the
# stdout content to a temporary disk location
if not os.path.exists(settings.JOBOUTPUT_ROOT):
os.makedirs(settings.JOBOUTPUT_ROOT)
fd = tempfile.NamedTemporaryFile(
prefix='{}-{}-'.format(self.model_to_str(), self.pk),
suffix='.out',
dir=settings.JOBOUTPUT_ROOT
)
# Before the addition of event-based stdout, older versions of
# awx stored stdout as raw text blobs in a certain database column
# (`main_unifiedjob.result_stdout_text`)
# For older installs, this data still exists in the database; check for
# it and use if it exists
legacy_stdout_text = self.result_stdout_text
if legacy_stdout_text:
if enforce_max_bytes and len(legacy_stdout_text) > max_supported:
raise StdoutMaxBytesExceeded(len(legacy_stdout_text), max_supported)
fd.write(legacy_stdout_text)
fd.flush()
if hasattr(fd, 'name'):
fd.flush()
return open(fd.name, 'r')
else:
# we just wrote to this StringIO, so rewind it
fd.seek(0)
return fd
else:
# Note: the code in this block _intentionally_ does not use the
# Django ORM because of the potential size (many MB+) of
# `main_jobevent.stdout`; we *do not* want to generate queries
# here that construct model objects by fetching large gobs of
# data (and potentially ballooning memory usage); instead, we
# just want to write concatenated values of a certain column
# (`stdout`) directly to a file
with connection.cursor() as cursor:
tablename = self._meta.db_table
related_name = {
@ -930,6 +969,22 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
'main_inventoryupdate': 'inventory_update_id',
'main_systemjob': 'system_job_id',
}[tablename]
if enforce_max_bytes:
# detect the length of all stdout for this UnifiedJob, and
# if it exceeds settings.STDOUT_MAX_BYTES_DISPLAY bytes,
# don't bother actually fetching the data
cursor.execute(
"select sum(length(stdout)) from {} where {}={}".format(
tablename + 'event',
related_name,
self.id
)
)
total_bytes = cursor.fetchone()[0]
if total_bytes > max_supported:
raise StdoutMaxBytesExceeded(total_bytes, max_supported)
cursor.copy_expert(
"copy (select stdout from {} where {}={} order by start_line) to stdout".format(
tablename + 'event',
@ -938,15 +993,18 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
),
fd
)
fd.flush()
subprocess.Popen("sed -i 's/\\\\r\\\\n/\\n/g' {}".format(fd.name), shell=True).wait()
if enforce_max_bytes:
total_size = os.stat(fd.name).st_size
max_supported = settings.STDOUT_MAX_BYTES_DISPLAY
if total_size > max_supported:
raise StdoutMaxBytesExceeded(total_size, max_supported)
return open(fd.name, 'r')
if hasattr(fd, 'name'):
# If we're dealing with a physical file, use `sed` to clean
# up escaped line sequences
fd.flush()
subprocess.Popen("sed -i 's/\\\\r\\\\n/\\n/g' {}".format(fd.name), shell=True).wait()
return open(fd.name, 'r')
else:
# If we're dealing with an in-memory string buffer, use
# string.replace()
fd = StringIO(fd.getvalue().replace('\\r\\n', '\n'))
return fd
def _escape_ascii(self, content):
# Remove ANSI escape sequences used to embed event data.

View File

@ -0,0 +1,231 @@
import re
import shutil
import tempfile
from django.conf import settings
from django.db.backends.sqlite3.base import SQLiteCursorWrapper
import pytest
from awx.api.versioning import reverse
from awx.main.models import (Job, JobEvent, AdHocCommand, AdHocCommandEvent,
Project, ProjectUpdate, ProjectUpdateEvent,
InventoryUpdate, InventorySource,
InventoryUpdateEvent, SystemJob, SystemJobEvent)
def _mk_project_update():
    """Persist a bare Project and return an (unsaved) ProjectUpdate for it."""
    proj = Project()
    proj.save()
    return ProjectUpdate(project=proj)
def _mk_inventory_update():
    """Persist a bare InventorySource and return an (unsaved) InventoryUpdate."""
    src = InventorySource()
    src.save()
    return InventoryUpdate(inventory_source=src)
@pytest.fixture(scope='function')
def sqlite_copy_expert(request):
    # `cursor.copy_expert` is postgres-specific and SQLite doesn't provide it;
    # monkeypatch an ORM-backed stand-in onto the SQLite cursor wrapper so the
    # stdout-composition code path can be exercised under the test database.
    path = tempfile.mkdtemp(prefix='job-event-stdout')

    def write_stdout(self, sql, fd):
        # Emulate `copy (select stdout from <table> ...) to stdout` by pulling
        # the table name out of the SQL and streaming each matching event
        # class's stdout into the provided file object, in start_line order.
        tokens = sql.split(' ')
        tablename = tokens[tokens.index('from') + 1]
        event_classes = (JobEvent, AdHocCommandEvent, ProjectUpdateEvent,
                         InventoryUpdateEvent, SystemJobEvent)
        for cls in event_classes:
            if cls._meta.db_table != tablename:
                continue
            for event in cls.objects.order_by('start_line').all():
                fd.write(event.stdout)

    SQLiteCursorWrapper.copy_expert = write_stdout
    # clean up both the temp dir and the monkeypatch when the test finishes
    request.addfinalizer(lambda: shutil.rmtree(path))
    request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, 'copy_expert'))
    return path
@pytest.mark.django_db
@pytest.mark.parametrize('Parent, Child, relation, view', [
    [Job, JobEvent, 'job', 'api:job_stdout'],
    [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
    [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
    [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
])
def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
    """Event stdout is concatenated in start_line order for ?format=txt."""
    parent = Parent()
    parent.save()
    for line_no in range(3):
        event_kwargs = {
            relation: parent,
            'stdout': 'Testing {}\n'.format(line_no),
            'start_line': line_no,
        }
        Child(**event_kwargs).save()
    url = '{}?format=txt'.format(reverse(view, kwargs={'pk': parent.pk}))
    response = get(url, user=admin, expect=200)
    expected = ['Testing %d' % line_no for line_no in range(3)]
    assert response.content.splitlines() == expected
@pytest.mark.django_db
@pytest.mark.parametrize('Parent, Child, relation, view', [
    [Job, JobEvent, 'job', 'api:job_stdout'],
    [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
    [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
    [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
])
@pytest.mark.parametrize('download', [True, False])
def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation,
                               view, download, get, admin):
    """ANSI escapes are stripped for txt formats but preserved for ansi ones."""
    parent = Parent()
    parent.save()
    for line_no in range(3):
        Child(**{
            relation: parent,
            'stdout': '\x1B[0;36mTesting {}\x1B[0m\n'.format(line_no),
            'start_line': line_no
        }).save()
    base_url = reverse(view, kwargs={'pk': parent.pk})

    def fetch(fmt_name):
        # append the requested format and perform the GET as the admin user
        return get(base_url + "?format={}".format(fmt_name), user=admin, expect=200)

    # ansi codes in ?format=txt should get filtered
    response = fetch("txt_download" if download else "txt")
    assert response.content.splitlines() == ['Testing %d' % n for n in range(3)]
    has_download_header = response.has_header('Content-Disposition')
    assert has_download_header if download else not has_download_header

    # ask for ansi and you'll get it
    response = fetch("ansi_download" if download else "ansi")
    assert response.content.splitlines() == ['\x1B[0;36mTesting %d\x1B[0m' % n for n in range(3)]
    has_download_header = response.has_header('Content-Disposition')
    assert has_download_header if download else not has_download_header
@pytest.mark.django_db
@pytest.mark.parametrize('Parent, Child, relation, view', [
    [Job, JobEvent, 'job', 'api:job_stdout'],
    [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
    [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
    [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
])
def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
    """?format=html renders ANSI color codes as styled <span> elements."""
    parent = Parent()
    parent.save()
    for line_no in range(3):
        Child(**{
            relation: parent,
            'stdout': '\x1B[0;36mTesting {}\x1B[0m\n'.format(line_no),
            'start_line': line_no
        }).save()
    url = '{}?format=html'.format(reverse(view, kwargs={'pk': parent.pk}))
    response = get(url, user=admin, expect=200)
    # the stylesheet for the ansi36 (cyan) class must be embedded...
    assert '.ansi36 { color: #2dbaba; }' in response.content
    # ...and each stdout line should be wrapped in a span carrying that class
    for line_no in range(3):
        assert '<span class="ansi36">Testing {}</span>'.format(line_no) in response.content
@pytest.mark.django_db
@pytest.mark.parametrize('Parent, Child, relation, view', [
    [Job, JobEvent, 'job', 'api:job_stdout'],
    [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
    [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
    [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
])
def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
    """The start_line/end_line query params slice the rendered stdout."""
    parent = Parent()
    parent.save()
    for line_no in range(20):
        event_kwargs = {
            relation: parent,
            'stdout': 'Testing {}\n'.format(line_no),
            'start_line': line_no,
        }
        Child(**event_kwargs).save()
    url = reverse(view, kwargs={'pk': parent.pk}) + '?format=html&start_line=5&end_line=10'
    response = get(url, user=admin, expect=200)
    matches = re.findall('Testing [0-9]+', response.content)
    assert matches == ['Testing %d' % n for n in range(5, 10)]
@pytest.mark.django_db
def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin):
    """The system job detail view composes result_stdout from its events."""
    sys_job = SystemJob()
    sys_job.save()
    for line_no in range(3):
        SystemJobEvent(system_job=sys_job,
                       stdout='Testing {}\n'.format(line_no),
                       start_line=line_no).save()
    url = reverse('api:system_job_detail', kwargs={'pk': sys_job.pk})
    response = get(url, user=admin, expect=200)
    expected = ['Testing %d' % n for n in range(3)]
    assert response.data['result_stdout'].splitlines() == expected
@pytest.mark.django_db
@pytest.mark.parametrize('Parent, Child, relation, view', [
    [Job, JobEvent, 'job', 'api:job_stdout'],
    [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
    [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
    [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
])
@pytest.mark.parametrize('fmt', ['txt', 'ansi'])
def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin):
    """Stdout over STDOUT_MAX_BYTES_DISPLAY yields an explanatory message;
    the _download variant still returns the full payload."""
    parent = Parent()
    parent.save()
    total_bytes = settings.STDOUT_MAX_BYTES_DISPLAY + 1
    large_stdout = 'X' * total_bytes
    Child(**{relation: parent, 'stdout': large_stdout, 'start_line': 0}).save()
    url = reverse(view, kwargs={'pk': parent.pk})

    # display formats refuse to render the oversized output and explain why
    response = get('{}?format={}'.format(url, fmt), user=admin, expect=200)
    expected_message = (
        'Standard Output too large to display ({actual} bytes), only download '
        'supported for sizes over {max} bytes'.format(
            actual=total_bytes,
            max=settings.STDOUT_MAX_BYTES_DISPLAY
        )
    )
    assert response.content == expected_message

    # ...but downloading is not size-limited
    response = get('{}?format={}_download'.format(url, fmt), user=admin, expect=200)
    assert response.content == large_stdout
@pytest.mark.django_db
@pytest.mark.parametrize('Cls, view', [
    [_mk_project_update, 'api:project_update_stdout'],
    [_mk_inventory_update, 'api:inventory_update_stdout']
])
@pytest.mark.parametrize('fmt', ['txt', 'ansi', 'txt_download', 'ansi_download'])
def test_legacy_result_stdout_text_fallback(Cls, view, fmt, get, admin):
    # older versions of awx stored raw stdout in a text blob at
    # main_unifiedjob.result_stdout_text; this test ensures that the fallback
    # works properly when no job events exist
    job = Cls()
    job.save()
    job.result_stdout_text = 'LEGACY STDOUT!'
    job.save()
    url = reverse(view, kwargs={'pk': job.pk})
    response = get('{}?format={}'.format(url, fmt), user=admin, expect=200)
    assert response.content == 'LEGACY STDOUT!'
@pytest.mark.django_db
@pytest.mark.parametrize('Cls, view', [
    [_mk_project_update, 'api:project_update_stdout'],
    [_mk_inventory_update, 'api:inventory_update_stdout']
])
@pytest.mark.parametrize('fmt', ['txt', 'ansi'])
def test_legacy_result_stdout_with_max_bytes(Cls, view, fmt, get, admin):
    """The size cap also applies to legacy result_stdout_text blobs."""
    job = Cls()
    job.save()
    total_bytes = settings.STDOUT_MAX_BYTES_DISPLAY + 1
    large_stdout = 'X' * total_bytes
    job.result_stdout_text = large_stdout
    job.save()
    url = reverse(view, kwargs={'pk': job.pk})

    # display formats report the size instead of rendering
    response = get('{}?format={}'.format(url, fmt), user=admin, expect=200)
    expected_message = (
        'Standard Output too large to display ({actual} bytes), only download '
        'supported for sizes over {max} bytes'.format(
            actual=total_bytes,
            max=settings.STDOUT_MAX_BYTES_DISPLAY
        )
    )
    assert response.content == expected_message

    # the download format still returns the whole legacy blob
    response = get('{}?format={}'.format(url, fmt + '_download'), user=admin, expect=200)
    assert response.content == large_stdout

View File

@ -544,7 +544,8 @@ def _request(verb):
response.data = data_copy
print(response.data)
assert response.status_code == expect
response.render()
if hasattr(response, 'render'):
response.render()
return response
return rf