
slightly refactor isolated event consumption

Ryan Petrello 2019-03-29 10:48:42 -04:00
parent 8f089c02a5
commit a59bc33280
GPG Key ID: F2AA5F2122351777


@@ -187,39 +187,7 @@ class IsolatedManager(object):
self.private_data_dir,
extravars=extravars)
status, rc = runner_obj.status, runner_obj.rc
# discover new events and ingest them
events_path = self.path_to('artifacts', self.ident, 'job_events')
# it's possible that `events_path` doesn't exist *yet*, because runner
# hasn't actually written any events yet (if you ran e.g., a sleep 30)
# only attempt to consume events if any were rsynced back
if os.path.exists(events_path):
for event in set(os.listdir(events_path)) - self.handled_events:
path = os.path.join(events_path, event)
if os.path.exists(path):
try:
event_data = json.load(
open(os.path.join(events_path, event), 'r')
)
except json.decoder.JSONDecodeError:
# This means the event we got back isn't valid JSON
# that can happen if runner is still partially
# writing an event file while it's rsyncing
# these event writes are _supposed_ to be atomic
# but it doesn't look like they actually are in
# practice
# in this scenario, just ignore this event and try it
# again on the next sync
continue
event_data.setdefault(self.event_data_key, self.instance.id)
dispatcher.dispatch(event_data)
self.handled_events.add(event)
# handle artifacts
if event_data.get('event_data', {}).get('artifact_data', {}):
self.instance.artifacts = event_data['event_data']['artifact_data']
self.instance.save(update_fields=['artifacts'])
self.consume_events(dispatcher)
last_check = time.time()
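
The block removed here (and reintroduced verbatim as consume_events further down) tolerates a JSONDecodeError because rsync can copy an event file that ansible-runner is still in the middle of writing; such a file is skipped and picked up again on a later pass. Below is a minimal standalone sketch of that skip-and-retry pattern; the function name, arguments, and directory layout are illustrative, not AWX's own API.

import json
import os


def ingest_new_events(events_path, handled, dispatch):
    """Parse event files not seen yet; leave unparseable ones for a later pass.

    `handled` is a set of filenames already dispatched; `dispatch` is any
    callable that accepts one decoded event dict.
    """
    if not os.path.exists(events_path):
        # nothing has been rsynced back yet
        return
    for name in set(os.listdir(events_path)) - handled:
        path = os.path.join(events_path, name)
        if not os.path.exists(path):
            continue
        try:
            with open(path, 'r') as f:
                event = json.load(f)
        except json.decoder.JSONDecodeError:
            # the file is probably still mid-write or mid-sync; retry later
            continue
        dispatch(event)
        handled.add(name)

Because `handled` only grows after a successful parse and dispatch, a half-written file is retried automatically on the next call.
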
@@ -231,6 +199,10 @@ class IsolatedManager(object):
with open(rc_path, 'r') as f:
rc = int(f.readline())
# consume events one last time just to be sure we didn't miss anything
# in the final sync
self.consume_events(dispatcher)
# emit an EOF event
event_data = {
'event': 'EOF',
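
This hunk adds one more consume_events() call after the job's return code has been read back, so anything delivered by the final rsync is dispatched before the EOF marker goes out. A rough sketch of that ordering, reusing the ingest_new_events helper sketched above; the EOF payload is simplified here and is not AWX's exact event schema.

def finish_run(rc_path, events_path, handled, dispatch):
    # read the return code the isolated node wrote back
    with open(rc_path, 'r') as f:
        rc = int(f.readline())
    # drain whatever the final sync brought back ...
    ingest_new_events(events_path, handled, dispatch)
    # ... and only then emit a terminal marker so consumers know nothing
    # else will follow
    dispatch({'event': 'EOF'})
    return rc
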
@@ -241,6 +213,41 @@ class IsolatedManager(object):
return status, rc
def consume_events(self, dispatcher):
# discover new events and ingest them
events_path = self.path_to('artifacts', self.ident, 'job_events')
# it's possible that `events_path` doesn't exist *yet*, because runner
# hasn't actually written any events yet (if you ran e.g., a sleep 30)
# only attempt to consume events if any were rsynced back
if os.path.exists(events_path):
for event in set(os.listdir(events_path)) - self.handled_events:
path = os.path.join(events_path, event)
if os.path.exists(path):
try:
event_data = json.load(
open(os.path.join(events_path, event), 'r')
)
except json.decoder.JSONDecodeError:
# This means the event we got back isn't valid JSON
# that can happen if runner is still partially
# writing an event file while it's rsyncing
# these event writes are _supposed_ to be atomic
# but it doesn't look like they actually are in
# practice
# in this scenario, just ignore this event and try it
# again on the next sync
continue
event_data.setdefault(self.event_data_key, self.instance.id)
dispatcher.dispatch(event_data)
self.handled_events.add(event)
# handle artifacts
if event_data.get('event_data', {}).get('artifact_data', {}):
self.instance.artifacts = event_data['event_data']['artifact_data']
self.instance.save(update_fields=['artifacts'])
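
One way to exercise the skip-and-retry behaviour end to end is to write one complete and one truncated event file into a temporary directory and run the ingest_new_events sketch from above against a recording dispatcher. Everything below (the RecordingDispatcher class, the file names) is a test double for illustration and not part of AWX.

import json
import os
import tempfile


class RecordingDispatcher:
    """Test double that just remembers what was dispatched."""

    def __init__(self):
        self.events = []

    def dispatch(self, event_data):
        self.events.append(event_data)


with tempfile.TemporaryDirectory() as events_path:
    with open(os.path.join(events_path, '1-ok.json'), 'w') as f:
        json.dump({'event': 'runner_on_ok', 'counter': 1}, f)
    with open(os.path.join(events_path, '2-partial.json'), 'w') as f:
        f.write('{"event": "runner_on_ok", "count')  # simulates a mid-rsync file

    handled = set()
    dispatcher = RecordingDispatcher()
    ingest_new_events(events_path, handled, dispatcher.dispatch)

    assert len(dispatcher.events) == 1   # only the complete file was dispatched
    assert handled == {'1-ok.json'}      # the partial one is left to be retried
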
def cleanup(self):
# If the job failed for any reason, make a last-ditch effort at cleanup
extravars = {