Skip to content

Commit 8505783

Browse files
committed
Merge remote-tracking branch 'tower/release_3.2.3' into devel
* tower/release_3.2.3:
  - fix unicode bugs with log statements
  - use --export option for ansible-inventory
  - add support for new "BECOME" prompt in Ansible 2.5+ for adhoc commands
  - enforce strings for secret password inputs on Credentials
  - fix a bug for "users should be able to change type of unused credential"
  - fix xss vulnerabilities - on host recent jobs popover - on schedule name tooltip
  - fix a bug when testing UDP-based logging configuration
  - bump templates form credential_types page limit
  - Wait for Slack RTM API websocket connection to be established
  - don't process artifacts from custom `set_stat` calls asynchronously
  - don't overwrite env['ANSIBLE_LIBRARY'] when fact caching is enabled
  - only allow facts to cache in the proper file system location
  - replace our memcached-based fact cache implementation with local files
  - add support for new "BECOME" prompt in Ansible 2.5+
  - fix a bug in inventory generation for isolated nodes
  - properly handle unicode for isolated job buffers
2 parents 76ff925 + 1a6819c commit 8505783

File tree

16 files changed

+447
-218
lines changed

16 files changed

+447
-218
lines changed

awx/api/serializers.py

+1
Original file line numberDiff line numberDiff line change
@@ -2196,6 +2196,7 @@ def validate_credential_type(self, credential_type):
21962196
_('You cannot change the credential type of the credential, as it may break the functionality'
21972197
' of the resources using it.'),
21982198
)
2199+
21992200
return credential_type
22002201

22012202

awx/conf/views.py

+6-2
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
from awx.api.permissions import IsSuperUser
2222
from awx.api.versioning import reverse, get_request_version
2323
from awx.main.utils import * # noqa
24-
from awx.main.utils.handlers import BaseHTTPSHandler, LoggingConnectivityException
24+
from awx.main.utils.handlers import BaseHTTPSHandler, UDPHandler, LoggingConnectivityException
2525
from awx.main.tasks import handle_setting_changes
2626
from awx.conf.license import get_licensed_features
2727
from awx.conf.models import Setting
@@ -199,7 +199,11 @@ class MockSettings:
199199
for k, v in serializer.validated_data.items():
200200
setattr(mock_settings, k, v)
201201
mock_settings.LOG_AGGREGATOR_LEVEL = 'DEBUG'
202-
BaseHTTPSHandler.perform_test(mock_settings)
202+
if mock_settings.LOG_AGGREGATOR_PROTOCOL.upper() == 'UDP':
203+
UDPHandler.perform_test(mock_settings)
204+
return Response(status=status.HTTP_201_CREATED)
205+
else:
206+
BaseHTTPSHandler.perform_test(mock_settings)
203207
except LoggingConnectivityException as e:
204208
return Response({'error': str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
205209
return Response(status=status.HTTP_200_OK)

awx/lib/awx_display_callback/module.py

+18-2
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,11 @@
1818
from __future__ import (absolute_import, division, print_function)
1919

2020
# Python
21+
import codecs
2122
import contextlib
23+
import json
24+
import os
25+
import stat
2226
import sys
2327
import uuid
2428
from copy import copy
@@ -292,10 +296,22 @@ def v2_playbook_on_stats(self, stats):
292296
failures=stats.failures,
293297
ok=stats.ok,
294298
processed=stats.processed,
295-
skipped=stats.skipped,
296-
artifact_data=stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}
299+
skipped=stats.skipped
297300
)
298301

302+
# write custom set_stat artifact data to the local disk so that it can
303+
# be persisted by awx after the process exits
304+
custom_artifact_data = stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}
305+
if custom_artifact_data:
306+
# create the directory for custom stats artifacts to live in (if it doesn't exist)
307+
custom_artifacts_dir = os.path.join(os.getenv('AWX_PRIVATE_DATA_DIR'), 'artifacts')
308+
os.makedirs(custom_artifacts_dir, mode=stat.S_IXUSR + stat.S_IWUSR + stat.S_IRUSR)
309+
310+
custom_artifacts_path = os.path.join(custom_artifacts_dir, 'custom')
311+
with codecs.open(custom_artifacts_path, 'w', encoding='utf-8') as f:
312+
os.chmod(custom_artifacts_path, stat.S_IRUSR | stat.S_IWUSR)
313+
json.dump(custom_artifact_data, f)
314+
299315
with self.capture_event_data('playbook_on_stats', **event_data):
300316
super(BaseCallbackModule, self).v2_playbook_on_stats(stats)
301317

awx/lib/tests/test_display_callback.py

+25
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,9 @@
77
import json
88
import mock
99
import os
10+
import shutil
1011
import sys
12+
import tempfile
1113

1214
import pytest
1315

@@ -259,3 +261,26 @@ def test_callback_plugin_strips_task_environ_variables(executor, cache, playbook
259261
assert len(cache)
260262
for event in cache.values():
261263
assert os.environ['PATH'] not in json.dumps(event)
264+
265+
266+
@pytest.mark.parametrize('playbook', [
267+
{'custom_set_stat.yml': '''
268+
- name: custom set_stat calls should persist to the local disk so awx can save them
269+
connection: local
270+
hosts: all
271+
tasks:
272+
- set_stats:
273+
data:
274+
foo: "bar"
275+
'''}, # noqa
276+
])
277+
def test_callback_plugin_saves_custom_stats(executor, cache, playbook):
278+
try:
279+
private_data_dir = tempfile.mkdtemp()
280+
with mock.patch.dict(os.environ, {'AWX_PRIVATE_DATA_DIR': private_data_dir}):
281+
executor.run()
282+
artifacts_path = os.path.join(private_data_dir, 'artifacts', 'custom')
283+
with open(artifacts_path, 'r') as f:
284+
assert json.load(f) == {'foo': 'bar'}
285+
finally:
286+
shutil.rmtree(os.path.join(private_data_dir))

awx/main/fields.py

+6
Original file line numberDiff line numberDiff line change
@@ -506,6 +506,12 @@ def validate(self, value, model_instance):
506506
v != '$encrypted$',
507507
model_instance.pk
508508
]):
509+
if not isinstance(getattr(model_instance, k), six.string_types):
510+
raise django_exceptions.ValidationError(
511+
_('secret values must be of type string, not {}').format(type(v).__name__),
512+
code='invalid',
513+
params={'value': v},
514+
)
509515
decrypted_values[k] = utils.decrypt_field(model_instance, k)
510516
else:
511517
decrypted_values[k] = v

awx/main/models/jobs.py

+61-78
Original file line numberDiff line numberDiff line change
@@ -2,21 +2,22 @@
22
# All Rights Reserved.
33

44
# Python
5+
import codecs
56
import datetime
67
import logging
8+
import os
79
import time
810
import json
9-
import base64
1011
from urlparse import urljoin
1112

13+
import six
14+
1215
# Django
1316
from django.conf import settings
1417
from django.db import models
1518
#from django.core.cache import cache
16-
import memcache
17-
from dateutil import parser
18-
from dateutil.tz import tzutc
1919
from django.utils.encoding import smart_str
20+
from django.utils.timezone import now
2021
from django.utils.translation import ugettext_lazy as _
2122
from django.core.exceptions import ValidationError, FieldDoesNotExist
2223

@@ -738,86 +739,68 @@ def get_notification_templates(self):
738739
def get_notification_friendly_name(self):
739740
return "Job"
740741

741-
@property
742-
def memcached_fact_key(self):
743-
return '{}'.format(self.inventory.id)
744-
745-
def memcached_fact_host_key(self, host_name):
746-
return '{}-{}'.format(self.inventory.id, base64.b64encode(host_name.encode('utf-8')))
747-
748-
def memcached_fact_modified_key(self, host_name):
749-
return '{}-{}-modified'.format(self.inventory.id, base64.b64encode(host_name.encode('utf-8')))
750-
751-
def _get_inventory_hosts(self, only=['name', 'ansible_facts', 'modified',]):
752-
return self.inventory.hosts.only(*only)
753-
754-
def _get_memcache_connection(self):
755-
return memcache.Client([settings.CACHES['default']['LOCATION']], debug=0)
756-
757-
def start_job_fact_cache(self):
758-
if not self.inventory:
759-
return
760-
761-
cache = self._get_memcache_connection()
762-
763-
host_names = []
764-
765-
for host in self._get_inventory_hosts():
766-
host_key = self.memcached_fact_host_key(host.name)
767-
modified_key = self.memcached_fact_modified_key(host.name)
768-
769-
if cache.get(modified_key) is None:
770-
if host.ansible_facts_modified:
771-
host_modified = host.ansible_facts_modified.replace(tzinfo=tzutc()).isoformat()
772-
else:
773-
host_modified = datetime.datetime.now(tzutc()).isoformat()
774-
cache.set(host_key, json.dumps(host.ansible_facts))
775-
cache.set(modified_key, host_modified)
776-
777-
host_names.append(host.name)
778-
779-
cache.set(self.memcached_fact_key, host_names)
780-
781-
def finish_job_fact_cache(self):
742+
def _get_inventory_hosts(self, only=['name', 'ansible_facts', 'ansible_facts_modified', 'modified',]):
782743
if not self.inventory:
783-
return
784-
785-
cache = self._get_memcache_connection()
744+
return []
745+
return self.inventory.hosts.only(*only)
786746

747+
def start_job_fact_cache(self, destination, modification_times, timeout=None):
748+
destination = os.path.join(destination, 'facts')
749+
os.makedirs(destination, mode=0700)
787750
hosts = self._get_inventory_hosts()
751+
if timeout is None:
752+
timeout = settings.ANSIBLE_FACT_CACHE_TIMEOUT
753+
if timeout > 0:
754+
# exclude hosts with fact data older than `settings.ANSIBLE_FACT_CACHE_TIMEOUT seconds`
755+
timeout = now() - datetime.timedelta(seconds=timeout)
756+
hosts = hosts.filter(ansible_facts_modified__gte=timeout)
788757
for host in hosts:
789-
host_key = self.memcached_fact_host_key(host.name)
790-
modified_key = self.memcached_fact_modified_key(host.name)
791-
792-
modified = cache.get(modified_key)
793-
if modified is None:
794-
cache.delete(host_key)
758+
filepath = os.sep.join(map(six.text_type, [destination, host.name]))
759+
if not os.path.realpath(filepath).startswith(destination):
760+
system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
795761
continue
796-
797-
# Save facts if cache is newer than DB
798-
modified = parser.parse(modified, tzinfos=[tzutc()])
799-
if not host.ansible_facts_modified or modified > host.ansible_facts_modified:
800-
ansible_facts = cache.get(host_key)
801-
try:
802-
ansible_facts = json.loads(ansible_facts)
803-
except Exception:
804-
ansible_facts = None
805-
806-
if ansible_facts is None:
807-
cache.delete(host_key)
808-
continue
809-
host.ansible_facts = ansible_facts
810-
host.ansible_facts_modified = modified
811-
if 'insights' in ansible_facts and 'system_id' in ansible_facts['insights']:
812-
host.insights_system_id = ansible_facts['insights']['system_id']
813-
host.save()
762+
with codecs.open(filepath, 'w', encoding='utf-8') as f:
763+
os.chmod(f.name, 0600)
764+
json.dump(host.ansible_facts, f)
765+
# make note of the time we wrote the file so we can check if it changed later
766+
modification_times[filepath] = os.path.getmtime(filepath)
767+
768+
def finish_job_fact_cache(self, destination, modification_times):
769+
destination = os.path.join(destination, 'facts')
770+
for host in self._get_inventory_hosts():
771+
filepath = os.sep.join(map(six.text_type, [destination, host.name]))
772+
if not os.path.realpath(filepath).startswith(destination):
773+
system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
774+
continue
775+
if os.path.exists(filepath):
776+
# If the file changed since we wrote it pre-playbook run...
777+
modified = os.path.getmtime(filepath)
778+
if modified > modification_times.get(filepath, 0):
779+
with codecs.open(filepath, 'r', encoding='utf-8') as f:
780+
try:
781+
ansible_facts = json.load(f)
782+
except ValueError:
783+
continue
784+
host.ansible_facts = ansible_facts
785+
host.ansible_facts_modified = now()
786+
if 'insights' in ansible_facts and 'system_id' in ansible_facts['insights']:
787+
host.insights_system_id = ansible_facts['insights']['system_id']
788+
host.save()
789+
system_tracking_logger.info(
790+
'New fact for inventory {} host {}'.format(
791+
smart_str(host.inventory.name), smart_str(host.name)),
792+
extra=dict(inventory_id=host.inventory.id, host_name=host.name,
793+
ansible_facts=host.ansible_facts,
794+
ansible_facts_modified=host.ansible_facts_modified.isoformat(),
795+
job_id=self.id))
796+
else:
797+
# if the file goes missing, ansible removed it (likely via clear_facts)
798+
host.ansible_facts = {}
799+
host.ansible_facts_modified = now()
814800
system_tracking_logger.info(
815-
'New fact for inventory {} host {}'.format(
816-
smart_str(host.inventory.name), smart_str(host.name)),
817-
extra=dict(inventory_id=host.inventory.id, host_name=host.name,
818-
ansible_facts=host.ansible_facts,
819-
ansible_facts_modified=host.ansible_facts_modified.isoformat(),
820-
job_id=self.id))
801+
'Facts cleared for inventory {} host {}'.format(
802+
smart_str(host.inventory.name), smart_str(host.name)))
803+
host.save()
821804

822805

823806
# Add on aliases for the non-related-model fields

awx/main/notifications/slack_backend.py

+14-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
# Copyright (c) 2016 Ansible, Inc.
22
# All Rights Reserved.
33

4+
import time
45
import logging
56
from slackclient import SlackClient
67

@@ -9,6 +10,7 @@
910
from awx.main.notifications.base import AWXBaseEmailBackend
1011

1112
logger = logging.getLogger('awx.main.notifications.slack_backend')
13+
WEBSOCKET_TIMEOUT = 30
1214

1315

1416
class SlackBackend(AWXBaseEmailBackend):
@@ -30,7 +32,18 @@ def open(self):
3032
if not self.connection.rtm_connect():
3133
if not self.fail_silently:
3234
raise Exception("Slack Notification Token is invalid")
33-
return True
35+
36+
start = time.time()
37+
time.clock()
38+
elapsed = 0
39+
while elapsed < WEBSOCKET_TIMEOUT:
40+
events = self.connection.rtm_read()
41+
if any(event['type'] == 'hello' for event in events):
42+
return True
43+
elapsed = time.time() - start
44+
time.sleep(0.5)
45+
46+
raise RuntimeError("Slack Notification unable to establish websocket connection after {} seconds".format(WEBSOCKET_TIMEOUT))
3447

3548
def close(self):
3649
if self.connection is None:

0 commit comments

Comments
 (0)