202 files changed, 1137 insertions, 2046 deletions
@@ -120,7 +120,7 @@ virtualenv_ansible:
 		mkdir $(VENV_BASE); \
 	fi; \
 	if [ ! -d "$(VENV_BASE)/ansible" ]; then \
-		virtualenv --system-site-packages $(VENV_BASE)/ansible && \
+		virtualenv -p python --system-site-packages $(VENV_BASE)/ansible && \
 		$(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
 		$(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==36.0.1 && \
 		$(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
@@ -133,10 +133,8 @@ virtualenv_awx:
 		mkdir $(VENV_BASE); \
 	fi; \
 	if [ ! -d "$(VENV_BASE)/awx" ]; then \
-		virtualenv --system-site-packages $(VENV_BASE)/awx && \
-		$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
-		$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==36.0.1 && \
-		$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
+		python36 -m ensurepip --upgrade && \
+		python36 -m venv --system-site-packages $(VENV_BASE)/awx; \
 	fi; \
 	fi
@@ -155,10 +153,8 @@ requirements_ansible_dev:
 requirements_isolated:
 	if [ ! -d "$(VENV_BASE)/awx" ]; then \
-		virtualenv --system-site-packages $(VENV_BASE)/awx && \
-		$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
-		$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==35.0.2 && \
-		$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
+		python36 -m ensurepip --upgrade && \
+		python36 -m venv --system-site-packages $(VENV_BASE)/awx; \
 	fi;
 	$(VENV_BASE)/awx/bin/pip install -r requirements/requirements_isolated.txt
@@ -195,7 +191,7 @@ version_file:
 	if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	python -c "import awx as awx; print awx.__version__" > /var/lib/awx/.awx_version; \
+	python -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \

 # Do any one-time init tasks.
 comma := ,
diff --git a/awx/api/filters.py b/awx/api/filters.py
index 2c8ed0009d..4aa2a8385d 100644
--- a/awx/api/filters.py
+++ b/awx/api/filters.py
@@ -65,7 +65,7 @@ class TypeFilterBackend(BaseFilterBackend):
             model = queryset.model
             model_type = get_type_for_model(model)
             if 'polymorphic_ctype' in get_all_field_names(model):
-                types_pks = set([v for k,v in types_map.items() if k in types])
+                types_pks = set([v for k, v in types_map.items() if k in types])
                 queryset = queryset.filter(polymorphic_ctype_id__in=types_pks)
             elif model_type in types:
                 queryset = queryset
@@ -192,7 +192,7 @@ class FieldLookupBackend(BaseFilterBackend):

     def value_to_python(self, model, lookup, value):
         try:
-            lookup = lookup.encode("ascii")
+            lookup.encode("ascii")
         except UnicodeEncodeError:
             raise ValueError("%r is not an allowed field name. Must be ascii encodable." % lookup)
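Porting note: the `value_to_python` hunk keeps the ASCII check but stops rebinding `lookup` to the encoded bytes — on py3, `str.encode("ascii")` is called purely for its UnicodeEncodeError side effect. A minimal standalone sketch (hypothetical helper, not AWX code):

    def validate_ascii_lookup(lookup: str) -> str:
        try:
            lookup.encode("ascii")  # raises UnicodeEncodeError for non-ASCII text
        except UnicodeEncodeError:
            raise ValueError("%r is not an allowed field name. Must be ascii encodable." % lookup)
        return lookup  # still str; the old py2 code rebound it to bytes

    assert validate_ascii_lookup("name__icontains") == "name__icontains"
    # validate_ascii_lookup("nämé")  # would raise ValueError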
@@ -363,12 +363,12 @@ class FieldLookupBackend(BaseFilterBackend):
             args.append(q)
         if search_filters and search_filter_relation == 'OR':
             q = Q()
-            for term, constrains in search_filters.iteritems():
+            for term, constrains in search_filters.items():
                 for constrain in constrains:
                     q |= Q(**{constrain: term})
             args.append(q)
         elif search_filters and search_filter_relation == 'AND':
-            for term, constrains in search_filters.iteritems():
+            for term, constrains in search_filters.items():
                 q_chain = Q()
                 for constrain in constrains:
                     q_chain |= Q(**{constrain: term})
diff --git a/awx/api/generics.py b/awx/api/generics.py
index abd0ae679d..2bc90373b9 100644
--- a/awx/api/generics.py
+++ b/awx/api/generics.py
@@ -6,7 +6,7 @@ import inspect
 import logging
 import time
 import six
-import urllib
+import urllib.parse

 # Django
 from django.conf import settings
@@ -91,8 +91,9 @@ class LoggedLoginView(auth_views.LoginView):
             ret.set_cookie('userLoggedIn', 'true')
             current_user = UserSerializer(self.request.user)
             current_user = JSONRenderer().render(current_user.data)
-            current_user = urllib.quote('%s' % current_user, '')
+            current_user = urllib.parse.quote('%s' % current_user, '')
             ret.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None)
+
             return ret
         else:
             ret.status_code = 401
@@ -304,7 +305,7 @@ class APIView(views.APIView):
         # submitted data was rejected.
         request_method = getattr(self, '_raw_data_request_method', None)
         response_status = getattr(self, '_raw_data_response_status', 0)
-        if request_method in ('POST', 'PUT', 'PATCH') and response_status in xrange(400, 500):
+        if request_method in ('POST', 'PUT', 'PATCH') and response_status in range(400, 500):
             return self.request.data.copy()
         return data
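Porting note: the two most common mechanical rewrites in this port appear above — `dict.iteritems()` is gone (py3's `items()` already returns a lazy view) and `xrange` is gone (py3's `range` is lazy). A standalone sketch of the equivalences:

    filters = {'name': ['name__icontains'], 'id': ['id__exact']}

    # py2: for term, constrains in filters.iteritems(): ...
    # py3: items() returns a lazy view, so this is the direct equivalent:
    for term, constrains in filters.items():
        print(term, constrains)

    # py2's xrange is py3's range: lazy, and int membership tests are O(1),
    # so `response_status in range(400, 500)` does not scan 100 values.
    assert 404 in range(400, 500)
    assert 500 not in range(400, 500)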
@@ -347,7 +348,7 @@ class GenericAPIView(generics.GenericAPIView, APIView):
         # form.
         if hasattr(self, '_raw_data_form_marker'):
             # Always remove read only fields from serializer.
-            for name, field in serializer.fields.items():
+            for name, field in list(serializer.fields.items()):
                 if getattr(field, 'read_only', None):
                     del serializer.fields[name]
             serializer._data = self.update_raw_data(serializer.data)
@@ -747,7 +748,7 @@ class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
     def update_raw_data(self, data):
         request_method = getattr(self, '_raw_data_request_method', None)
         response_status = getattr(self, '_raw_data_response_status', 0)
-        if request_method == 'POST' and response_status in xrange(400, 500):
+        if request_method == 'POST' and response_status in range(400, 500):
             return super(SubListAttachDetachAPIView, self).update_raw_data(data)
         return {'id': None}
diff --git a/awx/api/metadata.py b/awx/api/metadata.py
index d71b1cd1bd..c5421ff86a 100644
--- a/awx/api/metadata.py
+++ b/awx/api/metadata.py
@@ -157,7 +157,7 @@ class Metadata(metadata.SimpleMetadata):
         finally:
             view.request = request

-        for field, meta in actions[method].items():
+        for field, meta in list(actions[method].items()):
             if not isinstance(meta, dict):
                 continue
diff --git a/awx/api/parsers.py b/awx/api/parsers.py
index 5f26937c45..057f5437ad 100644
--- a/awx/api/parsers.py
+++ b/awx/api/parsers.py
@@ -5,6 +5,7 @@ import json
 # Django
 from django.conf import settings
 from django.utils import six
+from django.utils.encoding import smart_str
 from django.utils.translation import ugettext_lazy as _

 # Django REST Framework
@@ -25,7 +26,7 @@ class JSONParser(parsers.JSONParser):
         encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)

         try:
-            data = stream.read().decode(encoding)
+            data = smart_str(stream.read(), encoding=encoding)
             if not data:
                 return {}
             obj = json.loads(data, object_pairs_hook=OrderedDict)
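Porting note: the `list(...)` wrappers above are not cosmetic. py3 dict views are live, so deleting keys while iterating over `fields.items()` raises RuntimeError; snapshotting the items first makes mutation safe. A standalone sketch of the failure mode and the fix:

    fields = {'id': 'ro', 'name': 'rw', 'url': 'ro'}

    # for name, kind in fields.items():   # py3: RuntimeError after the first del
    #     if kind == 'ro':
    #         del fields[name]            # "dictionary changed size during iteration"

    # Snapshot the items first, then the dict can be mutated freely:
    for name, kind in list(fields.items()):
        if kind == 'ro':
            del fields[name]

    assert fields == {'name': 'rw'}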
diff --git a/awx/api/serializers.py b/awx/api/serializers.py
index 14d74af424..6a82f395d0 100644
--- a/awx/api/serializers.py
+++ b/awx/api/serializers.py
@@ -8,7 +8,7 @@ import logging
 import operator
 import re
 import six
-import urllib
+import urllib.parse

 from collections import OrderedDict
 from datetime import timedelta
@@ -40,6 +40,7 @@ from rest_framework.utils.serializer_helpers import ReturnList
 from polymorphic.models import PolymorphicModel

 # AWX
+from awx.main.access import get_user_capabilities
 from awx.main.constants import (
     SCHEDULEABLE_PROVIDERS,
     ANSI_SGR_PATTERN,
@@ -49,7 +50,6 @@ from awx.main.constants import (
 )
 from awx.main.models import * # noqa
 from awx.main.models.base import NEW_JOB_TYPE_CHOICES
-from awx.main.access import get_user_capabilities
 from awx.main.fields import ImplicitRoleField
 from awx.main.utils import (
     get_type_for_model, get_model_for_type, timestamp_apiformat,
@@ -203,11 +203,11 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):

     @staticmethod
     def _is_list_of_strings(x):
-        return isinstance(x, (list, tuple)) and all([isinstance(y, basestring) for y in x])
+        return isinstance(x, (list, tuple)) and all([isinstance(y, str) for y in x])

     @staticmethod
     def _is_extra_kwargs(x):
-        return isinstance(x, dict) and all([isinstance(k, basestring) and isinstance(v, dict) for k,v in x.items()])
+        return isinstance(x, dict) and all([isinstance(k, str) and isinstance(v, dict) for k,v in x.items()])

     @classmethod
     def _update_meta(cls, base, meta, other=None):
@@ -259,9 +259,7 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
         return super(BaseSerializerMetaclass, cls).__new__(cls, name, bases, attrs)


-class BaseSerializer(serializers.ModelSerializer):
-
-    __metaclass__ = BaseSerializerMetaclass
+class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         fields = ('id', 'type', 'url', 'related', 'summary_fields', 'created',
@@ -284,7 +282,7 @@ class BaseSerializer(serializers.ModelSerializer):
         # The following lines fix the problem of being able to pass JSON dict into PrimaryKeyRelatedField.
         data = kwargs.get('data', False)
         if data:
-            for field_name, field_instance in six.iteritems(self.fields):
+            for field_name, field_instance in self.fields.items():
                 if isinstance(field_instance, ManyRelatedField) and not field_instance.read_only:
                     if isinstance(data.get(field_name, False), dict):
                         raise serializers.ValidationError(_('Cannot use dictionary for %s' % field_name))
@@ -294,7 +292,7 @@ class BaseSerializer(serializers.ModelSerializer):
         """
         The request version component of the URL as an integer i.e., 1 or 2
         """
-        return get_request_version(self.context.get('request'))
+        return get_request_version(self.context.get('request')) or 1

     def get_type(self, obj):
         return get_type_for_model(self.Meta.model)
@@ -612,7 +610,7 @@ class BaseSerializer(serializers.ModelSerializer):
                         v2.extend(e)
                     else:
                         v2.append(e)
-                d[k] = map(force_text, v2)
+                d[k] = list(map(force_text, v2))
             raise ValidationError(d)
         return attrs
@@ -632,9 +630,7 @@ class EmptySerializer(serializers.Serializer):
     pass


-class BaseFactSerializer(BaseSerializer):
-
-    __metaclass__ = BaseSerializerMetaclass
+class BaseFactSerializer(BaseSerializer, metaclass=BaseSerializerMetaclass):

     def get_fields(self):
         ret = super(BaseFactSerializer, self).get_fields()
@@ -2139,10 +2135,10 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
             return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)

         if get_field_from_model_or_attrs('source') != 'scm':
-            redundant_scm_fields = filter(
+            redundant_scm_fields = list(filter(
                 lambda x: attrs.get(x, None),
                 ['source_project', 'source_path', 'update_on_project_update']
-            )
+            ))
             if redundant_scm_fields:
                 raise serializers.ValidationError(
                     {"detail": _("Cannot set %s if not SCM type." % ' '.join(redundant_scm_fields))}
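Porting note: the `BaseSerializer` rewrite is the py3 metaclass syntax change — a class-body `__metaclass__` attribute is silently ignored on py3, so the metaclass must move into the class header. A minimal standalone sketch (toy metaclass, not AWX code):

    class UpperAttrsMeta(type):
        """Toy metaclass: upper-cases non-dunder attribute names."""
        def __new__(mcls, name, bases, attrs):
            fixed = {k if k.startswith('__') else k.upper(): v for k, v in attrs.items()}
            return super().__new__(mcls, name, bases, fixed)

    # py2 spelling (a no-op on py3!):
    #   class Config(object):
    #       __metaclass__ = UpperAttrsMeta
    #       retries = 3

    # py3 spelling:
    class Config(metaclass=UpperAttrsMeta):
        retries = 3

    assert Config.RETRIES == 3 and not hasattr(Config, 'retries')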
@@ -2465,17 +2461,17 @@ class CredentialTypeSerializer(BaseSerializer):
                 field['help_text'] = _(field['help_text'])
             if field['type'] == 'become_method':
                 field.pop('type')
-                field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
+                field['choices'] = list(map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS))
         return value

     def filter_field_metadata(self, fields, method):
         # API-created/modified CredentialType kinds are limited to
         # `cloud` and `net`
         if method in ('PUT', 'POST'):
-            fields['kind']['choices'] = filter(
+            fields['kind']['choices'] = list(filter(
                 lambda choice: choice[0] in ('cloud', 'net'),
                 fields['kind']['choices']
-            )
+            ))
         return fields
@@ -2626,8 +2622,8 @@ class CredentialSerializer(BaseSerializer):
             raise serializers.ValidationError({"kind": _('"%s" is not a valid choice' % kind)})
         data['credential_type'] = credential_type.pk
         value = OrderedDict(
-            {'credential_type': credential_type}.items() +
-            super(CredentialSerializer, self).to_internal_value(data).items()
+            list({'credential_type': credential_type}.items()) +
+            list(super(CredentialSerializer, self).to_internal_value(data).items())
         )

         # Make a set of the keys in the POST/PUT payload
@@ -3487,12 +3483,16 @@ class AdHocCommandSerializer(UnifiedJobSerializer):
             ret['name'] = obj.module_name
         return ret

+    def validate(self, attrs):
+        ret = super(AdHocCommandSerializer, self).validate(attrs)
+        return ret
+
     def validate_extra_vars(self, value):
         redacted_extra_vars, removed_vars = extract_ansible_vars(value)
         if removed_vars:
             raise serializers.ValidationError(_(
                 "{} are prohibited from use in ad hoc commands."
-            ).format(", ".join(removed_vars)))
+            ).format(", ".join(sorted(removed_vars, reverse=True))))
         return vars_validate_or_raise(value)
@@ -3720,7 +3720,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
             for field in self.instance._meta.fields:
                 setattr(mock_obj, field.name, getattr(self.instance, field.name))
         field_names = set(field.name for field in self.Meta.model._meta.fields)
-        for field_name, value in attrs.items():
+        for field_name, value in list(attrs.items()):
             setattr(mock_obj, field_name, value)
             if field_name not in field_names:
                 attrs.pop(field_name)
@@ -4490,11 +4490,11 @@ class NotificationTemplateSerializer(BaseSerializer):
         model = NotificationTemplate
         fields = ('*', 'organization', 'notification_type', 'notification_configuration')

-    type_map = {"string": (str, unicode),
+    type_map = {"string": (str,),
                 "int": (int,),
                 "bool": (bool,),
                 "list": (list,),
-                "password": (str, unicode),
+                "password": (str,),
                 "object": (dict, OrderedDict)}

     def to_representation(self, obj):
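Porting note: the `list(map(...))`/`list(filter(...))` wrappers exist because py3's `map` and `filter` return single-use iterators, which break callers that expect a real list (JSON rendering, `len()`, re-iteration). A standalone sketch of the trap:

    choices = [('ssh', 'Machine'), ('scm', 'Source Control'), ('net', 'Network')]

    lazy = map(lambda c: c[0], choices)   # py3: an iterator, not a list
    assert list(lazy) == ['ssh', 'scm', 'net']
    assert list(lazy) == []               # already exhausted -- the classic py3 trap

    kinds = list(map(lambda c: c[0], choices))   # materialized once, reusable
    assert kinds == ['ssh', 'scm', 'net'] and len(kinds) == 3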
@@ -4877,7 +4877,7 @@ class ActivityStreamSerializer(BaseSerializer):
         for key in summary_dict.keys():
             if 'id' not in summary_dict[key]:
                 summary_dict[key] = summary_dict[key] + ('id',)
-        field_list = summary_dict.items()
+        field_list = list(summary_dict.items())
         # Needed related fields that are not in the default summary fields
         field_list += [
             ('workflow_job_template_node', ('id', 'unified_job_template_id')),
@@ -4897,7 +4897,7 @@ class ActivityStreamSerializer(BaseSerializer):

     def get_fields(self):
         ret = super(ActivityStreamSerializer, self).get_fields()
-        for key, field in ret.items():
+        for key, field in list(ret.items()):
             if key == 'changes':
                 field.help_text = _('A summary of the new and changed values when an object is created, updated, or deleted')
             if key == 'object1':
@@ -5039,7 +5039,7 @@ class FactVersionSerializer(BaseFactSerializer):
         }
         res['fact_view'] = '%s?%s' % (
             reverse('api:host_fact_compare_view', kwargs={'pk': obj.host.pk}, request=self.context.get('request')),
-            urllib.urlencode(params)
+            urllib.parse.urlencode(params)
         )
         return res
diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py
index 18f15fb61c..c883872198 100644
--- a/awx/api/views/__init__.py
+++ b/awx/api/views/__init__.py
@@ -517,7 +517,7 @@ class AuthView(APIView):
         from rest_framework.reverse import reverse
         data = OrderedDict()
         err_backend, err_message = request.session.get('social_auth_error', (None, None))
-        auth_backends = load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items()
+        auth_backends = list(load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items())
         # Return auth backends in consistent order: Google, GitHub, SAML.
         auth_backends.sort(key=lambda x: 'g' if x[0] == 'google-oauth2' else x[0])
         for name, backend in auth_backends:
@@ -2308,7 +2308,7 @@ class JobTemplateLaunch(RetrieveAPIView):
                 raise ParseError({key: [msg], 'credentials': [msg]})

             # add the deprecated credential specified in the request
-            if not isinstance(prompted_value, Iterable) or isinstance(prompted_value, basestring):
+            if not isinstance(prompted_value, Iterable) or isinstance(prompted_value, str):
                 prompted_value = [prompted_value]

             # If user gave extra_credentials, special case to use exactly
@@ -4459,7 +4459,7 @@ class RoleChildrenList(SubListAPIView):
 # in URL patterns and reverse URL lookups, converting CamelCase names to
 # lowercase_with_underscore (e.g. MyView.as_view() becomes my_view).
 this_module = sys.modules[__name__]
-for attr, value in locals().items():
+for attr, value in list(locals().items()):
     if isinstance(value, type) and issubclass(value, APIView):
         name = camelcase_to_underscore(attr)
         view = value.as_view()
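Porting note: py3 split py2's `urllib`/`urlparse` across `urllib.request`, `urllib.parse`, and `urllib.error`; everything this diff touches (`quote`, `unquote`, `urlencode`, and `urlparse` in awx/conf/fields.py below) now lives in `urllib.parse`. A standalone sketch of the renames:

    import urllib.parse

    # py2: urllib.quote / urllib.unquote / urllib.urlencode / urlparse.urlparse
    assert urllib.parse.quote('a b/c', '') == 'a%20b%2Fc'
    assert urllib.parse.unquote('a%20b%2Fc') == 'a b/c'
    assert urllib.parse.urlencode({'host': 'h1', 'module': 'ansible'}) == 'host=h1&module=ansible'
    assert urllib.parse.urlparse('amqp://guest:pw@localhost:5672//').hostname == 'localhost'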
diff --git a/awx/api/views/root.py b/awx/api/views/root.py
index d33495d6db..3699dc423a 100644
--- a/awx/api/views/root.py
+++ b/awx/api/views/root.py
@@ -2,6 +2,7 @@
 # All Rights Reserved.

 import logging
+import operator
 import json

 from collections import OrderedDict
@@ -161,7 +162,7 @@ class ApiV1PingView(APIView):
         for instance in Instance.objects.all():
             response['instances'].append(dict(node=instance.hostname, heartbeat=instance.modified,
                                               capacity=instance.capacity, version=instance.version))
-        response['instances'].sort()
+        response['instances'].sort(key=operator.itemgetter('node'))
         response['instance_groups'] = []
         for instance_group in InstanceGroup.objects.all():
             response['instance_groups'].append(dict(name=instance_group.name,
diff --git a/awx/conf/fields.py b/awx/conf/fields.py
index b98b925447..22aac520c5 100644
--- a/awx/conf/fields.py
+++ b/awx/conf/fields.py
@@ -1,6 +1,6 @@
 # Python
 import logging
-import urlparse
+import urllib.parse as urlparse

 from collections import OrderedDict

 # Django
@@ -71,7 +71,7 @@ class StringListBooleanField(ListField):
                 return False
             elif value in NullBooleanField.NULL_VALUES:
                 return None
-            elif isinstance(value, basestring):
+            elif isinstance(value, str):
                 return self.child.to_representation(value)
         except TypeError:
             pass
@@ -88,7 +88,7 @@ class StringListBooleanField(ListField):
                 return False
             elif data in NullBooleanField.NULL_VALUES:
                 return None
-            elif isinstance(data, basestring):
+            elif isinstance(data, str):
                 return self.child.run_validation(data)
         except TypeError:
             pass
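Porting note: the ping-view sort needs a key on py3 because dicts are no longer orderable (`'<' not supported between instances of 'dict'`); and if `sorted()` is used instead of `list.sort()`, its return value must be bound — `sorted()` never sorts in place. A standalone sketch:

    import operator

    instances = [{'node': 'b', 'capacity': 10}, {'node': 'a', 'capacity': 5}]

    # py2 allowed instances.sort() to compare the dicts ad hoc; py3 raises
    # TypeError, so sort by an explicit key instead:
    instances.sort(key=operator.itemgetter('node'))          # in place
    assert [i['node'] for i in instances] == ['a', 'b']

    # Equivalent with sorted(): it returns a NEW list, so bind the result --
    # a bare `sorted(instances, key=...)` statement is a silent no-op.
    instances = sorted(instances, key=operator.itemgetter('node'), reverse=True)
    assert [i['node'] for i in instances] == ['b', 'a']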
diff --git a/awx/conf/management/commands/migrate_to_database_settings.py b/awx/conf/management/commands/migrate_to_database_settings.py
index ec1da2ce1c..eb6e0f8629 100644
--- a/awx/conf/management/commands/migrate_to_database_settings.py
+++ b/awx/conf/management/commands/migrate_to_database_settings.py
@@ -460,10 +460,10 @@ class Command(BaseCommand):
             elif file_to_comment not in to_comment_patterns:
                 to_comment_patterns.append(file_to_comment)
         # Run once in dry-run mode to catch any errors from updating the files.
-        diffs = comment_assignments(to_comment_patterns, to_comment.keys(), dry_run=True, backup_suffix=self.backup_suffix)
+        diffs = comment_assignments(to_comment_patterns, list(to_comment.keys()), dry_run=True, backup_suffix=self.backup_suffix)
         # Then, if really updating, run again.
         if not self.dry_run and not self.no_comment:
-            diffs = comment_assignments(to_comment_patterns, to_comment.keys(), dry_run=False, backup_suffix=self.backup_suffix)
+            diffs = comment_assignments(to_comment_patterns, list(to_comment.keys()), dry_run=False, backup_suffix=self.backup_suffix)
             if license_file_to_comment:
                 diffs.extend(self._comment_license_file(dry_run=False))
             if local_settings_file_to_comment:
diff --git a/awx/conf/models.py b/awx/conf/models.py
index d37b634fe0..2859650f54 100644
--- a/awx/conf/models.py
+++ b/awx/conf/models.py
@@ -33,7 +33,7 @@ class Setting(CreatedModifiedModel):
         on_delete=models.CASCADE,
     ))

-    def __unicode__(self):
+    def __str__(self):
         try:
             json_value = json.dumps(self.value)
         except ValueError:
diff --git a/awx/conf/settings.py b/awx/conf/settings.py
index cb74d45477..4719e3fe4a 100644
--- a/awx/conf/settings.py
+++ b/awx/conf/settings.py
@@ -6,11 +6,9 @@ import re
 import sys
 import threading
 import time
-import StringIO
 import traceback
-import urllib
-
-import six
+import urllib.parse
+from io import StringIO

 # Django
 from django.conf import LazySettings
@@ -68,7 +66,7 @@ def normalize_broker_url(value):
     match = re.search('(amqp://[^:]+:)(.*)', parts[0])
     if match:
         prefix, password = match.group(1), match.group(2)
-        parts[0] = prefix + urllib.quote(password)
+        parts[0] = prefix + urllib.parse.quote(password)
     return '@'.join(parts)
@@ -98,14 +96,14 @@ def _ctit_db_wrapper(trans_safe=False):
         # We want the _full_ traceback with the context
         # First we get the current call stack, which constitutes the "top",
         # it has the context up to the point where the context manager is used
-        top_stack = StringIO.StringIO()
+        top_stack = StringIO()
         traceback.print_stack(file=top_stack)
         top_lines = top_stack.getvalue().strip('\n').split('\n')
         top_stack.close()
         # Get "bottom" stack from the local error that happened
         # inside of the "with" block this wraps
         exc_type, exc_value, exc_traceback = sys.exc_info()
-        bottom_stack = StringIO.StringIO()
+        bottom_stack = StringIO()
         traceback.print_tb(exc_traceback, file=bottom_stack)
         bottom_lines = bottom_stack.getvalue().strip('\n').split('\n')
         # Glue together top and bottom where overlap is found
@@ -169,15 +167,6 @@ class EncryptedCacheProxy(object):
     def get(self, key, **kwargs):
         value = self.cache.get(key, **kwargs)
         value = self._handle_encryption(self.decrypter, key, value)
-
-        # python-memcached auto-encodes unicode on cache set in python2
-        # https://github.com/linsomniac/python-memcached/issues/79
-        # https://github.com/linsomniac/python-memcached/blob/288c159720eebcdf667727a859ef341f1e908308/memcache.py#L961
-        if six.PY2 and isinstance(value, six.binary_type):
-            try:
-                six.text_type(value)
-            except UnicodeDecodeError:
-                value = value.decode('utf-8')
         logger.debug('cache get(%r, %r) -> %r', key, empty, filter_sensitive(self.registry, key, value))
         return value
diff --git a/awx/conf/signals.py b/awx/conf/signals.py
index 453140241d..698b758b98 100644
--- a/awx/conf/signals.py
+++ b/awx/conf/signals.py
@@ -9,15 +9,11 @@ from django.core.cache import cache
 from django.dispatch import receiver

 # Tower
-import awx.main.signals
 from awx.conf import settings_registry
 from awx.conf.models import Setting
-from awx.conf.serializers import SettingSerializer

 logger = logging.getLogger('awx.conf.signals')

-awx.main.signals.model_serializer_mapping[Setting] = SettingSerializer
-
 __all__ = []
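Porting note: py2's `StringIO.StringIO` module is gone; on py3 the in-memory text buffer is `io.StringIO` (and `io.BytesIO` for bytes), which is what the `_ctit_db_wrapper` hunks switch to. A standalone sketch of the traceback-capture idiom used above:

    import traceback
    from io import StringIO  # py2: import StringIO; StringIO.StringIO()

    buff = StringIO()
    traceback.print_stack(file=buff)      # any text-mode file-like sink works
    lines = buff.getvalue().strip('\n').split('\n')
    buff.close()
    assert lines and 'File' in lines[0]   # frames look like '  File "...", line N, in ...'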
diff --git a/awx/conf/tests/functional/test_api.py b/awx/conf/tests/functional/test_api.py
index 1be22ee2dc..15789c501b 100644
--- a/awx/conf/tests/functional/test_api.py
+++ b/awx/conf/tests/functional/test_api.py
@@ -1,5 +1,5 @@
 import pytest
-import mock
+from unittest import mock

 from rest_framework import serializers
diff --git a/awx/conf/tests/functional/test_reencrypt_migration.py b/awx/conf/tests/functional/test_reencrypt_migration.py
deleted file mode 100644
index e138ddb77f..0000000000
--- a/awx/conf/tests/functional/test_reencrypt_migration.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright (c) 2017 Ansible, Inc.
-# All Rights Reserved.
-import pytest
-import mock
-
-from django.apps import apps
-from awx.conf.migrations._reencrypt import (
-    replace_aesecb_fernet,
-    encrypt_field,
-    decrypt_field,
-)
-from awx.conf.settings import Setting
-from awx.main.utils import decrypt_field as new_decrypt_field
-
-
-@pytest.mark.django_db
-@pytest.mark.parametrize("old_enc, new_enc, value", [
-    ('$encrypted$UTF8$AES', '$encrypted$UTF8$AESCBC$', u'Iñtërnâtiônàlizætiøn'),
-    ('$encrypted$AES$', '$encrypted$AESCBC$', 'test'),
-])
-def test_settings(old_enc, new_enc, value):
-    with mock.patch('awx.conf.models.encrypt_field', encrypt_field):
-        with mock.patch('awx.conf.settings.decrypt_field', decrypt_field):
-            setting = Setting.objects.create(key='SOCIAL_AUTH_GITHUB_SECRET', value=value)
-            assert setting.value.startswith(old_enc)
-
-            replace_aesecb_fernet(apps, None)
-            setting.refresh_from_db()
-
-            assert setting.value.startswith(new_enc)
-            assert new_decrypt_field(setting, 'value') == value
-
-            # This is here for a side-effect.
-            # Exception if the encryption type of AESCBC is not properly skipped, ensures
-            # our `startswith` calls don't have typos
-            replace_aesecb_fernet(apps, None)
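Porting note: the `mock` import changes reflect that the third-party `mock` backport became `unittest.mock` in the py3 standard library; only the import moves, the API is unchanged. A standalone sketch (hypothetical fake module for illustration):

    import sys
    import types
    from unittest import mock  # py2: `import mock` (PyPI backport)

    fake_awx = types.SimpleNamespace(__version__='1.0.0')  # stand-in, not the real package
    with mock.patch.dict(sys.modules, {'awx': fake_awx}):
        import awx
        assert awx.__version__ == '1.0.0'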
diff --git a/awx/conf/tests/unit/test_settings.py b/awx/conf/tests/unit/test_settings.py
index d290228056..2ff7dd685c 100644
--- a/awx/conf/tests/unit/test_settings.py
+++ b/awx/conf/tests/unit/test_settings.py
@@ -4,6 +4,7 @@
 # All Rights Reserved.

 from contextlib import contextmanager
+import codecs
 from uuid import uuid4
 import time
@@ -67,7 +68,7 @@ def test_cached_settings_unicode_is_auto_decoded(settings):
     # https://github.com/linsomniac/python-memcached/issues/79
     # https://github.com/linsomniac/python-memcached/blob/288c159720eebcdf667727a859ef341f1e908308/memcache.py#L961
-    value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8')  # this simulates what python-memcached does on cache.set()
+    value = 'Iñtërnâtiônàlizætiøn'  # this simulates what python-memcached does on cache.set()
     settings.cache.set('DEBUG', value)
     assert settings.cache.get('DEBUG') == six.u('Iñtërnâtiônàlizætiøn')
@@ -262,7 +263,7 @@ def test_setting_from_db_with_unicode(settings, mocker, encrypted):
         encrypted=encrypted
     )
     # this simulates a bug in python-memcached; see https://github.com/linsomniac/python-memcached/issues/79
-    value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8')
+    value = 'Iñtërnâtiônàlizætiøn'
     setting_from_db = mocker.Mock(id=1, key='AWX_SOME_SETTING', value=value)
     mocks = mocker.Mock(**{
@@ -272,8 +273,8 @@ def test_setting_from_db_with_unicode(settings, mocker, encrypted):
         }),
     })
     with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
-        assert settings.AWX_SOME_SETTING == six.u('Iñtërnâtiônàlizætiøn')
-        assert settings.cache.get('AWX_SOME_SETTING') == six.u('Iñtërnâtiônàlizætiøn')
+        assert settings.AWX_SOME_SETTING == 'Iñtërnâtiônàlizætiøn'
+        assert settings.cache.get('AWX_SOME_SETTING') == 'Iñtërnâtiônàlizætiøn'


 @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -434,7 +435,7 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):

     def rot13(obj, attribute):
         assert obj.pk == 123
-        return getattr(obj, attribute).encode('rot13')
+        return codecs.encode(getattr(obj, attribute), 'rot_13')

     native_cache = LocMemCache(str(uuid4()), {})
     cache = EncryptedCacheProxy(
@@ -471,7 +472,7 @@ def test_readonly_sensitive_cache_data_is_encrypted(settings):

     def rot13(obj, attribute):
         assert obj.pk is None
-        return getattr(obj, attribute).encode('rot13')
+        return codecs.encode(getattr(obj, attribute), 'rot_13')

     native_cache = LocMemCache(str(uuid4()), {})
     cache = EncryptedCacheProxy(
diff --git a/awx/conf/utils.py b/awx/conf/utils.py
index e984502691..7184cddbb2 100755
--- a/awx/conf/utils.py
+++ b/awx/conf/utils.py
@@ -102,7 +102,7 @@ def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup
     if not dry_run:
         if backup_filename:
             shutil.copy2(filename, backup_filename)
-        with open(filename, 'wb') as fileobj:
+        with open(filename, 'w') as fileobj:
             fileobj.write(new_file_data)
     return '\n'.join(diff_lines)
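Porting note: `str.encode('rot13')` worked on py2 because text codecs could return str; on py3, `str.encode` must produce bytes, so text-to-text codecs moved behind `codecs.encode`/`codecs.decode`. A standalone sketch:

    import codecs

    secret = 'sensitive-value'
    scrambled = codecs.encode(secret, 'rot_13')   # py2 spelling: secret.encode('rot13')
    assert scrambled == 'frafvgvir-inyhr'
    assert codecs.decode(scrambled, 'rot_13') == secret  # rot13 is its own inverse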
diff --git a/awx/conf/views.py b/awx/conf/views.py
index e10fe7ad32..3e654f2add 100644
--- a/awx/conf/views.py
+++ b/awx/conf/views.py
@@ -72,7 +72,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):

     def get_queryset(self):
         self.category_slug = self.kwargs.get('category_slug', 'all')
-        all_category_slugs = settings_registry.get_registered_categories(features_enabled=get_licensed_features()).keys()
+        all_category_slugs = list(settings_registry.get_registered_categories(features_enabled=get_licensed_features()).keys())
         for slug_to_delete in VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]:
             all_category_slugs.remove(slug_to_delete)
         if self.request.user.is_superuser or getattr(self.request.user, 'is_system_auditor', False):
@@ -123,7 +123,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
             if key == 'LICENSE' or settings_registry.is_setting_read_only(key):
                 continue
             if settings_registry.is_setting_encrypted(key) and \
-                    isinstance(value, basestring) and \
+                    isinstance(value, str) and \
                     value.startswith('$encrypted$'):
                 continue
             setattr(serializer.instance, key, value)
@@ -210,7 +210,7 @@ class SettingLoggingTest(GenericAPIView):
 # in URL patterns and reverse URL lookups, converting CamelCase names to
 # lowercase_with_underscore (e.g. MyView.as_view() becomes my_view).
 this_module = sys.modules[__name__]
-for attr, value in locals().items():
+for attr, value in list(locals().items()):
     if isinstance(value, type) and issubclass(value, APIView):
         name = camelcase_to_underscore(attr)
         view = value.as_view()
diff --git a/awx/lib/awx_display_callback/events.py b/awx/lib/awx_display_callback/events.py
index d22b93b8e4..329db14877 100644
--- a/awx/lib/awx_display_callback/events.py
+++ b/awx/lib/awx_display_callback/events.py
@@ -35,8 +35,6 @@ except ImportError:
         os.environ['VIRTUAL_ENV']
     ))

-from six.moves import xrange
-
 __all__ = ['event_context']
@@ -154,7 +152,7 @@ class EventContext(object):
         if event not in ('playbook_on_stats',) and "res" in event_data and len(str(event_data['res'])) > max_res:
             event_data['res'] = {}
         event_dict = dict(event=event, event_data=event_data)
-        for key in event_data.keys():
+        for key in list(event_data.keys()):
             if key in ('job_id', 'ad_hoc_command_id', 'project_update_id', 'uuid', 'parent_uuid', 'created',):
                 event_dict[key] = event_data.pop(key)
             elif key in ('verbosity', 'pid'):
@@ -165,11 +163,11 @@ class EventContext(object):
         return {}

     def dump(self, fileobj, data, max_width=78, flush=False):
-        b64data = base64.b64encode(json.dumps(data))
+        b64data = base64.b64encode(json.dumps(data).encode('utf-8')).decode()
         with self.display_lock:
             # pattern corresponding to OutputEventFilter expectation
             fileobj.write(u'\x1b[K')
-            for offset in xrange(0, len(b64data), max_width):
+            for offset in range(0, len(b64data), max_width):
                 chunk = b64data[offset:offset + max_width]
                 escaped_chunk = u'{}\x1b[{}D'.format(chunk, len(chunk))
                 fileobj.write(escaped_chunk)
@@ -179,7 +177,7 @@ class EventContext(object):

     def dump_begin(self, fileobj):
         begin_dict = self.get_begin_dict()
-        self.cache.set(":1:ev-{}".format(begin_dict['uuid']), begin_dict)
+        self.cache.set(":1:ev-{}".format(begin_dict['uuid']), json.dumps(begin_dict))
         self.dump(fileobj, {'uuid': begin_dict['uuid']})

     def dump_end(self, fileobj):
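Porting note: the `dump()` change is the py3 str/bytes split in miniature — `base64.b64encode` only accepts bytes and returns bytes, so text must be encoded on the way in and decoded on the way out. A standalone sketch:

    import base64
    import json

    payload = {'uuid': 'abc-123', 'event': 'playbook_on_start'}

    # py2: base64.b64encode(json.dumps(payload)) -- str in, str out.
    # py3: encode the JSON text to bytes first, then decode the result to text:
    b64data = base64.b64encode(json.dumps(payload).encode('utf-8')).decode()
    assert isinstance(b64data, str)
    assert json.loads(base64.b64decode(b64data)) == payload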
diff --git a/awx/lib/tests/test_display_callback.py b/awx/lib/tests/test_display_callback.py
index d07a6f4604..01ed0bbd74 100644
--- a/awx/lib/tests/test_display_callback.py
+++ b/awx/lib/tests/test_display_callback.py
@@ -5,11 +5,11 @@ from __future__ import absolute_import

 from collections import OrderedDict
 import json
-import mock
 import os
 import shutil
 import sys
 import tempfile
+from unittest import mock

 import pytest
diff --git a/awx/main/access.py b/awx/main/access.py
index 1dfbb924a2..e7daf2db5e 100644
--- a/awx/main/access.py
+++ b/awx/main/access.py
@@ -1397,6 +1397,8 @@ class JobTemplateAccess(BaseAccess):
         ]
         for k, v in data.items():
+            if k not in [x.name for x in obj._meta.concrete_fields]:
+                continue
             if hasattr(obj, k) and getattr(obj, k) != v:
                 if k not in field_whitelist and v != getattr(obj, '%s_id' % k, None) \
                         and not (hasattr(obj, '%s_id' % k) and getattr(obj, '%s_id' % k) is None and v == ''):  # Equate '' to None in the case of foreign keys
diff --git a/awx/main/consumers.py b/awx/main/consumers.py
index eb2afe9ab0..aee25e8ff9 100644
--- a/awx/main/consumers.py
+++ b/awx/main/consumers.py
@@ -4,6 +4,7 @@ import logging

 from channels import Group
 from channels.auth import channel_session_user_from_http, channel_session_user
+from django.utils.encoding import smart_str
 from django.http.cookie import parse_cookie
 from django.core.serializers.json import DjangoJSONEncoder
@@ -30,7 +31,7 @@ def ws_connect(message):
     # store the valid CSRF token from the cookie so we can compare it later
     # on ws_receive
     cookie_token = parse_cookie(
-        headers.get('cookie')
+        smart_str(headers.get(b'cookie'))
     ).get('csrftoken')
     if cookie_token:
         message.channel_session[XRF_KEY] = cookie_token
diff --git a/awx/main/dispatch/__init__.py b/awx/main/dispatch/__init__.py
index ac4ef421df..50f912427e 100644
--- a/awx/main/dispatch/__init__.py
+++ b/awx/main/dispatch/__init__.py
@@ -2,4 +2,4 @@ from django.conf import settings


 def get_local_queuename():
-    return settings.CLUSTER_HOST_ID.encode('utf-8')
+    return settings.CLUSTER_HOST_ID
diff --git a/awx/main/dispatch/pool.py b/awx/main/dispatch/pool.py
index e59e556fa9..391809097d 100644
--- a/awx/main/dispatch/pool.py
+++ b/awx/main/dispatch/pool.py
@@ -8,7 +8,7 @@ from uuid import uuid4
 import collections
 from multiprocessing import Process
 from multiprocessing import Queue as MPQueue
-from Queue import Full as QueueFull, Empty as QueueEmpty
+from queue import Full as QueueFull, Empty as QueueEmpty

 from django.conf import settings
 from django.db import connection as django_connection, connections
@@ -129,7 +129,7 @@ class PoolWorker(object):
         # the task at [0] is the one that's running right now (or is about to
         # be running)
         if len(self.managed_tasks):
-            return self.managed_tasks[self.managed_tasks.keys()[0]]
+            return self.managed_tasks[list(self.managed_tasks.keys())[0]]
         return None
@@ -180,7 +180,7 @@ class WorkerPool(object):
         class MessagePrinter(awx.main.dispatch.worker.BaseWorker):

             def perform_work(self, body):
-                print body
+                print(body)

         pool = WorkerPool(min_workers=4) # spawn four worker processes
         pool.init_workers(MessagePrint().work_loop)
@@ -253,7 +253,7 @@ class WorkerPool(object):
         return tmpl.render(pool=self, workers=self.workers, meta=self.debug_meta)

     def write(self, preferred_queue, body):
-        queue_order = sorted(range(len(self.workers)), cmp=lambda x, y: -1 if x==preferred_queue else 0)
+        queue_order = sorted(range(len(self.workers)), key=lambda x: -1 if x==preferred_queue else x)
         write_attempt_order = []
         for queue_actual in queue_order:
             try:
@@ -365,7 +365,7 @@ class AutoscalePool(WorkerPool):
             running_uuids = []
             for worker in self.workers:
                 worker.calculate_managed_tasks()
-                running_uuids.extend(worker.managed_tasks.keys())
+                running_uuids.extend(list(worker.managed_tasks.keys()))
             try:
                 reaper.reap(excluded_uuids=running_uuids)
             except Exception:
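Porting note: the `write()` hunk covers another py3 removal — `sorted()` lost its `cmp=` argument, leaving only `key=`. The intent here is "preferred queue first, everyone else in natural order", which a key function expresses directly. A standalone sketch:

    def queue_order(n_workers, preferred):
        # py2: sorted(range(n), cmp=lambda x, y: -1 if x == preferred else 0)
        # py3: a key that sorts the preferred index before the rest; -1 works
        # because the remaining keys are their own non-negative indices.
        return sorted(range(n_workers), key=lambda x: -1 if x == preferred else x)

    assert queue_order(4, preferred=2) == [2, 0, 1, 3]
    assert queue_order(4, preferred=0) == [0, 1, 2, 3]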
+ print("Run this everywhere!") """ def __init__(self, queue=None, exchange_type=None): diff --git a/awx/main/dispatch/worker/base.py b/awx/main/dispatch/worker/base.py index f878b459c4..c9674caaad 100644 --- a/awx/main/dispatch/worker/base.py +++ b/awx/main/dispatch/worker/base.py @@ -5,7 +5,7 @@ import os import logging import signal from uuid import UUID -from Queue import Empty as QueueEmpty +from queue import Empty as QueueEmpty from django import db from kombu import Producer diff --git a/awx/main/expect/isolated_manager.py b/awx/main/expect/isolated_manager.py index af0fa2ed39..9aee4703db 100644 --- a/awx/main/expect/isolated_manager.py +++ b/awx/main/expect/isolated_manager.py @@ -1,6 +1,5 @@ import base64 import codecs -import StringIO import json import os import shutil @@ -9,8 +8,10 @@ import tempfile import time import logging from distutils.version import LooseVersion as Version +from io import StringIO from django.conf import settings +from django.utils.encoding import smart_bytes, smart_str import awx from awx.main.expect import run @@ -144,7 +145,7 @@ class IsolatedManager(object): # if an ssh private key fifo exists, read its contents and delete it if self.ssh_key_path: - buff = StringIO.StringIO() + buff = StringIO() with open(self.ssh_key_path, 'r') as fifo: for line in fifo: buff.write(line) @@ -156,7 +157,10 @@ class IsolatedManager(object): # into a variable, and will replicate the data into a named pipe on the # isolated instance secrets_path = os.path.join(self.private_data_dir, 'env') - run.open_fifo_write(secrets_path, base64.b64encode(json.dumps(secrets))) + run.open_fifo_write( + secrets_path, + smart_str(base64.b64encode(smart_bytes(json.dumps(secrets)))) + ) self.build_isolated_job_data() @@ -176,7 +180,7 @@ class IsolatedManager(object): args = self._build_args('run_isolated.yml', '%s,' % self.host, extra_vars) if self.instance.verbosity: args.append('-%s' % ('v' * min(5, self.instance.verbosity))) - buff = StringIO.StringIO() + buff = StringIO() logger.debug('Starting job {} on isolated host with `run_isolated.yml` playbook.'.format(self.instance.id)) status, rc = IsolatedManager.run_pexpect( args, self.awx_playbook_path(), self.management_env, buff, @@ -246,7 +250,7 @@ class IsolatedManager(object): os.makedirs(self.path_to('artifacts', 'job_events'), mode=stat.S_IXUSR + stat.S_IWUSR + stat.S_IRUSR) def _missing_artifacts(self, path_list, output): - missing_artifacts = filter(lambda path: not os.path.exists(path), path_list) + missing_artifacts = list(filter(lambda path: not os.path.exists(path), path_list)) for path in missing_artifacts: self.stdout_handle.write('ansible did not exit cleanly, missing `{}`.\n'.format(path)) if missing_artifacts: @@ -284,7 +288,7 @@ class IsolatedManager(object): status = 'failed' output = '' rc = None - buff = StringIO.StringIO() + buff = StringIO() last_check = time.time() seek = 0 job_timeout = remaining = self.job_timeout @@ -305,7 +309,7 @@ class IsolatedManager(object): time.sleep(1) continue - buff = StringIO.StringIO() + buff = StringIO() logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id)) status, rc = IsolatedManager.run_pexpect( args, self.awx_playbook_path(), self.management_env, buff, @@ -342,7 +346,7 @@ class IsolatedManager(object): elif status == 'failed': # if we were unable to retrieve job reults from the isolated host, # print stdout of the `check_isolated.yml` playbook for clues - self.stdout_handle.write(output) + self.stdout_handle.write(smart_str(output)) 
@@ -357,7 +361,7 @@ class IsolatedManager(object):
         }
         args = self._build_args('clean_isolated.yml', '%s,' % self.host, extra_vars)
         logger.debug('Cleaning up job {} on isolated host with `clean_isolated.yml` playbook.'.format(self.instance.id))
-        buff = StringIO.StringIO()
+        buff = StringIO()
         timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
         status, rc = IsolatedManager.run_pexpect(
             args, self.awx_playbook_path(), self.management_env, buff,
diff --git a/awx/main/expect/run.py b/awx/main/expect/run.py
index 96d7e6ce90..b4f0b094e2 100755
--- a/awx/main/expect/run.py
+++ b/awx/main/expect/run.py
@@ -4,7 +4,6 @@ import argparse
 import base64
 import codecs
 import collections
-import StringIO
 import logging
 import json
 import os
@@ -13,12 +12,12 @@ import pipes
 import re
 import signal
 import sys
-import thread
+import _thread
 import time
+from io import StringIO

 import pexpect
 import psutil
-import six

 logger = logging.getLogger('awx.main.utils.expect')
@@ -49,7 +48,7 @@ def open_fifo_write(path, data):
     reads data from the pipe.
     '''
     os.mkfifo(path, 0o600)
-    thread.start_new_thread(lambda p, d: open(p, 'w').write(d), (path, data))
+    _thread.start_new_thread(lambda p, d: open(p, 'w').write(d), (path, data))


 def run_pexpect(args, cwd, env, logfile,
@@ -97,14 +96,8 @@ def run_pexpect(args, cwd, env, logfile,
     # enforce usage of an OrderedDict so that the ordering of elements in
     # `keys()` matches `values()`.
     expect_passwords = collections.OrderedDict(expect_passwords)
-    password_patterns = expect_passwords.keys()
-    password_values = expect_passwords.values()
-
-    # pexpect needs all env vars to be utf-8 encoded strings
-    # https://github.com/pexpect/pexpect/issues/512
-    for k, v in env.items():
-        if isinstance(v, six.text_type):
-            env[k] = v.encode('utf-8')
+    password_patterns = list(expect_passwords.keys())
+    password_values = list(expect_passwords.values())

     child = pexpect.spawn(
         args[0], args[1:], cwd=cwd, env=env, ignore_sighup=True,
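Porting note: `keys()` and `values()` are materialized as parallel lists above because the pexpect flow matches a pattern list and uses the returned index to pick the corresponding password; py3's live dict views are snapshotted with `list(...)` to make that index alignment explicit. A standalone sketch:

    import collections

    expect_passwords = collections.OrderedDict([
        (r'Password:', 'secret1'),
        (r'Vault password:', 'secret2'),
    ])
    patterns = list(expect_passwords.keys())    # py3: views -> real, indexable lists
    values = list(expect_passwords.values())

    # whatever index a pattern matches at selects the password to send back:
    idx = patterns.index(r'Vault password:')
    assert values[idx] == 'secret2'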
@@ -232,7 +225,7 @@ def handle_termination(pid, args, proot_cmd, is_cancel=True):
     instance's cancel_flag.
     '''
     try:
-        if proot_cmd in ' '.join(args):
+        if proot_cmd.encode('utf-8') in args:
             if not psutil:
                 os.kill(pid, signal.SIGKILL)
             else:
@@ -253,7 +246,7 @@ def handle_termination(pid, args, proot_cmd, is_cancel=True):


 def __run__(private_data_dir):
-    buff = StringIO.StringIO()
+    buff = StringIO()
     with open(os.path.join(private_data_dir, 'env'), 'r') as f:
         for line in f:
             buff.write(line)
diff --git a/awx/main/fields.py b/awx/main/fields.py
index 7be0e3b281..2d0f72f602 100644
--- a/awx/main/fields.py
+++ b/awx/main/fields.py
@@ -7,7 +7,7 @@ import json
 import operator
 import re
 import six
-import urllib
+import urllib.parse

 from jinja2 import Environment, StrictUndefined
 from jinja2.exceptions import UndefinedError, TemplateSyntaxError
@@ -251,6 +251,9 @@ class ImplicitRoleField(models.ForeignKey):
             if type(field_name) == tuple:
                 continue

+            if type(field_name) == bytes:
+                field_name = field_name.decode('utf-8')
+
             if field_name.startswith('singleton:'):
                 continue
@@ -373,7 +376,7 @@ class SmartFilterField(models.TextField):
         # https://docs.python.org/2/library/stdtypes.html#truth-value-testing
         if not value:
             return None
-        value = urllib.unquote(value)
+        value = urllib.parse.unquote(value)
         try:
             SmartFilter().query_from_string(value)
         except RuntimeError as e:
@@ -407,9 +410,6 @@ class JSONSchemaField(JSONBField):
             self.schema(model_instance),
             format_checker=self.format_checker
         ).iter_errors(value):
-            # strip Python unicode markers from jsonschema validation errors
-            error.message = re.sub(r'\bu(\'|")', r'\1', error.message)
-
             if error.validator == 'pattern' and 'error' in error.schema:
                 error.message = six.text_type(error.schema['error']).format(instance=error.instance)
             elif error.validator == 'type':
@@ -514,10 +514,10 @@ class CredentialInputField(JSONSchemaField):
             field = field.copy()
             if field['type'] == 'become_method':
                 field.pop('type')
-                field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
+                field['choices'] = list(map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS))
             properties[field['id']] = field
             if field.get('choices', []):
-                field['enum'] = field['choices'][:]
+                field['enum'] = list(field['choices'])[:]
         return {
             'type': 'object',
             'properties': properties,
@@ -824,14 +824,14 @@ class CredentialTypeInjectorField(JSONSchemaField):
         )

         class ExplodingNamespace:
-            def __unicode__(self):
+            def __str__(self):
                 raise UndefinedError(_('Must define unnamed file injector in order to reference `tower.filename`.'))

         class TowerNamespace:
             def __init__(self):
                 self.filename = ExplodingNamespace()

-            def __unicode__(self):
+            def __str__(self):
                 raise UndefinedError(_('Cannot directly reference reserved `tower` namespace container.'))

         valid_namespace['tower'] = TowerNamespace()
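Porting note: the `__unicode__` -> `__str__` renames (here and in awx/conf/models.py above) follow from py3 dropping `__unicode__` entirely; `__str__` now returns text, so the method body is unchanged and only the name moves. A standalone sketch:

    class Setting:
        def __init__(self, key):
            self.key = key

        # py2: def __unicode__(self): return u'Setting(%s)' % self.key
        def __str__(self):
            return 'Setting(%s)' % self.key  # py3: str(obj) calls __str__, returns text

    assert str(Setting('DEBUG')) == 'Setting(DEBUG)'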
diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py
index d8332e0209..fc127addac 100644
--- a/awx/main/management/commands/inventory_import.py
+++ b/awx/main/management/commands/inventory_import.py
@@ -155,6 +155,8 @@ class AnsibleInventoryLoader(object):

         proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
         stdout, stderr = proc.communicate()
+        stdout = smart_text(stdout)
+        stderr = smart_text(stderr)

         if self.tmp_private_dir:
             shutil.rmtree(self.tmp_private_dir, True)
@@ -186,7 +188,7 @@ class AnsibleInventoryLoader(object):
             data.setdefault('_meta', {})
             data['_meta'].setdefault('hostvars', {})
             logger.warning('Re-calling script for hostvars individually.')
-            for group_name, group_data in data.iteritems():
+            for group_name, group_data in list(data.items()):
                 if group_name == '_meta':
                     continue
@@ -347,7 +349,7 @@ class Command(BaseCommand):
         if enabled is not default:
             enabled_value = getattr(self, 'enabled_value', None)
             if enabled_value is not None:
-                enabled = bool(unicode(enabled_value) == unicode(enabled))
+                enabled = bool(str(enabled_value) == str(enabled))
             else:
                 enabled = bool(enabled)
         if enabled is default:
@@ -369,9 +371,9 @@ class Command(BaseCommand):
         try:
             self.inventory = Inventory.objects.get(**q)
         except Inventory.DoesNotExist:
-            raise CommandError('Inventory with %s = %s cannot be found' % q.items()[0])
+            raise CommandError('Inventory with %s = %s cannot be found' % list(q.items())[0])
         except Inventory.MultipleObjectsReturned:
-            raise CommandError('Inventory with %s = %s returned multiple results' % q.items()[0])
+            raise CommandError('Inventory with %s = %s returned multiple results' % list(q.items())[0])
         logger.info('Updating inventory %d: %s' % (self.inventory.pk, self.inventory.name))
@@ -471,7 +473,7 @@ class Command(BaseCommand):
         if self.instance_id_var:
             all_instance_ids = self.mem_instance_id_map.keys()
             instance_ids = []
-            for offset in xrange(0, len(all_instance_ids), self._batch_size):
+            for offset in range(0, len(all_instance_ids), self._batch_size):
                 instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
                 for host_pk in hosts_qs.filter(instance_id__in=instance_ids).values_list('pk', flat=True):
                     del_host_pks.discard(host_pk)
@@ -479,14 +481,14 @@ class Command(BaseCommand):
                 del_host_pks.discard(host_pk)
             all_host_names = list(set(self.mem_instance_id_map.values()) - set(self.all_group.all_hosts.keys()))
         else:
-            all_host_names = self.all_group.all_hosts.keys()
-        for offset in xrange(0, len(all_host_names), self._batch_size):
+            all_host_names = list(self.all_group.all_hosts.keys())
+        for offset in range(0, len(all_host_names), self._batch_size):
             host_names = all_host_names[offset:(offset + self._batch_size)]
             for host_pk in hosts_qs.filter(name__in=host_names).values_list('pk', flat=True):
                 del_host_pks.discard(host_pk)
         # Now delete all remaining hosts in batches.
         all_del_pks = sorted(list(del_host_pks))
-        for offset in xrange(0, len(all_del_pks), self._batch_size):
+        for offset in range(0, len(all_del_pks), self._batch_size):
             del_pks = all_del_pks[offset:(offset + self._batch_size)]
             for host in hosts_qs.filter(pk__in=del_pks):
                 host_name = host.name
@@ -509,8 +511,8 @@ class Command(BaseCommand):
         groups_qs = self.inventory_source.groups.all()
         # Build list of all group pks, remove those that should not be deleted.
         del_group_pks = set(groups_qs.values_list('pk', flat=True))
-        all_group_names = self.all_group.all_groups.keys()
-        for offset in xrange(0, len(all_group_names), self._batch_size):
+        all_group_names = list(self.all_group.all_groups.keys())
+        for offset in range(0, len(all_group_names), self._batch_size):
             group_names = all_group_names[offset:(offset + self._batch_size)]
             for group_pk in groups_qs.filter(name__in=group_names).values_list('pk', flat=True):
                 del_group_pks.discard(group_pk)
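Porting note: the long run of hunks in this file is one batching idiom repeated — walk a large list in `_batch_size` slices — and only the spelling changes: `xrange` becomes `range`, and `.keys()` needs a `list(...)` before slicing, since py3 dict views cannot be indexed or sliced. A standalone sketch:

    def batches(items, batch_size):
        # py2: for offset in xrange(0, len(items), batch_size): ...
        for offset in range(0, len(items), batch_size):
            yield items[offset:offset + batch_size]

    hosts = {'host%d' % i: None for i in range(5)}
    all_names = list(hosts.keys())          # views can't be sliced; lists can
    assert [len(b) for b in batches(all_names, 2)] == [2, 2, 1]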
@@ -522,7 +524,7 @@
             del_group_pks.discard(self.inventory_source.deprecated_group_id)
         # Now delete all remaining groups in batches.
         all_del_pks = sorted(list(del_group_pks))
-        for offset in xrange(0, len(all_del_pks), self._batch_size):
+        for offset in range(0, len(all_del_pks), self._batch_size):
             del_pks = all_del_pks[offset:(offset + self._batch_size)]
             for group in groups_qs.filter(pk__in=del_pks):
                 group_name = group.name
@@ -561,7 +563,7 @@
             for mem_group in mem_children:
                 db_children_name_pk_map.pop(mem_group.name, None)
         del_child_group_pks = list(set(db_children_name_pk_map.values()))
-        for offset in xrange(0, len(del_child_group_pks), self._batch_size):
+        for offset in range(0, len(del_child_group_pks), self._batch_size):
             child_group_pks = del_child_group_pks[offset:(offset + self._batch_size)]
             for db_child in db_children.filter(pk__in=child_group_pks):
                 group_group_count += 1
@@ -574,12 +576,12 @@
         del_host_pks = set(db_hosts.values_list('pk', flat=True))
         mem_hosts = self.all_group.all_groups[db_group.name].hosts
         all_mem_host_names = [h.name for h in mem_hosts if not h.instance_id]
-        for offset in xrange(0, len(all_mem_host_names), self._batch_size):
+        for offset in range(0, len(all_mem_host_names), self._batch_size):
            mem_host_names = all_mem_host_names[offset:(offset + self._batch_size)]
             for db_host_pk in db_hosts.filter(name__in=mem_host_names).values_list('pk', flat=True):
                 del_host_pks.discard(db_host_pk)
         all_mem_instance_ids = [h.instance_id for h in mem_hosts if h.instance_id]
-        for offset in xrange(0, len(all_mem_instance_ids), self._batch_size):
+        for offset in range(0, len(all_mem_instance_ids), self._batch_size):
             mem_instance_ids = all_mem_instance_ids[offset:(offset + self._batch_size)]
             for db_host_pk in db_hosts.filter(instance_id__in=mem_instance_ids).values_list('pk', flat=True):
                 del_host_pks.discard(db_host_pk)
@@ -587,7 +589,7 @@
         for db_host_pk in all_db_host_pks:
             del_host_pks.discard(db_host_pk)
         del_host_pks = list(del_host_pks)
-        for offset in xrange(0, len(del_host_pks), self._batch_size):
+        for offset in range(0, len(del_host_pks), self._batch_size):
             del_pks = del_host_pks[offset:(offset + self._batch_size)]
             for db_host in db_hosts.filter(pk__in=del_pks):
                 group_host_count += 1
@@ -635,7 +637,7 @@
             if len(v.parents) == 1 and v.parents[0].name == 'all':
                 root_group_names.add(k)
         existing_group_names = set()
-        for offset in xrange(0, len(all_group_names), self._batch_size):
+        for offset in range(0, len(all_group_names), self._batch_size):
             group_names = all_group_names[offset:(offset + self._batch_size)]
             for group in self.inventory.groups.filter(name__in=group_names):
                 mem_group = self.all_group.all_groups[group.name]
@@ -739,7 +741,7 @@
         mem_host_instance_id_map = {}
         mem_host_name_map = {}
         mem_host_names_to_update = set(self.all_group.all_hosts.keys())
-        for k,v in self.all_group.all_hosts.iteritems():
+        for k,v in self.all_group.all_hosts.items():
             mem_host_name_map[k] = v
             instance_id = self._get_instance_id(v.variables)
             if instance_id in self.db_instance_id_map:
@@ -749,7 +751,7 @@

         # Update all existing hosts where we know the PK based on instance_id.
         all_host_pks = sorted(mem_host_pk_map.keys())
-        for offset in xrange(0, len(all_host_pks), self._batch_size):
+        for offset in range(0, len(all_host_pks), self._batch_size):
             host_pks = all_host_pks[offset:(offset + self._batch_size)]
             for db_host in self.inventory.hosts.filter(pk__in=host_pks):
                 if db_host.pk in host_pks_updated:
@@ -761,7 +763,7 @@

         # Update all existing hosts where we know the instance_id.
         all_instance_ids = sorted(mem_host_instance_id_map.keys())
-        for offset in xrange(0, len(all_instance_ids), self._batch_size):
+        for offset in range(0, len(all_instance_ids), self._batch_size):
             instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
             for db_host in self.inventory.hosts.filter(instance_id__in=instance_ids):
                 if db_host.pk in host_pks_updated:
@@ -773,7 +775,7 @@

         # Update all existing hosts by name.
         all_host_names = sorted(mem_host_name_map.keys())
-        for offset in xrange(0, len(all_host_names), self._batch_size):
+        for offset in range(0, len(all_host_names), self._batch_size):
             host_names = all_host_names[offset:(offset + self._batch_size)]
             for db_host in self.inventory.hosts.filter(name__in=host_names):
                 if db_host.pk in host_pks_updated:
@@ -815,15 +817,15 @@
         '''
         if settings.SQL_DEBUG:
             queries_before = len(connection.queries)
-        all_group_names = sorted([k for k,v in self.all_group.all_groups.iteritems() if v.children])
+        all_group_names = sorted([k for k,v in self.all_group.all_groups.items() if v.children])
         group_group_count = 0
-        for offset in xrange(0, len(all_group_names), self._batch_size):
+        for offset in range(0, len(all_group_names), self._batch_size):
             group_names = all_group_names[offset:(offset + self._batch_size)]
             for db_group in self.inventory.groups.filter(name__in=group_names):
                 mem_group = self.all_group.all_groups[db_group.name]
                 group_group_count += len(mem_group.children)
                 all_child_names = sorted([g.name for g in mem_group.children])
-                for offset2 in xrange(0, len(all_child_names), self._batch_size):
+                for offset2 in range(0, len(all_child_names), self._batch_size):
                     child_names = all_child_names[offset2:(offset2 + self._batch_size)]
                     db_children_qs = self.inventory.groups.filter(name__in=child_names)
                     for db_child in db_children_qs.filter(children__id=db_group.id):
@@ -842,15 +844,15 @@
         # belongs.
         if settings.SQL_DEBUG:
             queries_before = len(connection.queries)
-        all_group_names = sorted([k for k,v in self.all_group.all_groups.iteritems() if v.hosts])
+        all_group_names = sorted([k for k,v in self.all_group.all_groups.items() if v.hosts])
         group_host_count = 0
-        for offset in xrange(0, len(all_group_names), self._batch_size):
+        for offset in range(0, len(all_group_names), self._batch_size):
             group_names = all_group_names[offset:(offset + self._batch_size)]
             for db_group in self.inventory.groups.filter(name__in=group_names):
                 mem_group = self.all_group.all_groups[db_group.name]
                 group_host_count += len(mem_group.hosts)
                 all_host_names = sorted([h.name for h in mem_group.hosts if not h.instance_id])
-                for offset2 in xrange(0, len(all_host_names), self._batch_size):
+                for offset2 in range(0, len(all_host_names), self._batch_size):
                     host_names = all_host_names[offset2:(offset2 + self._batch_size)]
                     db_hosts_qs = self.inventory.hosts.filter(name__in=host_names)
                     for db_host in db_hosts_qs.filter(groups__id=db_group.id):
                         self._batch_add_m2m(db_group.hosts, db_host)
                         logger.debug('Host "%s" added to group "%s"', db_host.name, db_group.name)
                 all_instance_ids = sorted([h.instance_id for h in mem_group.hosts if h.instance_id])
-                for offset2 in xrange(0, len(all_instance_ids), self._batch_size):
+                for offset2 in range(0, len(all_instance_ids), self._batch_size):
                     instance_ids = all_instance_ids[offset2:(offset2 + self._batch_size)]
                     db_hosts_qs = self.inventory.hosts.filter(instance_id__in=instance_ids)
                     for db_host in db_hosts_qs.filter(groups__id=db_group.id):
@@ -1074,4 +1076,4 @@
             if exc and isinstance(exc, CommandError):
                 sys.exit(1)
             elif exc:
-                raise
+                raise exc
diff --git a/awx/main/management/commands/register_queue.py b/awx/main/management/commands/register_queue.py
index 2894ecba97..6a99d11849 100644
--- a/awx/main/management/commands/register_queue.py
+++ b/awx/main/management/commands/register_queue.py
@@ -19,11 +19,11 @@ class InstanceNotFound(Exception):

 class Command(BaseCommand):
     def add_arguments(self, parser):
-        parser.add_argument('--queuename', dest='queuename', type=lambda s: six.text_type(s, 'utf8'),
+        parser.add_argument('--queuename', dest='queuename', type=str,
                             help='Queue to create/update')
-        parser.add_argument('--hostnames', dest='hostnames', type=lambda s: six.text_type(s, 'utf8'),
+        parser.add_argument('--hostnames', dest='hostnames', type=str,
                             help='Comma-Delimited Hosts to add to the Queue (will not remove already assigned instances)')
-        parser.add_argument('--controller', dest='controller', type=lambda s: six.text_type(s, 'utf8'),
+        parser.add_argument('--controller', dest='controller', type=str,
                             default='', help='The controlling group (makes this an isolated group)')
         parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0,
                             help='The percentage of active instances that will be assigned to this group'),
diff --git a/awx/main/management/commands/replay_job_events.py b/awx/main/management/commands/replay_job_events.py
index 68634092f6..875578ec29 100644
--- a/awx/main/management/commands/replay_job_events.py
+++ b/awx/main/management/commands/replay_job_events.py
@@ -154,7 +154,7 @@ class ReplayJobEvents(JobStatusLifeCycle):
                 continue

             if debug:
-                raw_input("{} of {}:".format(n, job_event_count))
+                input("{} of {}:".format(n, job_event_count))

             if not je_previous:
                 stats['recording_start'] = je_current.created
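Porting note: two small py3 renames land here — `raw_input` became `input` (py2's old eval-ing `input` is gone), and `argparse` receives `str` arguments natively on py3, so the `six.text_type(s, 'utf8')` converters collapse to plain `type=str`. A standalone sketch:

    import argparse

    parser = argparse.ArgumentParser()
    # py2: type=lambda s: six.text_type(s, 'utf8') -- argv items were bytes.
    # py3: sys.argv is already a list of str, so str is enough.
    parser.add_argument('--queuename', dest='queuename', type=str)
    args = parser.parse_args(['--queuename', 'tower'])
    assert args.queuename == 'tower'

    # Interactive pause, py3 spelling (py2: raw_input(...)):
    # input("1 of 100:")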
diff --git a/awx/main/management/commands/run_dispatcher.py b/awx/main/management/commands/run_dispatcher.py
index 312c146e20..a0008b9504 100644
--- a/awx/main/management/commands/run_dispatcher.py
+++ b/awx/main/management/commands/run_dispatcher.py
@@ -19,7 +19,7 @@ logger = logging.getLogger('awx.main.dispatch')


 def construct_bcast_queue_name(common_name):
-    return common_name.encode('utf8') + '_' + settings.CLUSTER_HOST_ID
+    return common_name + '_' + settings.CLUSTER_HOST_ID


 class Command(BaseCommand):
@@ -80,10 +80,10 @@ class Command(BaseCommand):

     def handle(self, *arg, **options):
         if options.get('status'):
-            print Control('dispatcher').status()
+            print(Control('dispatcher').status())
             return
         if options.get('running'):
-            print Control('dispatcher').running()
+            print(Control('dispatcher').running())
             return
         if options.get('reload'):
             return Control('dispatcher').control({'control': 'reload'})
diff --git a/awx/main/migrations/0001_initial.py b/awx/main/migrations/0001_initial.py
index bdc98cace2..fb8a88e676 100644
--- a/awx/main/migrations/0001_initial.py
+++ b/awx/main/migrations/0001_initial.py
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
             name='ActivityStream',
             fields=[
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('operation', models.CharField(max_length=13, choices=[(b'create', 'Entity Created'), (b'update', 'Entity Updated'), (b'delete', 'Entity Deleted'), (b'associate', 'Entity Associated with another Entity'), (b'disassociate', 'Entity was Disassociated with another Entity')])),
+                ('operation', models.CharField(max_length=13, choices=[('create', 'Entity Created'), ('update', 'Entity Updated'), ('delete', 'Entity Deleted'), ('associate', 'Entity Associated with another Entity'), ('disassociate', 'Entity was Disassociated with another Entity')])),
                 ('timestamp', models.DateTimeField(auto_now_add=True)),
                 ('changes', models.TextField(blank=True)),
                 ('object_relationship_type', models.TextField(blank=True)),
@@ -42,8 +42,8 @@
                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                 ('created', models.DateTimeField(default=None, editable=False)),
                 ('modified', models.DateTimeField(default=None, editable=False)),
-                ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
-                ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped')])),
+                ('host_name', models.CharField(default='', max_length=1024, editable=False)),
+                ('event', models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_skipped', 'Host Skipped')])),
                 ('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
                 ('failed', models.BooleanField(default=False, editable=False)),
                 ('changed', models.BooleanField(default=False, editable=False)),
@@ -60,8 +60,8 @@
                 ('created', models.DateTimeField(auto_now_add=True)),
                 ('modified', models.DateTimeField(auto_now=True)),
                 ('expires', models.DateTimeField(default=django.utils.timezone.now)),
-                ('request_hash', models.CharField(default=b'', max_length=40, blank=True)),
-                ('reason', models.CharField(default=b'', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)),
+                ('request_hash', models.CharField(default='', max_length=40, blank=True)),
+                ('reason', models.CharField(default='', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)),
                 ('user', models.ForeignKey(related_name='auth_tokens', to=settings.AUTH_USER_MODEL)),
             ],
         ),
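Porting note: the migration churn from here down is almost entirely deleting `b''` prefixes — the py2-era migrations were generated with byte-string defaults and choices, which on py3 would make `CharField` defaults `bytes` instead of `str`, and the two never compare equal. A standalone sketch of the distinction:

    # py2-era migration literal:   default=b''   (bytes on py3!)
    # py3 re-render of the same:   default=''    (text)
    assert b'' != ''                       # py3: bytes and str never compare equal
    assert isinstance(b'create', bytes) and isinstance('create', str)

    choices = [('create', 'Entity Created'), ('update', 'Entity Updated')]
    assert all(isinstance(value, str) for value, label in choices)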
diff --git a/awx/main/migrations/0001_initial.py b/awx/main/migrations/0001_initial.py index bdc98cace2..fb8a88e676 100644 --- a/awx/main/migrations/0001_initial.py +++ b/awx/main/migrations/0001_initial.py @@ -27,7 +27,7 @@ class Migration(migrations.Migration): name='ActivityStream', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('operation', models.CharField(max_length=13, choices=[(b'create', 'Entity Created'), (b'update', 'Entity Updated'), (b'delete', 'Entity Deleted'), (b'associate', 'Entity Associated with another Entity'), (b'disassociate', 'Entity was Disassociated with another Entity')])), + ('operation', models.CharField(max_length=13, choices=[('create', 'Entity Created'), ('update', 'Entity Updated'), ('delete', 'Entity Deleted'), ('associate', 'Entity Associated with another Entity'), ('disassociate', 'Entity was Disassociated with another Entity')])), ('timestamp', models.DateTimeField(auto_now_add=True)), ('changes', models.TextField(blank=True)), ('object_relationship_type', models.TextField(blank=True)), @@ -42,8 +42,8 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('host_name', models.CharField(default=b'', max_length=1024, editable=False)), - ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped')])), + ('host_name', models.CharField(default='', max_length=1024, editable=False)), + ('event', models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_skipped', 'Host Skipped')])), ('event_data', jsonfield.fields.JSONField(default={}, blank=True)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), @@ -60,8 +60,8 @@ class Migration(migrations.Migration): ('created', models.DateTimeField(auto_now_add=True)), ('modified', models.DateTimeField(auto_now=True)), ('expires', models.DateTimeField(default=django.utils.timezone.now)), - ('request_hash', models.CharField(default=b'', max_length=40, blank=True)), - ('reason', models.CharField(default=b'', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)), + ('request_hash', models.CharField(default='', max_length=40, blank=True)), + ('reason', models.CharField(default='', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)), ('user', models.ForeignKey(related_name='auth_tokens', to=settings.AUTH_USER_MODEL)), ], ), @@ -71,22 +71,22 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('active', models.BooleanField(default=True, editable=False)), ('name', models.CharField(max_length=512)), - ('kind', models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')])), + ('kind', models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('openstack', 'OpenStack')])), ('cloud', models.BooleanField(default=False, editable=False)), - ('host', models.CharField(default=b'', help_text='The hostname or IP address to use.', max_length=1024, verbose_name='Host', blank=True)), - ('username', models.CharField(default=b'', help_text='Username for this credential.', max_length=1024, verbose_name='Username', blank=True)), - ('password', models.CharField(default=b'', help_text='Password for this
credential (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='Password', blank=True)), + ('security_token', models.CharField(default='', help_text='Security Token for this credential', max_length=1024, verbose_name='Security Token', blank=True)), + ('project', models.CharField(default='', help_text='The identifier for the project.', max_length=100, verbose_name='Project', blank=True)), + ('ssh_key_data', models.TextField(default='', help_text='RSA or DSA private key to be used instead of password.', verbose_name='SSH private key', blank=True)), + ('ssh_key_unlock', models.CharField(default='', help_text='Passphrase to unlock SSH private key if encrypted (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='SSH key unlock', blank=True)), + ('become_method', models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec')])), + ('become_username', models.CharField(default='', help_text='Privilege escalation username.', max_length=1024, blank=True)), + ('become_password', models.CharField(default='', help_text='Password for privilege escalation method.', max_length=1024, blank=True)), + ('vault_password', models.CharField(default='', help_text='Vault password (or "ASK" to prompt the user).', max_length=1024, blank=True)), ('created_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), ('modified_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')), @@ -101,10 +101,10 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('active', models.BooleanField(default=True, editable=False)), ('name', models.CharField(max_length=512)), - ('script', models.TextField(default=b'', help_text='Inventory script contents', blank=True)), + ('script', models.TextField(default='', help_text='Inventory script contents', blank=True)), ('created_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), ('modified_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), ], @@ -118,10 +118,10 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, 
editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('active', models.BooleanField(default=True, editable=False)), ('name', models.CharField(max_length=512)), - ('variables', models.TextField(default=b'', help_text='Group variables in JSON or YAML format.', blank=True)), + ('variables', models.TextField(default='', help_text='Group variables in JSON or YAML format.', blank=True)), ('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts directly or indirectly in this group.', editable=False)), ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether this group has any hosts with active failures.', editable=False)), ('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this group with active failures.', editable=False)), @@ -140,12 +140,12 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('active', models.BooleanField(default=True, editable=False)), ('name', models.CharField(max_length=512)), ('enabled', models.BooleanField(default=True, help_text='Is this host online and available for running jobs?')), - ('instance_id', models.CharField(default=b'', max_length=100, blank=True)), - ('variables', models.TextField(default=b'', help_text='Host variables in JSON or YAML format.', blank=True)), + ('instance_id', models.CharField(default='', max_length=100, blank=True)), + ('variables', models.TextField(default='', help_text='Host variables in JSON or YAML format.', blank=True)), ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether the last job failed for this host.', editable=False)), ('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this host was created/updated from any external inventory sources.', editable=False)), ('created_by', models.ForeignKey(related_name="{u'class': 'host', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), @@ -171,10 +171,10 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('active', models.BooleanField(default=True, editable=False)), ('name', models.CharField(unique=True, max_length=512)), - ('variables', models.TextField(default=b'', help_text='Inventory variables in JSON or YAML format.', blank=True)), + ('variables', models.TextField(default='', help_text='Inventory variables in JSON or YAML format.', blank=True)), ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether any hosts in this inventory have failed.', editable=False)), ('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts in this inventory.', editable=False)), 
('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this inventory with active failures.', editable=False)), @@ -197,14 +197,14 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 'Host Async Failure'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete')])), + ('event', models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete')])), ('event_data', jsonfield.fields.JSONField(default={}, blank=True)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), - ('host_name', models.CharField(default=b'', max_length=1024, editable=False)), - ('play', models.CharField(default=b'', max_length=1024, editable=False)), - ('role', models.CharField(default=b'', max_length=1024, editable=False)), - ('task', models.CharField(default=b'', max_length=1024, editable=False)), + ('host_name', models.CharField(default='', max_length=1024, editable=False)), + ('play', models.CharField(default='', max_length=1024, editable=False)), + ('role', models.CharField(default='', max_length=1024, editable=False)), + ('task', models.CharField(default='', max_length=1024, editable=False)), ('counter', models.PositiveIntegerField(default=0)), ('host', models.ForeignKey(related_name='job_events_as_primary_host', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, 
to='main.Host', null=True)), ('hosts', models.ManyToManyField(related_name='job_events', editable=False, to='main.Host')), @@ -220,7 +220,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('host_name', models.CharField(default=b'', max_length=1024, editable=False)), + ('host_name', models.CharField(default='', max_length=1024, editable=False)), ('changed', models.PositiveIntegerField(default=0, editable=False)), ('dark', models.PositiveIntegerField(default=0, editable=False)), ('failures', models.PositiveIntegerField(default=0, editable=False)), @@ -250,7 +250,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('active', models.BooleanField(default=True, editable=False)), ('name', models.CharField(unique=True, max_length=512)), ('admins', models.ManyToManyField(related_name='admin_of_organizations', to=settings.AUTH_USER_MODEL, blank=True)), @@ -269,10 +269,10 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('active', models.BooleanField(default=True, editable=False)), ('name', models.CharField(max_length=512)), - ('permission_type', models.CharField(max_length=64, choices=[(b'read', 'Read Inventory'), (b'write', 'Edit Inventory'), (b'admin', 'Administrate Inventory'), (b'run', 'Deploy To Inventory'), (b'check', 'Deploy To Inventory (Dry Run)'), (b'scan', 'Scan an Inventory'), (b'create', 'Create a Job Template')])), + ('permission_type', models.CharField(max_length=64, choices=[('read', 'Read Inventory'), ('write', 'Edit Inventory'), ('admin', 'Administrate Inventory'), ('run', 'Deploy To Inventory'), ('check', 'Deploy To Inventory (Dry Run)'), ('scan', 'Scan an Inventory'), ('create', 'Create a Job Template')])), ('run_ad_hoc_commands', models.BooleanField(default=False, help_text='Execute Commands on the Inventory')), ('created_by', models.ForeignKey(related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), ('inventory', models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True)), @@ -286,7 +286,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('ldap_dn', models.CharField(default=b'', max_length=1024)), + ('ldap_dn', models.CharField(default='', max_length=1024)), ('user', awx.main.fields.AutoOneToOneField(related_name='profile', editable=False, 
to=settings.AUTH_USER_MODEL)), ], ), @@ -296,7 +296,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('active', models.BooleanField(default=True, editable=False)), ('name', models.CharField(unique=True, max_length=512)), ('enabled', models.BooleanField(default=True)), @@ -319,7 +319,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('active', models.BooleanField(default=True, editable=False)), ('name', models.CharField(max_length=512)), ('created_by', models.ForeignKey(related_name="{u'class': 'team', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), @@ -338,26 +338,26 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('active', models.BooleanField(default=True, editable=False)), ('name', models.CharField(max_length=512)), ('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)), - ('launch_type', models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency')])), + ('launch_type', models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency')])), ('cancel_flag', models.BooleanField(default=False, editable=False)), - ('status', models.CharField(default=b'new', max_length=20, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled')])), + ('status', models.CharField(default='new', max_length=20, editable=False, choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled')])), ('failed', models.BooleanField(default=False, editable=False)), ('started', models.DateTimeField(default=None, null=True, editable=False)), ('finished', models.DateTimeField(default=None, null=True, editable=False)), ('elapsed', models.DecimalField(editable=False, max_digits=12, decimal_places=3)), - ('job_args', models.TextField(default=b'', editable=False, blank=True)), - ('job_cwd', models.CharField(default=b'', max_length=1024, editable=False, blank=True)), + ('job_args', 
models.TextField(default='', editable=False, blank=True)), + ('job_cwd', models.CharField(default='', max_length=1024, editable=False, blank=True)), ('job_env', jsonfield.fields.JSONField(default={}, editable=False, blank=True)), - ('job_explanation', models.TextField(default=b'', editable=False, blank=True)), - ('start_args', models.TextField(default=b'', editable=False, blank=True)), - ('result_stdout_text', models.TextField(default=b'', editable=False, blank=True)), - ('result_stdout_file', models.TextField(default=b'', editable=False, blank=True)), - ('result_traceback', models.TextField(default=b'', editable=False, blank=True)), - ('celery_task_id', models.CharField(default=b'', max_length=100, editable=False, blank=True)), + ('job_explanation', models.TextField(default='', editable=False, blank=True)), + ('start_args', models.TextField(default='', editable=False, blank=True)), + ('result_stdout_text', models.TextField(default='', editable=False, blank=True)), + ('result_stdout_file', models.TextField(default='', editable=False, blank=True)), + ('result_traceback', models.TextField(default='', editable=False, blank=True)), + ('celery_task_id', models.CharField(default='', max_length=100, editable=False, blank=True)), ], ), migrations.CreateModel( @@ -366,7 +366,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('active', models.BooleanField(default=True, editable=False)), ('name', models.CharField(max_length=512)), ('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)), @@ -374,19 +374,19 @@ class Migration(migrations.Migration): ('last_job_run', models.DateTimeField(default=None, null=True, editable=False)), ('has_schedules', models.BooleanField(default=False, editable=False)), ('next_job_run', models.DateTimeField(default=None, null=True, editable=False)), - ('status', models.CharField(default=b'ok', max_length=32, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled'), (b'never updated', b'Never Updated'), (b'ok', b'OK'), (b'missing', b'Missing'), (b'none', 'No External Source'), (b'updating', 'Updating')])), + ('status', models.CharField(default='ok', max_length=32, editable=False, choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled'), ('never updated', 'Never Updated'), ('ok', 'OK'), ('missing', 'Missing'), ('none', 'No External Source'), ('updating', 'Updating')])), ], ), migrations.CreateModel( name='AdHocCommand', fields=[ ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')), - ('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check')])), - ('limit', models.CharField(default=b'', max_length=1024, blank=True)), - ('module_name', models.CharField(default=b'', max_length=1024, blank=True)), - ('module_args', models.TextField(default=b'', blank=True)), + ('job_type', 
models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check')])), + ('limit', models.CharField(default='', max_length=1024, blank=True)), + ('module_name', models.CharField(default='', max_length=1024, blank=True)), + ('module_args', models.TextField(default='', blank=True)), ('forks', models.PositiveIntegerField(default=0, blank=True)), - ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])), + ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])), ('become_enabled', models.BooleanField(default=False)), ], bases=('main.unifiedjob',), @@ -395,12 +395,12 @@ class Migration(migrations.Migration): name='InventorySource', fields=[ ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')), - ('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])), - ('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)), - ('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)), - ('source_regions', models.CharField(default=b'', max_length=1024, blank=True)), - ('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)), - ('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)), + ('source', models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')])), + ('source_path', models.CharField(default='', max_length=1024, editable=False, blank=True)), + ('source_vars', models.TextField(default='', help_text='Inventory source variables in YAML or JSON format.', blank=True)), + ('source_regions', models.CharField(default='', max_length=1024, blank=True)), + ('instance_filters', models.CharField(default='', help_text='Comma-separated list of filter expressions (EC2 only). 
Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)), + ('group_by', models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)), ('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')), ('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')), ('update_on_launch', models.BooleanField(default=False)), @@ -412,12 +412,12 @@ class Migration(migrations.Migration): name='InventoryUpdate', fields=[ ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')), - ('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])), - ('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)), - ('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)), - ('source_regions', models.CharField(default=b'', max_length=1024, blank=True)), - ('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)), - ('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)), + ('source', models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')])), + ('source_path', models.CharField(default='', max_length=1024, editable=False, blank=True)), + ('source_vars', models.TextField(default='', help_text='Inventory source variables in YAML or JSON format.', blank=True)), + ('source_regions', models.CharField(default='', max_length=1024, blank=True)), + ('instance_filters', models.CharField(default='', help_text='Comma-separated list of filter expressions (EC2 only). 
Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)), + ('group_by', models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)), ('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')), ('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')), ('license_error', models.BooleanField(default=False, editable=False)), @@ -428,16 +428,16 @@ class Migration(migrations.Migration): name='Job', fields=[ ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')), - ('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])), - ('playbook', models.CharField(default=b'', max_length=1024, blank=True)), + ('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check'), ('scan', 'Scan')])), + ('playbook', models.CharField(default='', max_length=1024, blank=True)), ('forks', models.PositiveIntegerField(default=0, blank=True)), - ('limit', models.CharField(default=b'', max_length=1024, blank=True)), - ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])), - ('extra_vars', models.TextField(default=b'', blank=True)), - ('job_tags', models.CharField(default=b'', max_length=1024, blank=True)), + ('limit', models.CharField(default='', max_length=1024, blank=True)), + ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])), + ('extra_vars', models.TextField(default='', blank=True)), + ('job_tags', models.CharField(default='', max_length=1024, blank=True)), ('force_handlers', models.BooleanField(default=False)), - ('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)), - ('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)), + ('skip_tags', models.CharField(default='', max_length=1024, blank=True)), + ('start_at_task', models.CharField(default='', max_length=1024, blank=True)), ('become_enabled', models.BooleanField(default=False)), ], options={ @@ -449,18 +449,18 @@ class Migration(migrations.Migration): name='JobTemplate', fields=[ ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')), - ('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])), - ('playbook', models.CharField(default=b'', max_length=1024, blank=True)), + ('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check'), ('scan', 'Scan')])), + ('playbook', models.CharField(default='', max_length=1024, blank=True)), ('forks', models.PositiveIntegerField(default=0, blank=True)), - ('limit', models.CharField(default=b'', max_length=1024, blank=True)), - ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])), 
- ('extra_vars', models.TextField(default=b'', blank=True)), - ('job_tags', models.CharField(default=b'', max_length=1024, blank=True)), + ('limit', models.CharField(default='', max_length=1024, blank=True)), + ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])), + ('extra_vars', models.TextField(default='', blank=True)), + ('job_tags', models.CharField(default='', max_length=1024, blank=True)), ('force_handlers', models.BooleanField(default=False)), - ('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)), - ('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)), + ('skip_tags', models.CharField(default='', max_length=1024, blank=True)), + ('start_at_task', models.CharField(default='', max_length=1024, blank=True)), ('become_enabled', models.BooleanField(default=False)), - ('host_config_key', models.CharField(default=b'', max_length=1024, blank=True)), + ('host_config_key', models.CharField(default='', max_length=1024, blank=True)), ('ask_variables_on_launch', models.BooleanField(default=False)), ('survey_enabled', models.BooleanField(default=False)), ('survey_spec', jsonfield.fields.JSONField(default={}, blank=True)), @@ -475,9 +475,9 @@ class Migration(migrations.Migration): fields=[ ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')), ('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)), - ('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])), - ('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)), - ('scm_branch', models.CharField(default=b'', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)), + ('scm_type', models.CharField(default='', max_length=8, verbose_name='SCM Type', blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')])), + ('scm_url', models.CharField(default='', max_length=1024, verbose_name='SCM URL', blank=True)), + ('scm_branch', models.CharField(default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)), ('scm_clean', models.BooleanField(default=False)), ('scm_delete_on_update', models.BooleanField(default=False)), ('scm_delete_on_next_update', models.BooleanField(default=False, editable=False)), @@ -494,9 +494,9 @@ class Migration(migrations.Migration): fields=[ ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')), ('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)), - ('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])), - ('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)), - ('scm_branch', models.CharField(default=b'', help_text='Specific branch, 
tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)), + ('scm_type', models.CharField(default='', max_length=8, verbose_name='SCM Type', blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')])), + ('scm_url', models.CharField(default='', max_length=1024, verbose_name='SCM URL', blank=True)), + ('scm_branch', models.CharField(default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)), ('scm_clean', models.BooleanField(default=False)), ('scm_delete_on_update', models.BooleanField(default=False)), ], @@ -506,8 +506,8 @@ class Migration(migrations.Migration): name='SystemJob', fields=[ ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')), - ('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])), - ('extra_vars', models.TextField(default=b'', blank=True)), + ('job_type', models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_deleted', 'Purge previously deleted items from the database'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])), + ('extra_vars', models.TextField(default='', blank=True)), ], options={ 'ordering': ('id',), @@ -518,7 +518,7 @@ class Migration(migrations.Migration): name='SystemJobTemplate', fields=[ ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')), - ('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])), + ('job_type', models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_deleted', 'Purge previously deleted items from the database'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])), ], bases=('main.unifiedjobtemplate', models.Model), ),
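Every hunk in these migration files is the same mechanical change: literals that Python 2 treated as plain strings are distinct bytes objects under Python 3, so field defaults, choice keys, and the ('main', ...) migration labels all move from b'...' to native str. The underlying reason fits in a few lines of plain Python, no Django required (the sample choices mirror the ActivityStream field above):

    choices = dict([('create', 'Entity Created'), ('update', 'Entity Updated')])
    assert b'create' != 'create'                      # py3: bytes and str never compare equal
    assert choices.get(b'create') is None             # so a bytes key misses the str entry
    assert choices.get('create') == 'Entity Created'  # the str key matches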
diff --git a/awx/main/migrations/0002_squashed_v300_release.py b/awx/main/migrations/0002_squashed_v300_release.py index 0b78c37cb0..11190a20da 100644 --- a/awx/main/migrations/0002_squashed_v300_release.py +++ b/awx/main/migrations/0002_squashed_v300_release.py @@ -105,24 +105,24 @@ def create_system_job_templates(apps, schema_editor): class Migration(migrations.Migration): - replaces = [(b'main', '0002_v300_tower_settings_changes'), - (b'main', '0003_v300_notification_changes'), - (b'main', '0004_v300_fact_changes'), - (b'main', '0005_v300_migrate_facts'), - (b'main', '0006_v300_active_flag_cleanup'), - (b'main', '0007_v300_active_flag_removal'), - (b'main', '0008_v300_rbac_changes'), - (b'main', '0009_v300_rbac_migrations'), - (b'main', '0010_v300_create_system_job_templates'), - (b'main', '0011_v300_credential_domain_field'), - (b'main', '0012_v300_create_labels'), - (b'main', '0013_v300_label_changes'), - (b'main', '0014_v300_invsource_cred'), - (b'main', '0015_v300_label_changes'), - (b'main', '0016_v300_prompting_changes'), - (b'main', '0017_v300_prompting_migrations'), - (b'main', '0018_v300_host_ordering'), - (b'main', '0019_v300_new_azure_credential'),] + replaces = [('main', '0002_v300_tower_settings_changes'), + ('main', '0003_v300_notification_changes'), + ('main', '0004_v300_fact_changes'), + ('main', '0005_v300_migrate_facts'), + ('main', '0006_v300_active_flag_cleanup'), + ('main', '0007_v300_active_flag_removal'), + ('main', '0008_v300_rbac_changes'), + ('main', '0009_v300_rbac_migrations'), + ('main', '0010_v300_create_system_job_templates'), + ('main', '0011_v300_credential_domain_field'), + ('main', '0012_v300_create_labels'), + ('main', '0013_v300_label_changes'), + ('main', '0014_v300_invsource_cred'), + ('main', '0015_v300_label_changes'), + ('main', '0016_v300_prompting_changes'), + ('main', '0017_v300_prompting_migrations'), + ('main', '0018_v300_host_ordering'), + ('main', '0019_v300_new_azure_credential'),] dependencies = [ ('taggit', '0002_auto_20150616_2121'), @@ -143,7 +143,7 @@ class Migration(migrations.Migration): ('description', models.TextField()), ('category', models.CharField(max_length=128)), ('value', models.TextField(blank=True)), - ('value_type', models.CharField(max_length=12, choices=[(b'string', 'String'), (b'int', 'Integer'), (b'float', 'Decimal'), (b'json', 'JSON'), (b'bool', 'Boolean'), (b'password', 'Password'), (b'list', 'List')])), + ('value_type', models.CharField(max_length=12, choices=[('string', 'String'), ('int', 'Integer'), ('float', 'Decimal'), ('json', 'JSON'), ('bool', 'Boolean'), ('password', 'Password'), ('list', 'List')])), ('user', models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), ], ), @@ -154,12 +154,12 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('status', models.CharField(default=b'pending', max_length=20, editable=False, choices=[(b'pending', 'Pending'), (b'successful', 'Successful'), (b'failed', 'Failed')])), - ('error', models.TextField(default=b'', editable=False, blank=True)), + ('status', models.CharField(default='pending', max_length=20, editable=False, choices=[('pending', 'Pending'), ('successful', 'Successful'), ('failed', 'Failed')])), + ('error', models.TextField(default='', editable=False, blank=True)), ('notifications_sent', models.IntegerField(default=0, editable=False)), - ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'mattermost', 'Mattermost'), (b'rocketchat', 'Rocket.Chat'), (b'irc', 'IRC')])), - ('recipients', models.TextField(default=b'', editable=False, blank=True)), - ('subject', models.TextField(default=b'', editable=False, blank=True)), + ('notification_type', models.CharField(max_length=32, choices=[('email',
'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')])), + ('recipients', models.TextField(default='', editable=False, blank=True)), + ('subject', models.TextField(default='', editable=False, blank=True)), ('body', jsonfield.fields.JSONField(default=dict, blank=True)), ], options={ @@ -172,9 +172,9 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('name', models.CharField(unique=True, max_length=512)), - ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'mattermost', 'Mattermost'), (b'rocketchat', 'Rocket.Chat'), (b'irc', 'IRC')])), + ('notification_type', models.CharField(max_length=32, choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')])), ('notification_configuration', jsonfield.fields.JSONField(default=dict)), ('created_by', models.ForeignKey(related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), ('modified_by', models.ForeignKey(related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), @@ -381,7 +381,7 @@ class Migration(migrations.Migration): ('singleton_name', models.TextField(default=None, unique=True, null=True, db_index=True)), ('members', models.ManyToManyField(related_name='roles', to=settings.AUTH_USER_MODEL)), ('parents', models.ManyToManyField(related_name='children', to='main.Role')), - ('implicit_parents', models.TextField(default=b'[]')), + ('implicit_parents', models.TextField(default='[]')), ('content_type', models.ForeignKey(default=None, to='contenttypes.ContentType', null=True)), ('object_id', models.PositiveIntegerField(default=None, null=True)), @@ -422,122 +422,122 @@ class Migration(migrations.Migration): migrations.AddField( model_name='credential', name='admin_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator'], to='main.Role', null='True'), ), migrations.AddField( model_name='credential', name='use_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'), ),
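The ImplicitRoleField hunks that follow apply the same bytes-to-str sweep to role inheritance: parent_role entries such as 'organization.admin_role' are dotted attribute paths resolved at runtime, which only works cleanly on str in Python 3. A toy resolver to illustrate the idea (an assumption for illustration, not AWX's actual ImplicitRoleField logic):

    from functools import reduce

    def resolve(obj, path):
        # follow a dotted attribute path such as 'organization.admin_role'
        return reduce(getattr, path.split('.'), obj)

    class Org: admin_role = 'org-admin-role'   # stand-in objects, not real models
    class Cred: organization = Org()

    print(resolve(Cred(), 'organization.admin_role'))  # -> 'org-admin-role'
    try:
        b'organization.admin_role'.split('.')          # the old bytes form breaks on py3
    except TypeError as exc:
        print(exc)                                     # a bytes-like object is required, not 'str'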
migrations.AddField( model_name='credential', name='read_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'use_role', 'admin_role'], to='main.Role', null='True'), ), migrations.AddField( model_name='custominventoryscript', name='admin_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='organization.admin_role', to='main.Role', null='True'), ), migrations.AddField( model_name='custominventoryscript', name='read_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'organization.member_role', b'admin_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'organization.member_role', 'admin_role'], to='main.Role', null='True'), ), migrations.AddField( model_name='inventory', name='admin_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='organization.admin_role', to='main.Role', null='True'), ), migrations.AddField( model_name='inventory', name='adhoc_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'), ), migrations.AddField( model_name='inventory', name='update_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'), ), migrations.AddField( model_name='inventory', name='use_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'adhoc_role', to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='adhoc_role', to='main.Role', null='True'), ), migrations.AddField( model_name='inventory', name='read_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'update_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'update_role', 'use_role', 'admin_role'], to='main.Role', null='True'), ), migrations.AddField( model_name='jobtemplate', name='admin_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'project.organization.admin_role', b'inventory.organization.admin_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['project.organization.admin_role', 'inventory.organization.admin_role'], to='main.Role', null='True'), ), migrations.AddField( model_name='jobtemplate', name='execute_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'), ), migrations.AddField( model_name='jobtemplate', name='read_role', - field=awx.main.fields.ImplicitRoleField(related_name='+',
parent_role=[b'project.organization.auditor_role', b'inventory.organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['project.organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'], to='main.Role', null='True'), ), migrations.AddField( model_name='organization', name='admin_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_administrator', to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='singleton:system_administrator', to='main.Role', null='True'), ), migrations.AddField( model_name='organization', name='auditor_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_auditor', to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='singleton:system_auditor', to='main.Role', null='True'), ), migrations.AddField( model_name='organization', name='member_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'), ), migrations.AddField( model_name='organization', name='read_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'member_role', b'auditor_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['member_role', 'auditor_role'], to='main.Role', null='True'), ), migrations.AddField( model_name='project', name='admin_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.admin_role', b'singleton:system_administrator'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.admin_role', 'singleton:system_administrator'], to='main.Role', null='True'), ), migrations.AddField( model_name='project', name='use_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'), ), migrations.AddField( model_name='project', name='update_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'), ), migrations.AddField( model_name='project', name='read_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'singleton:system_auditor', b'use_role', b'update_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'singleton:system_auditor', 'use_role', 'update_role'], to='main.Role', null='True'), ), migrations.AddField( model_name='team', name='admin_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'organization.admin_role', to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='organization.admin_role', to='main.Role', null='True'), ), migrations.AddField( model_name='team', name='member_role', - 
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=None, to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=None, to='main.Role', null='True'), ), migrations.AddField( model_name='team', name='read_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role', b'organization.auditor_role', b'member_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role', 'organization.auditor_role', 'member_role'], to='main.Role', null='True'), ), # System Job Templates @@ -545,18 +545,18 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='systemjob', name='job_type', - field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]), + field=models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]), ), migrations.AlterField( model_name='systemjobtemplate', name='job_type', - field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]), + field=models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]), ), # Credential domain field migrations.AddField( model_name='credential', name='domain', - field=models.CharField(default=b'', help_text='The identifier for the domain.', max_length=100, verbose_name='Domain', blank=True), + field=models.CharField(default='', help_text='The identifier for the domain.', max_length=100, verbose_name='Domain', blank=True), ), # Create Labels migrations.CreateModel( @@ -565,7 +565,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('name', models.CharField(max_length=512)), ('created_by', models.ForeignKey(related_name="{u'class': 'label', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), ('modified_by', models.ForeignKey(related_name="{u'class': 'label', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), @@ -625,7 +625,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='credential', 
name='authorize_password', - field=models.CharField(default=b'', help_text='Password used by the authorize mechanism.', max_length=1024, blank=True), + field=models.CharField(default='', help_text='Password used by the authorize mechanism.', max_length=1024, blank=True), ), migrations.AlterField( model_name='credential', @@ -640,17 +640,17 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='credential', name='kind', - field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')]), + field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('openstack', 'OpenStack')]), ), migrations.AlterField( model_name='inventorysource', name='source', - field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]), + field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]), ), migrations.AlterField( model_name='inventoryupdate', name='source', - field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]), + field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]), ), migrations.AlterField( model_name='team', @@ -702,41 +702,41 @@ class Migration(migrations.Migration): migrations.AddField( model_name='credential', name='client', - field=models.CharField(default=b'', help_text='Client Id or Application Id for the credential', max_length=128, blank=True), + field=models.CharField(default='', help_text='Client Id or Application Id for the credential', max_length=128, blank=True), ), migrations.AddField( 
model_name='credential', name='secret', - field=models.CharField(default=b'', help_text='Secret Token for this credential', max_length=1024, blank=True), + field=models.CharField(default='', help_text='Secret Token for this credential', max_length=1024, blank=True), ), migrations.AddField( model_name='credential', name='subscription', - field=models.CharField(default=b'', help_text='Subscription identifier for this credential', max_length=1024, blank=True), + field=models.CharField(default='', help_text='Subscription identifier for this credential', max_length=1024, blank=True), ), migrations.AddField( model_name='credential', name='tenant', - field=models.CharField(default=b'', help_text='Tenant identifier for this credential', max_length=1024, blank=True), + field=models.CharField(default='', help_text='Tenant identifier for this credential', max_length=1024, blank=True), ), migrations.AlterField( model_name='credential', name='kind', - field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]), + field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('openstack', 'OpenStack')]), ), migrations.AlterField( model_name='host', name='instance_id', - field=models.CharField(default=b'', max_length=1024, blank=True), + field=models.CharField(default='', max_length=1024, blank=True), ), migrations.AlterField( model_name='inventorysource', name='source', - field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]), + field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]), ), migrations.AlterField( model_name='inventoryupdate', name='source', - field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 
6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+ field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
),
]
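Every hunk in the squashed migration above is the same mechanical change: the b'...' literals frozen into these Python 2-era migration files become plain strings. A minimal, runnable sketch of why the prefixes cannot stay on Python 3 (the 'ssh' value is borrowed from the credential kind choices above; the two-entry choices list is illustrative, not the AWX schema):

    # Python 3 keeps bytes and str strictly apart, and the two never
    # compare equal, so a choices list keyed with b'ssh' can no longer
    # match the 'ssh' value Django reads back from the database.
    assert b'ssh' != 'ssh'

    choices = dict([('ssh', 'Machine'), ('net', 'Network')])
    assert 'ssh' in choices        # native str keys match str lookups
    assert b'ssh' not in choices   # stale bytes keys would never match

On Python 2 the two literals were interchangeable, which is how the prefixes went unnoticed in these frozen files for so long. Note that null='True' (a string, not the boolean True) predates this commit and is deliberately carried over unchanged; any non-empty string is truthy, so the field still behaves as null=True.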
diff --git a/awx/main/migrations/0003_squashed_v300_v303_updates.py b/awx/main/migrations/0003_squashed_v300_v303_updates.py
index 48210bfa48..3c3680a4e7 100644
--- a/awx/main/migrations/0003_squashed_v300_v303_updates.py
+++ b/awx/main/migrations/0003_squashed_v300_v303_updates.py
@@ -9,20 +9,20 @@
from django.db import migrations, models
from django.conf import settings
import awx.main.fields
-import _squashed
-from _squashed_30 import SQUASHED_30
+from . import _squashed
+from ._squashed_30 import SQUASHED_30
class Migration(migrations.Migration):
- replaces = [(b'main', '0020_v300_labels_changes'),
- (b'main', '0021_v300_activity_stream'),
- (b'main', '0022_v300_adhoc_extravars'),
- (b'main', '0023_v300_activity_stream_ordering'),
- (b'main', '0024_v300_jobtemplate_allow_simul'),
- (b'main', '0025_v300_update_rbac_parents'),
- (b'main', '0026_v300_credential_unique'),
- (b'main', '0027_v300_team_migrations'),
- (b'main', '0028_v300_org_team_cascade')] + _squashed.replaces(SQUASHED_30, applied=True)
+ replaces = [('main', '0020_v300_labels_changes'),
+ ('main', '0021_v300_activity_stream'),
+ ('main', '0022_v300_adhoc_extravars'),
+ ('main', '0023_v300_activity_stream_ordering'),
+ ('main', '0024_v300_jobtemplate_allow_simul'),
+ ('main', '0025_v300_update_rbac_parents'),
+ ('main', '0026_v300_credential_unique'),
+ ('main', '0027_v300_team_migrations'),
+ ('main', '0028_v300_org_team_cascade')] + _squashed.replaces(SQUASHED_30, applied=True)
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
@@ -63,22 +63,22 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='adhoccommand',
name='extra_vars',
- field=models.TextField(default=b'', blank=True),
+ field=models.TextField(default='', blank=True),
),
migrations.AlterField(
model_name='credential',
name='kind',
- field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]),
+ field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('openstack', 'OpenStack')]),
),
migrations.AlterField(
model_name='inventorysource',
name='source',
- field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+ field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
- field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+ field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
),
# jobtemplate allow simul
migrations.AddField(
@@ -90,17 +90,17 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='use_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.admin_role', b'admin_role'], to='main.Role', null=b'True'),
+ field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.admin_role', 'admin_role'], to='main.Role', null='True'),
),
migrations.AlterField(
model_name='team',
name='member_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
+ field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role='admin_role', to='main.Role', null='True'),
),
migrations.AlterField(
model_name='team',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'member_role'], to='main.Role', null=b'True'),
+ field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'member_role'], to='main.Role', null='True'),
),
# Unique credential
migrations.AlterUniqueTogether(
@@ -110,7 +110,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'),
+ field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'use_role', 'admin_role'], to='main.Role', null='True'),
),
# Team cascade
migrations.AlterField(
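Besides the literal cleanup, each squashed migration module here swaps its helper imports over to the explicit relative form, since PEP 328 removed Python 2's implicit relative imports from Python 3. The before/after, as it appears in the hunks above and below:

    # Before (Python 2 only): resolved against awx/main/migrations/ implicitly.
    #   import _squashed
    #   from _squashed_30 import SQUASHED_30

    # After (valid on Python 2.6+ and on Python 3): explicit relative
    # imports within the awx.main.migrations package.
    from . import _squashed
    from ._squashed_30 import SQUASHED_30

Under Python 3 the old spelling raises ImportError, because import _squashed is now treated as an absolute import and no top-level _squashed module exists.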
diff --git a/awx/main/migrations/0004_squashed_v310_release.py b/awx/main/migrations/0004_squashed_v310_release.py
index f4263feaa3..965b5184d7 100644
--- a/awx/main/migrations/0004_squashed_v310_release.py
+++ b/awx/main/migrations/0004_squashed_v310_release.py
@@ -8,8 +8,8 @@
import django.db.models.deletion
import awx.main.models.workflow
import awx.main.fields
-import _squashed
-from _squashed_30 import SQUASHED_30
+from . import _squashed
+from ._squashed_30 import SQUASHED_30
class Migration(migrations.Migration):
@@ -19,7 +19,7 @@ class Migration(migrations.Migration):
]
replaces = _squashed.replaces(SQUASHED_30) + [
- (b'main', '0034_v310_release'),
+ ('main', '0034_v310_release'),
]
operations = _squashed.operations(SQUASHED_30) + [
@@ -42,13 +42,13 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='jobevent',
name='uuid',
- field=models.CharField(default=b'', max_length=1024, editable=False),
+ field=models.CharField(default='', max_length=1024, editable=False),
),
# Job Parent Event UUID
migrations.AddField(
model_name='jobevent',
name='parent_uuid',
- field=models.CharField(default=b'', max_length=1024, editable=False),
+ field=models.CharField(default='', max_length=1024, editable=False),
),
# Modify the HA Instance
migrations.RemoveField(
@@ -63,19 +63,19 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='become_method',
- field=models.CharField(default=b'', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[(b'', 'None'), (b'sudo', 'Sudo'), (b'su', 'Su'), (b'pbrun', 'Pbrun'), (b'pfexec', 'Pfexec'), (b'dzdo', 'DZDO'), (b'pmrun', 'Pmrun')]),
+ field=models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec'), ('dzdo', 'DZDO'), ('pmrun', 'Pmrun')]),
),
# Add Workflows
migrations.AlterField(
model_name='unifiedjob',
name='launch_type',
- field=models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency'), (b'workflow', 'Workflow'), (b'sync', 'Sync')]),
+ field=models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('sync', 'Sync')]),
),
migrations.CreateModel(
name='WorkflowJob',
fields=[
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
- ('extra_vars', models.TextField(default=b'', blank=True)),
+ ('extra_vars', models.TextField(default='', blank=True)),
],
options={
'ordering': ('id',),
@@ -101,8 +101,8 @@ class Migration(migrations.Migration):
name='WorkflowJobTemplate',
fields=[
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
- ('extra_vars', models.TextField(default=b'', blank=True)),
- ('admin_role', awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_administrator', to='main.Role', null=b'True')),
+ ('extra_vars', models.TextField(default='', blank=True)),
+ ('admin_role', awx.main.fields.ImplicitRoleField(related_name='+', parent_role='singleton:system_administrator', to='main.Role', null='True')),
],
bases=('main.unifiedjobtemplate', models.Model),
),
@@ -176,7 +176,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobtemplate',
name='execute_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
+ field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'),
),
migrations.AddField(
model_name='workflowjobtemplate',
@@ -186,7 +186,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobtemplate',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'),
+ field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role'], to='main.Role', null='True'),
),
migrations.AddField(
model_name='workflowjobtemplatenode',
@@ -216,7 +216,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='workflowjobtemplate',
name='admin_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'),
+ field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator', 'organization.admin_role'], to='main.Role', null='True'),
),
migrations.AlterField(
model_name='workflowjobtemplatenode',
@@ -269,23 +269,23 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='unifiedjob',
name='execution_node',
- field=models.TextField(default=b'', editable=False, blank=True),
+ field=models.TextField(default='', editable=False, blank=True),
),
# SCM Revision
migrations.AddField(
model_name='project',
name='scm_revision',
- field=models.CharField(default=b'', editable=False, max_length=1024, blank=True, help_text='The last revision fetched by a project update', verbose_name='SCM Revision'),
+ field=models.CharField(default='', editable=False, max_length=1024, blank=True, help_text='The last revision fetched by a project update', verbose_name='SCM Revision'),
),
migrations.AddField(
model_name='projectupdate',
name='job_type',
- field=models.CharField(default=b'check', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check')]),
+ field=models.CharField(default='check', max_length=64, choices=[('run', 'Run'), ('check', 'Check')]),
),
migrations.AddField(
model_name='job',
name='scm_revision',
- field=models.CharField(default=b'', editable=False, max_length=1024, blank=True, help_text='The SCM Revision from the Project used for this job, if available', verbose_name='SCM Revision'),
+ field=models.CharField(default='', editable=False, max_length=1024, blank=True, help_text='The SCM Revision from the Project used for this job, if available', verbose_name='SCM Revision'),
),
# Project Playbook Files
migrations.AddField(
@@ -307,12 +307,12 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='adhoccommandevent',
name='stdout',
- field=models.TextField(default=b'', editable=False),
+ field=models.TextField(default='', editable=False),
),
migrations.AddField(
model_name='adhoccommandevent',
name='uuid',
- field=models.CharField(default=b'', max_length=1024,
editable=False), + field=models.CharField(default='', max_length=1024, editable=False), ), migrations.AddField( model_name='adhoccommandevent', @@ -327,7 +327,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='jobevent', name='playbook', - field=models.CharField(default=b'', max_length=1024, editable=False), + field=models.CharField(default='', max_length=1024, editable=False), ), migrations.AddField( model_name='jobevent', @@ -337,7 +337,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='jobevent', name='stdout', - field=models.TextField(default=b'', editable=False), + field=models.TextField(default='', editable=False), ), migrations.AddField( model_name='jobevent', @@ -352,7 +352,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='adhoccommandevent', name='event', - field=models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped'), (b'debug', 'Debug'), (b'verbose', 'Verbose'), (b'deprecated', 'Deprecated'), (b'warning', 'Warning'), (b'system_warning', 'System Warning'), (b'error', 'Error')]), + field=models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_skipped', 'Host Skipped'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')]), ), migrations.AlterField( model_name='jobevent', @@ -362,7 +362,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='jobevent', name='event', - field=models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 'Host Async Failure'), (b'runner_item_on_ok', 'Item OK'), (b'runner_item_on_failed', 'Item Failed'), (b'runner_item_on_skipped', 'Item Skipped'), (b'runner_retry', 'Host Retry'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_include', 'Including File'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete'), (b'debug', 'Debug'), (b'verbose', 'Verbose'), (b'deprecated', 'Deprecated'), (b'warning', 'Warning'), (b'system_warning', 'System Warning'), (b'error', 'Error')]), + field=models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), 
('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')]), ), migrations.AlterUniqueTogether( name='adhoccommandevent', @@ -505,7 +505,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='host', name='instance_id', - field=models.CharField(default=b'', help_text='The value used by the remote inventory source to uniquely identify the host', max_length=1024, blank=True), + field=models.CharField(default='', help_text='The value used by the remote inventory source to uniquely identify the host', max_length=1024, blank=True), ), migrations.AlterField( model_name='project', @@ -520,7 +520,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='project', name='scm_type', - field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'), + field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'), ), migrations.AlterField( model_name='project', @@ -535,7 +535,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='project', name='scm_url', - field=models.CharField(default=b'', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True), + field=models.CharField(default='', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True), ), migrations.AlterField( model_name='project', @@ -555,12 +555,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='projectupdate', name='scm_type', - field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'), + field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'), ), migrations.AlterField( model_name='projectupdate', name='scm_url', - field=models.CharField(default=b'', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True), + 
field=models.CharField(default='', help_text='The location where the project is stored.', max_length=1024, verbose_name='SCM URL', blank=True), ), migrations.AlterField( model_name='projectupdate', @@ -600,7 +600,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='unifiedjob', name='execution_node', - field=models.TextField(default=b'', help_text='The Tower node the job executed on.', editable=False, blank=True), + field=models.TextField(default='', help_text='The Tower node the job executed on.', editable=False, blank=True), ), migrations.AlterField( model_name='unifiedjob', @@ -610,7 +610,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='unifiedjob', name='job_explanation', - field=models.TextField(default=b'', help_text="A status field to indicate the state of the job if it wasn't able to run and capture stdout", editable=False, blank=True), + field=models.TextField(default='', help_text="A status field to indicate the state of the job if it wasn't able to run and capture stdout", editable=False, blank=True), ), migrations.AlterField( model_name='unifiedjob', diff --git a/awx/main/migrations/0005_squashed_v310_v313_updates.py b/awx/main/migrations/0005_squashed_v310_v313_updates.py index 3a2e05e270..771d80fc6f 100644 --- a/awx/main/migrations/0005_squashed_v310_v313_updates.py +++ b/awx/main/migrations/0005_squashed_v310_v313_updates.py @@ -2,8 +2,8 @@ from __future__ import unicode_literals from django.db import migrations -import _squashed -from _squashed_31 import SQUASHED_31 +from . import _squashed +from ._squashed_31 import SQUASHED_31 class Migration(migrations.Migration): diff --git a/awx/main/migrations/0006_v320_release.py b/awx/main/migrations/0006_v320_release.py index 8902a34438..cda08b98c3 100644 --- a/awx/main/migrations/0006_v320_release.py +++ b/awx/main/migrations/0006_v320_release.py @@ -72,7 +72,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='inventory', name='kind', - field=models.CharField(default=b'', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[(b'', 'Hosts have a direct link to this inventory.'), (b'smart', 'Hosts for inventory generated using the host_filter property.')]), + field=models.CharField(default='', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[('', 'Hosts have a direct link to this inventory.'), ('smart', 'Hosts for inventory generated using the host_filter property.')]), ), migrations.CreateModel( name='SmartInventoryMembership', @@ -143,7 +143,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='inventorysource', name='scm_last_revision', - field=models.CharField(default=b'', max_length=1024, editable=False, blank=True), + field=models.CharField(default='', max_length=1024, editable=False, blank=True), ), migrations.AddField( model_name='inventorysource', @@ -163,27 +163,27 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='inventorysource', name='source', - field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]), + field=models.CharField(default='', 
max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]), ), migrations.AlterField( model_name='inventoryupdate', name='source', - field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]), + field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]), ), migrations.AlterField( model_name='inventorysource', name='source_path', - field=models.CharField(default=b'', max_length=1024, blank=True), + field=models.CharField(default='', max_length=1024, blank=True), ), migrations.AlterField( model_name='inventoryupdate', name='source_path', - field=models.CharField(default=b'', max_length=1024, blank=True), + field=models.CharField(default='', max_length=1024, blank=True), ), migrations.AlterField( model_name='unifiedjob', name='launch_type', - field=models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency'), (b'workflow', 'Workflow'), (b'sync', 'Sync'), (b'scm', 'SCM Update')]), + field=models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('sync', 'Sync'), ('scm', 'SCM Update')]), ), migrations.AddField( model_name='inventorysource', @@ -211,12 +211,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='inventorysource', name='verbosity', - field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, b'0 (WARNING)'), (1, b'1 (INFO)'), (2, b'2 (DEBUG)')]), + field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, '0 (WARNING)'), (1, '1 (INFO)'), (2, '2 (DEBUG)')]), ), migrations.AddField( model_name='inventoryupdate', name='verbosity', - field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, b'0 (WARNING)'), (1, b'1 (INFO)'), (2, b'2 (DEBUG)')]), + field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, '0 (WARNING)'), (1, '1 (INFO)'), (2, '2 (DEBUG)')]), ), # Job Templates @@ -317,7 +317,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='inventory', name='kind', - field=models.CharField(default=b'', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[(b'', 'Hosts have a direct link to this inventory.'), (b'smart', 'Hosts for inventory generated using the 
host_filter property.')]), + field=models.CharField(default='', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[('', 'Hosts have a direct link to this inventory.'), ('smart', 'Hosts for inventory generated using the host_filter property.')]), ), # Timeout help text update @@ -378,9 +378,9 @@ class Migration(migrations.Migration): ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('description', models.TextField(default=b'', blank=True)), + ('description', models.TextField(default='', blank=True)), ('name', models.CharField(max_length=512)), - ('kind', models.CharField(max_length=32, choices=[(b'ssh', 'Machine'), (b'vault', 'Vault'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'cloud', 'Cloud'), (b'insights', 'Insights')])), + ('kind', models.CharField(max_length=32, choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('insights', 'Insights')])), ('managed_by_tower', models.BooleanField(default=False, editable=False)), ('inputs', awx.main.fields.CredentialTypeInputField(default={}, blank=True, help_text='Enter inputs using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.')), ('injectors', awx.main.fields.CredentialTypeInjectorField(default={}, blank=True, help_text='Enter injectors using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.')), @@ -435,7 +435,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='credential', name='become_method', - field=models.CharField(default=b'', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[(b'', 'None'), (b'sudo', 'Sudo'), (b'su', 'Su'), (b'pbrun', 'Pbrun'), (b'pfexec', 'Pfexec'), (b'dzdo', 'DZDO'), (b'pmrun', 'Pmrun'), (b'runas', 'Runas')]), + field=models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec'), ('dzdo', 'DZDO'), ('pmrun', 'Pmrun'), ('runas', 'Runas')]), ), # Connecting activity stream @@ -496,6 +496,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='unifiedjob', name='execution_node', - field=models.TextField(default=b'', help_text='The node the job executed on.', editable=False, blank=True), + field=models.TextField(default='', help_text='The node the job executed on.', editable=False, blank=True), ), ] diff --git a/awx/main/migrations/0010_v322_add_ovirt4_tower_inventory.py b/awx/main/migrations/0010_v322_add_ovirt4_tower_inventory.py index 2cdf557856..ea8b91c7d4 100644 --- a/awx/main/migrations/0010_v322_add_ovirt4_tower_inventory.py +++ b/awx/main/migrations/0010_v322_add_ovirt4_tower_inventory.py @@ -20,11 +20,11 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='inventorysource', name='source', - field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), 
(b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'rhv', 'Red Hat Virtualization'), (b'tower', 'Ansible Tower'), (b'custom', 'Custom Script')]), + field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')]), ), migrations.AlterField( model_name='inventoryupdate', name='source', - field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'rhv', 'Red Hat Virtualization'), (b'tower', 'Ansible Tower'), (b'custom', 'Custom Script')]), + field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')]), ), ] diff --git a/awx/main/migrations/0018_v330_add_additional_stdout_events.py b/awx/main/migrations/0018_v330_add_additional_stdout_events.py index 6612e28b7c..80fdbe3bf7 100644 --- a/awx/main/migrations/0018_v330_add_additional_stdout_events.py +++ b/awx/main/migrations/0018_v330_add_additional_stdout_events.py @@ -21,9 +21,9 @@ class Migration(migrations.Migration): ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), ('event_data', awx.main.fields.JSONField(blank=True, default={})), - ('uuid', models.CharField(default=b'', editable=False, max_length=1024)), + ('uuid', models.CharField(default='', editable=False, max_length=1024)), ('counter', models.PositiveIntegerField(default=0, editable=False)), - ('stdout', models.TextField(default=b'', editable=False)), + ('stdout', models.TextField(default='', editable=False)), ('verbosity', models.PositiveIntegerField(default=0, editable=False)), ('start_line', models.PositiveIntegerField(default=0, editable=False)), ('end_line', models.PositiveIntegerField(default=0, editable=False)), @@ -39,17 +39,17 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('event', models.CharField(choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 
'Host Async Failure'), (b'runner_item_on_ok', 'Item OK'), (b'runner_item_on_failed', 'Item Failed'), (b'runner_item_on_skipped', 'Item Skipped'), (b'runner_retry', 'Host Retry'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_include', 'Including File'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete'), (b'debug', 'Debug'), (b'verbose', 'Verbose'), (b'deprecated', 'Deprecated'), (b'warning', 'Warning'), (b'system_warning', 'System Warning'), (b'error', 'Error')], max_length=100)), + ('event', models.CharField(choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')], max_length=100)), ('event_data', awx.main.fields.JSONField(blank=True, default={})), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), - ('uuid', models.CharField(default=b'', editable=False, max_length=1024)), - ('playbook', models.CharField(default=b'', editable=False, max_length=1024)), - ('play', models.CharField(default=b'', editable=False, max_length=1024)), - ('role', models.CharField(default=b'', editable=False, max_length=1024)), - ('task', models.CharField(default=b'', editable=False, max_length=1024)), + ('uuid', models.CharField(default='', editable=False, max_length=1024)), + ('playbook', models.CharField(default='', editable=False, max_length=1024)), + ('play', models.CharField(default='', editable=False, max_length=1024)), + ('role', models.CharField(default='', editable=False, max_length=1024)), + ('task', models.CharField(default='', editable=False, max_length=1024)), ('counter', models.PositiveIntegerField(default=0, editable=False)), - ('stdout', models.TextField(default=b'', editable=False)), + ('stdout', models.TextField(default='', editable=False)), 
('verbosity', models.PositiveIntegerField(default=0, editable=False)), ('start_line', models.PositiveIntegerField(default=0, editable=False)), ('end_line', models.PositiveIntegerField(default=0, editable=False)), @@ -66,9 +66,9 @@ class Migration(migrations.Migration): ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), ('event_data', awx.main.fields.JSONField(blank=True, default={})), - ('uuid', models.CharField(default=b'', editable=False, max_length=1024)), + ('uuid', models.CharField(default='', editable=False, max_length=1024)), ('counter', models.PositiveIntegerField(default=0, editable=False)), - ('stdout', models.TextField(default=b'', editable=False)), + ('stdout', models.TextField(default='', editable=False)), ('verbosity', models.PositiveIntegerField(default=0, editable=False)), ('start_line', models.PositiveIntegerField(default=0, editable=False)), ('end_line', models.PositiveIntegerField(default=0, editable=False)), diff --git a/awx/main/migrations/0021_v330_declare_new_rbac_roles.py b/awx/main/migrations/0021_v330_declare_new_rbac_roles.py index 4714a0194c..20dc7685e0 100644 --- a/awx/main/migrations/0021_v330_declare_new_rbac_roles.py +++ b/awx/main/migrations/0021_v330_declare_new_rbac_roles.py @@ -18,77 +18,77 @@ class Migration(migrations.Migration): migrations.AddField( model_name='organization', name='execute_role', - field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'), ), migrations.AddField( model_name='organization', name='job_template_admin_role', - field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'), ), migrations.AddField( model_name='organization', name='credential_admin_role', - field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'), ), migrations.AddField( model_name='organization', name='inventory_admin_role', - field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'), ), migrations.AddField( model_name='organization', name='project_admin_role', - field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'), ), migrations.AddField( model_name='organization', name='workflow_admin_role', - field=awx.main.fields.ImplicitRoleField(null=b'True', 
on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'), ), migrations.AddField( model_name='organization', name='notification_admin_role', - field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'), ), migrations.AlterField( model_name='credential', name='admin_role', - field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.credential_admin_role'], related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['singleton:system_administrator', 'organization.credential_admin_role'], related_name='+', to='main.Role'), ), migrations.AlterField( model_name='inventory', name='admin_role', - field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'organization.inventory_admin_role', related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'), ), migrations.AlterField( model_name='project', name='admin_role', - field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'organization.project_admin_role', b'singleton:system_administrator'], related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['organization.project_admin_role', 'singleton:system_administrator'], related_name='+', to='main.Role'), ), migrations.AlterField( model_name='workflowjobtemplate', name='admin_role', - field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.workflow_admin_role'], related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'], related_name='+', to='main.Role'), ), migrations.AlterField( model_name='workflowjobtemplate', name='execute_role', - field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'organization.execute_role'], related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'organization.execute_role'], related_name='+', to='main.Role'), ), migrations.AlterField( model_name='jobtemplate', name='admin_role', - field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'project.organization.job_template_admin_role', b'inventory.organization.job_template_admin_role'], related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(editable=False, null='True', 
on_delete=django.db.models.deletion.CASCADE, parent_role=[b'project.organization.job_template_admin_role', b'inventory.organization.job_template_admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='jobtemplate',
name='execute_role',
- field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'project.organization.execute_role', b'inventory.organization.execute_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='organization',
name='member_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'execute_role', b'project_admin_role', b'inventory_admin_role', b'workflow_admin_role', b'notification_admin_role', b'credential_admin_role', b'job_template_admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role'], related_name='+', to='main.Role'),
),
]
diff --git a/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py b/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py
index e1b2d0d87e..08a1415951 100644
--- a/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py
+++ b/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py
@@ -35,8 +35,8 @@ class Migration(migrations.Migration):
('skip_authorization', models.BooleanField(default=False)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
- ('description', models.TextField(blank=True, default=b'')),
- ('logo_data', models.TextField(default=b'', editable=False, validators=[django.core.validators.RegexValidator(re.compile(b'.*'))])),
+ ('description', models.TextField(blank=True, default='')),
+ ('logo_data', models.TextField(default='', editable=False, validators=[django.core.validators.RegexValidator(re.compile('.*'))])),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2application', to=settings.AUTH_USER_MODEL)),
],
options={
@@ -52,7 +52,7 @@ class Migration(migrations.Migration):
('scope', models.TextField(blank=True)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
- ('description', models.CharField(blank=True, default=b'', max_length=200)),
+ ('description', models.CharField(blank=True, default='', max_length=200)),
('last_used', models.DateTimeField(default=None, editable=False, null=True)),
('application', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.OAUTH2_PROVIDER_APPLICATION_MODEL)),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2accesstoken', to=settings.AUTH_USER_MODEL)),
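One change in 0025 above is easy to overlook: the logo_data validator's pattern loses its prefix as well, re.compile(b'.*') becoming re.compile('.*'). On Python 3 a bytes pattern can only be applied to bytes, so running the old validator against a str-backed text field would raise TypeError rather than validate. A small runnable sketch:

    import re

    str_pattern = re.compile('.*')
    assert str_pattern.match('any text') is not None

    bytes_pattern = re.compile(b'.*')
    try:
        bytes_pattern.match('any text')   # bytes pattern against str input
    except TypeError as exc:
        print(exc)   # cannot use a bytes pattern on a string-like object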
diff --git a/awx/main/migrations/0033_v330_oauth_help_text.py b/awx/main/migrations/0033_v330_oauth_help_text.py
index 0b64579d65..8912978065 100644
--- a/awx/main/migrations/0033_v330_oauth_help_text.py
+++ b/awx/main/migrations/0033_v330_oauth_help_text.py
@@ -20,7 +20,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2accesstoken',
name='scope',
- field=models.TextField(blank=True, default=b'write', help_text="Allowed scopes, further restricts user's permissions."),
+ field=models.TextField(blank=True, default='write', help_text="Allowed scopes, further restricts user's permissions."),
),
migrations.AlterField(
model_name='oauth2accesstoken',
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2application',
name='authorization_grant_type',
- field=models.CharField(choices=[(b'authorization-code', 'Authorization code'), (b'implicit', 'Implicit'), (b'password', 'Resource owner password-based'), (b'client-credentials', 'Client credentials')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32),
+ field=models.CharField(choices=[('authorization-code', 'Authorization code'), ('implicit', 'Implicit'), ('password', 'Resource owner password-based'), ('client-credentials', 'Client credentials')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32),
),
migrations.AlterField(
model_name='oauth2application',
@@ -40,7 +40,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2application',
name='client_type',
- field=models.CharField(choices=[(b'confidential', 'Confidential'), (b'public', 'Public')], help_text='Set to Public or Confidential depending on how secure the client device is.', max_length=32),
+ field=models.CharField(choices=[('confidential', 'Confidential'), ('public', 'Public')], help_text='Set to Public or Confidential depending on how secure the client device is.', max_length=32),
),
migrations.AlterField(
model_name='oauth2application',
diff --git a/awx/main/migrations/0035_v330_more_oauth2_help_text.py b/awx/main/migrations/0035_v330_more_oauth2_help_text.py
index c109f3ef88..91d8531730 100644
--- a/awx/main/migrations/0035_v330_more_oauth2_help_text.py
+++ b/awx/main/migrations/0035_v330_more_oauth2_help_text.py
@@ -16,6 +16,6 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2accesstoken',
name='scope',
- field=models.TextField(blank=True, default=b'write', help_text="Allowed scopes, further restricts user's permissions. Must be a simple space-separated string with allowed scopes ['read', 'write']."),
Must be a simple space-separated string with allowed scopes ['read', 'write']."), ), ] diff --git a/awx/main/migrations/0040_v330_unifiedjob_controller_node.py b/awx/main/migrations/0040_v330_unifiedjob_controller_node.py index 8b127dd06d..fae3e641c4 100644 --- a/awx/main/migrations/0040_v330_unifiedjob_controller_node.py +++ b/awx/main/migrations/0040_v330_unifiedjob_controller_node.py @@ -15,6 +15,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name='unifiedjob', name='controller_node', - field=models.TextField(blank=True, default=b'', editable=False, help_text='The instance that managed the isolated execution environment.'), + field=models.TextField(blank=True, default='', editable=False, help_text='The instance that managed the isolated execution environment.'), ), ] diff --git a/awx/main/migrations/0042_v330_org_member_role_deparent.py b/awx/main/migrations/0042_v330_org_member_role_deparent.py index 2ae100053d..67795e3901 100644 --- a/awx/main/migrations/0042_v330_org_member_role_deparent.py +++ b/awx/main/migrations/0042_v330_org_member_role_deparent.py @@ -18,12 +18,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='organization', name='member_role', - field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role'], related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'), ), migrations.AlterField( model_name='organization', name='read_role', - field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'member_role', b'auditor_role', b'execute_role', b'project_admin_role', b'inventory_admin_role', b'workflow_admin_role', b'notification_admin_role', b'credential_admin_role', b'job_template_admin_role'], related_name='+', to='main.Role'), + field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['member_role', 'auditor_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role'], related_name='+', to='main.Role'), ), migrations.RunPython(rebuild_role_hierarchy), ] diff --git a/awx/main/migrations/0046_v330_remove_client_credentials_grant.py b/awx/main/migrations/0046_v330_remove_client_credentials_grant.py index e4eca09fa8..ebb87b3b5a 100644 --- a/awx/main/migrations/0046_v330_remove_client_credentials_grant.py +++ b/awx/main/migrations/0046_v330_remove_client_credentials_grant.py @@ -15,6 +15,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='oauth2application', name='authorization_grant_type', - field=models.CharField(choices=[(b'authorization-code', 'Authorization code'), (b'implicit', 'Implicit'), (b'password', 'Resource owner password-based')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32), + field=models.CharField(choices=[('authorization-code', 'Authorization code'), ('implicit', 'Implicit'), ('password', 'Resource owner password-based')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32), ), ] diff --git a/awx/main/migrations/0048_v330_django_created_modified_by_model_name.py 
b/awx/main/migrations/0048_v330_django_created_modified_by_model_name.py index 8c205ed3a1..13e04cbec8 100644 --- a/awx/main/migrations/0048_v330_django_created_modified_by_model_name.py +++ b/awx/main/migrations/0048_v330_django_created_modified_by_model_name.py @@ -17,131 +17,131 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='credential', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credential', u'model_name': 'credential'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credential', 'model_name': 'credential', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='credential', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credential', u'model_name': 'credential'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credential', 'model_name': 'credential', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='credentialtype', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credentialtype', u'model_name': 'credentialtype'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credentialtype', 'model_name': 'credentialtype', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='credentialtype', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credentialtype', u'model_name': 'credentialtype'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credentialtype', 'model_name': 'credentialtype', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='custominventoryscript', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'custominventoryscript', u'model_name': 'custominventoryscript'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'custominventoryscript', 'model_name': 'custominventoryscript', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='custominventoryscript', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'custominventoryscript', 
u'model_name': 'custominventoryscript'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'custominventoryscript', 'model_name': 'custominventoryscript', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='group', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'group', u'model_name': 'group'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'group', 'model_name': 'group', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='group', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'group', u'model_name': 'group'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'group', 'model_name': 'group', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='host', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'host', u'model_name': 'host'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'host', 'model_name': 'host', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='host', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'host', u'model_name': 'host'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'host', 'model_name': 'host', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='inventory', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'inventory', u'model_name': 'inventory'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'inventory', 'model_name': 'inventory', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='inventory', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'inventory', u'model_name': 'inventory'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, 
related_name="{'class': 'inventory', 'model_name': 'inventory', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='label', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'label', u'model_name': 'label'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'label', 'model_name': 'label', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='label', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'label', u'model_name': 'label'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'label', 'model_name': 'label', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='notificationtemplate', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'notificationtemplate', u'model_name': 'notificationtemplate'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'notificationtemplate', 'model_name': 'notificationtemplate', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='notificationtemplate', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'notificationtemplate', u'model_name': 'notificationtemplate'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'notificationtemplate', 'model_name': 'notificationtemplate', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='organization', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'organization', u'model_name': 'organization'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'organization', 'model_name': 'organization', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='organization', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'organization', u'model_name': 'organization'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'organization', 'model_name': 
'organization', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='schedule', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'schedule', u'model_name': 'schedule'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'schedule', 'model_name': 'schedule', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='schedule', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'schedule', u'model_name': 'schedule'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'schedule', 'model_name': 'schedule', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='team', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'team', u'model_name': 'team'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'team', 'model_name': 'team', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='team', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'team', u'model_name': 'team'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'team', 'model_name': 'team', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='unifiedjob', name='created_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjob', u'model_name': 'unifiedjob'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjob', 'model_name': 'unifiedjob', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='unifiedjob', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjob', u'model_name': 'unifiedjob'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjob', 'model_name': 'unifiedjob', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='unifiedjobtemplate', name='created_by', - 
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjobtemplate', u'model_name': 'unifiedjobtemplate'}(class)s_created+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjobtemplate', 'model_name': 'unifiedjobtemplate', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='unifiedjobtemplate', name='modified_by', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjobtemplate', u'model_name': 'unifiedjobtemplate'}(class)s_modified+", to=settings.AUTH_USER_MODEL), + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjobtemplate', 'model_name': 'unifiedjobtemplate', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL), ), ] diff --git a/awx/main/migrations/_squashed.py b/awx/main/migrations/_squashed.py index e2f19970b4..6391a5bc3d 100644 --- a/awx/main/migrations/_squashed.py +++ b/awx/main/migrations/_squashed.py @@ -45,8 +45,8 @@ def replaces(squashed, applied=False): ''' squashed_keys, key_index = squash_data(squashed) if applied: - return [(b'main', key) for key in squashed_keys[:key_index]] - return [(b'main', key) for key in squashed_keys[key_index:]] + return [('main', key) for key in squashed_keys[:key_index]] + return [('main', key) for key in squashed_keys[key_index:]] def operations(squashed, applied=False): diff --git a/awx/main/migrations/_squashed_30.py b/awx/main/migrations/_squashed_30.py index 5dd99762d6..910be80287 100644 --- a/awx/main/migrations/_squashed_30.py +++ b/awx/main/migrations/_squashed_30.py @@ -42,12 +42,12 @@ SQUASHED_30 = { migrations.AlterField( model_name='credential', name='admin_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator', 'organization.admin_role'], to='main.Role', null='True'), ), migrations.AlterField( model_name='credential', name='use_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'), + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'), ), ], '0033_v303_v245_host_variable_fix': [ diff --git a/awx/main/migrations/_squashed_31.py b/awx/main/migrations/_squashed_31.py index 3c43c26a48..1362943319 100644 --- a/awx/main/migrations/_squashed_31.py +++ b/awx/main/migrations/_squashed_31.py @@ -17,24 +17,24 @@ SQUASHED_31 = { migrations.AlterField( model_name='project', name='scm_type', - field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'), + field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the 
source control system used to store the project.', verbose_name='SCM Type'), ), migrations.AlterField( model_name='projectupdate', name='scm_type', - field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'), + field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'), ), ], '0036_v311_insights': [ migrations.AlterField( model_name='project', name='scm_type', - field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'), + field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'), ), migrations.AlterField( model_name='projectupdate', name='scm_type', - field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'), + field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'), ), ], '0037_v313_instance_version': [ diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index 135cb9d899..e1f1783f3a 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -152,10 +152,6 @@ def o_auth2_token_get_absolute_url(self, request=None): OAuth2AccessToken.add_to_class('get_absolute_url', o_auth2_token_get_absolute_url) - -# Import signal handlers only after models have been defined. 
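A note on the pattern that dominates the migration hunks above: every default=b'', null=b'True', and b'...' choice key loses its b prefix. On Python 2 the prefix was a no-op, but on Python 3 it produces bytes, which would flow into code expecting str. A minimal sketch, standard library only; it also shows the long-standing null='True' quirk these hunks deliberately preserve:

    # b'' was str on py2 but is bytes on py3, so a TextField(default=b'')
    # would start handing bytes to str-expecting code.
    assert isinstance(b'', bytes) and not isinstance(b'', str)

    # null='True' (a string, not the boolean) is kept as-is above; any
    # non-empty string is truthy, so it still acts like null=True even
    # though it presumably started life as a typo.
    assert bool('True') and bool('False')  # both truthy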
-import awx.main.signals # noqa - from awx.main.registrar import activity_stream_registrar # noqa activity_stream_registrar.connect(Organization) activity_stream_registrar.connect(Inventory) diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index 766cefbc5b..bd983dc58b 100644 --- a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -7,6 +7,7 @@ from awx.main.fields import JSONField # Django from django.db import models +from django.utils.encoding import smart_str from django.utils.translation import ugettext_lazy as _ __all__ = ['ActivityStream'] @@ -84,9 +85,9 @@ class ActivityStream(models.Model): if self.actor: self.deleted_actor = { 'id': self.actor_id, - 'username': self.actor.username, - 'first_name': self.actor.first_name, - 'last_name': self.actor.last_name, + 'username': smart_str(self.actor.username), + 'first_name': smart_str(self.actor.first_name), + 'last_name': smart_str(self.actor.last_name), } if 'update_fields' in kwargs and 'deleted_actor' not in kwargs['update_fields']: kwargs['update_fields'].append('deleted_actor') diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index af9c519812..ff2acbc890 100644 --- a/awx/main/models/ad_hoc_commands.py +++ b/awx/main/models/ad_hoc_commands.py @@ -3,7 +3,7 @@ # Python import logging -from urlparse import urljoin +from urllib.parse import urljoin # Django from django.conf import settings @@ -109,7 +109,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin): return self.limit def clean_module_name(self): - if type(self.module_name) not in (str, unicode): + if type(self.module_name) is not str: raise ValidationError(_("Invalid type for ad hoc command")) module_name = self.module_name.strip() or 'command' if module_name not in settings.AD_HOC_COMMANDS: @@ -117,7 +117,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin): return module_name def clean_module_args(self): - if type(self.module_args) not in (str, unicode): + if type(self.module_args) is not str: raise ValidationError(_("Invalid type for ad hoc command")) module_args = self.module_args if self.module_name in ('command', 'shell') and not module_args: diff --git a/awx/main/models/base.py b/awx/main/models/base.py index 663711cafa..ba44f9aa98 100644 --- a/awx/main/models/base.py +++ b/awx/main/models/base.py @@ -92,7 +92,7 @@ class BaseModel(models.Model): class Meta: abstract = True - def __unicode__(self): + def __str__(self): if 'name' in self.__dict__: return u'%s-%s' % (self.name, self.pk) else: @@ -152,7 +152,7 @@ class CreatedModifiedModel(BaseModel): ) def save(self, *args, **kwargs): - update_fields = kwargs.get('update_fields', []) + update_fields = list(kwargs.get('update_fields', [])) # Manually perform auto_now_add and auto_now logic. 
if not self.pk and not self.created: self.created = now() diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py index d6daa9f6fc..859583ad21 100644 --- a/awx/main/models/credential/__init__.py +++ b/awx/main/models/credential/__init__.py @@ -632,7 +632,7 @@ class CredentialType(CommonModelNameNotUnique): data = Template(file_tmpl).render(**namespace) _, path = tempfile.mkstemp(dir=private_data_dir) with open(path, 'w') as f: - f.write(data.encode('utf-8')) + f.write(data) os.chmod(path, stat.S_IRUSR | stat.S_IWUSR) # determine if filename indicates single file or many diff --git a/awx/main/models/events.py b/awx/main/models/events.py index 5eb0390e18..c53cd69d7b 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -27,7 +27,7 @@ __all__ = ['JobEvent', 'ProjectUpdateEvent', 'AdHocCommandEvent', def sanitize_event_keys(kwargs, valid_keys): # Sanity check: Don't honor keys that we don't recognize. - for key in kwargs.keys(): + for key in list(kwargs.keys()): if key not in valid_keys: kwargs.pop(key) @@ -424,7 +424,7 @@ class JobEvent(BasePlaybookEvent): def get_absolute_url(self, request=None): return reverse('api:job_event_detail', kwargs={'pk': self.pk}, request=request) - def __unicode__(self): + def __str__(self): return u'%s @ %s' % (self.get_event_display2(), self.created.isoformat()) def _update_from_event_data(self): @@ -580,7 +580,7 @@ class BaseCommandEvent(CreatedModifiedModel): editable=False, ) - def __unicode__(self): + def __str__(self): return u'%s @ %s' % (self.get_event_display(), self.created.isoformat()) @classmethod diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index 88289963f1..d5e718fc4c 100644 --- a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -1,11 +1,9 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
-import six import random from decimal import Decimal -from django.core.exceptions import ValidationError from django.core.validators import MinValueValidator from django.db import models, connection from django.db.models.signals import post_save, post_delete @@ -31,15 +29,6 @@ from awx.main.models.mixins import RelatedJobsMixin __all__ = ('Instance', 'InstanceGroup', 'JobOrigin', 'TowerScheduleState',) -def validate_queuename(v): - # kombu doesn't play nice with unicode in queue names - if v: - try: - '{}'.format(v.decode('utf-8')) - except UnicodeEncodeError: - raise ValidationError(_(six.text_type('{} contains unsupported characters')).format(v)) - - class HasPolicyEditsMixin(HasEditsMixin): class Meta: @@ -164,7 +153,6 @@ class Instance(HasPolicyEditsMixin, BaseModel): 'memory', 'cpu_capacity', 'mem_capacity']) def clean_hostname(self): - validate_queuename(self.hostname) return self.hostname @@ -235,7 +223,6 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin): app_label = 'main' def clean_name(self): - validate_queuename(self.name) return self.name def fit_task_to_most_remaining_capacity_instance(self, task): diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index e02271870d..22fa45b8b8 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -4,12 +4,13 @@ # Python import datetime import time +import itertools import logging import re import copy -from urlparse import urljoin import os.path import six +from urllib.parse import urljoin # Django from django.conf import settings @@ -343,9 +344,13 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin): host_updates = hosts_to_update.setdefault(host_pk, {}) host_updates['has_inventory_sources'] = False # Now apply updates to hosts where needed (in batches). - all_update_pks = hosts_to_update.keys() - for offset in xrange(0, len(all_update_pks), 500): - update_pks = all_update_pks[offset:(offset + 500)] + all_update_pks = list(hosts_to_update.keys()) + + def _chunk(items, chunk_size): + for i, group in itertools.groupby(enumerate(items), lambda x: x[0] // chunk_size): + yield (g[1] for g in group) + + for update_pks in _chunk(all_update_pks, 500): for host in hosts_qs.filter(pk__in=update_pks): host_updates = hosts_to_update[host.pk] for field, value in host_updates.items(): @@ -412,12 +417,12 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin): failed_group_pks.add(group_pk) # Now apply updates to each group as needed (in batches). 
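The new _chunk helper in inventory.py above replaces the old xrange-and-slice loop with itertools.groupby over enumerate: consecutive indices share the same index // chunk_size, so groupby yields one lazy group per 500-row window. Its behavior, demonstrated with small numbers:

    import itertools

    def _chunk(items, chunk_size):
        # each group is a generator of the items in one window
        for i, group in itertools.groupby(enumerate(items),
                                          lambda x: x[0] // chunk_size):
            yield (g[1] for g in group)

    assert [list(c) for c in _chunk(list(range(7)), 3)] == [[0, 1, 2], [3, 4, 5], [6]]

Each yielded generator must be consumed before the outer loop advances, which the batched filter(pk__in=update_pks) call in the diff does.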
- all_update_pks = groups_to_update.keys() - for offset in xrange(0, len(all_update_pks), 500): + all_update_pks = list(groups_to_update.keys()) + for offset in range(0, len(all_update_pks), 500): update_pks = all_update_pks[offset:(offset + 500)] for group in self.groups.filter(pk__in=update_pks): group_updates = groups_to_update[group.pk] - for field, value in group_updates.items(): + for field, value in list(group_updates.items()): if getattr(group, field) != value: setattr(group, field, value) else: @@ -458,7 +463,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin): } # CentOS python seems to have issues clobbering the inventory on poor timing during certain operations iobj = Inventory.objects.get(id=self.id) - for field, value in computed_fields.items(): + for field, value in list(computed_fields.items()): if getattr(iobj, field) != value: setattr(iobj, field, value) # update in-memory object diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index a989ed579d..43f2b01ea2 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -8,7 +8,7 @@ import logging import os import time import json -from urlparse import urljoin +from urllib.parse import urljoin import six @@ -347,8 +347,8 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour except JobLaunchConfig.DoesNotExist: wj_config = JobLaunchConfig() actual_inventory = wj_config.inventory if wj_config.inventory else self.inventory - for idx in xrange(min(self.job_slice_count, - actual_inventory.hosts.count())): + for idx in range(min(self.job_slice_count, + actual_inventory.hosts.count())): create_kwargs = dict(workflow_job=job, unified_job_template=self, ancestor_artifacts=dict(job_slice=idx + 1)) @@ -695,7 +695,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana count_hosts = Host.objects.filter(inventory__jobs__pk=self.pk).count() if self.job_slice_count > 1: # Integer division intentional - count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) / self.job_slice_count + count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) // self.job_slice_count return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1 @property @@ -1120,7 +1120,7 @@ class JobHostSummary(CreatedModifiedModel): skipped = models.PositiveIntegerField(default=0, editable=False) failed = models.BooleanField(default=False, editable=False) - def __unicode__(self): + def __str__(self): host = getattr_dne(self, 'host') hostname = host.name if host else 'N/A' return '%s changed=%d dark=%d failures=%d ok=%d processed=%d skipped=%s' % \ diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index a803ffe6e9..58ae9fcc3d 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -82,7 +82,7 @@ class NotificationTemplate(CommonModelNameNotUnique): setattr(self, '_saved_{}_{}'.format("config", field), value) self.notification_configuration[field] = '' else: - encrypted = encrypt_field(self, 'notification_configuration', subfield=field, skip_utf8=True) + encrypted = encrypt_field(self, 'notification_configuration', subfield=field) self.notification_configuration[field] = encrypted if 'notification_configuration' not in update_fields: update_fields.append('notification_configuration') diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index 4974ad8c58..7502b81919 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -4,7 +4,7 
+4,7 @@ # Python import datetime import os -import urlparse +import urllib.parse as urlparse # Django from django.conf import settings @@ -68,7 +68,7 @@ class ProjectOptions(models.Model): @classmethod def get_local_path_choices(cls): if os.path.exists(settings.PROJECTS_ROOT): - paths = [x.decode('utf-8') for x in os.listdir(settings.PROJECTS_ROOT) + paths = [x for x in os.listdir(settings.PROJECTS_ROOT) if (os.path.isdir(os.path.join(settings.PROJECTS_ROOT, x)) and not x.startswith('.') and not x.startswith('_'))] qs = Project.objects diff --git a/awx/main/models/rbac.py b/awx/main/models/rbac.py index 740aa8ebd2..b8f2e6ce2d 100644 --- a/awx/main/models/rbac.py +++ b/awx/main/models/rbac.py @@ -155,7 +155,7 @@ class Role(models.Model): object_id = models.PositiveIntegerField(null=True, default=None) content_object = GenericForeignKey('content_type', 'object_id') - def __unicode__(self): + def __str__(self): if 'role_field' in self.__dict__: return u'%s-%s' % (self.name, self.pk) else: @@ -315,7 +315,7 @@ class Role(models.Model): # minus 4k of padding for the other parts of the query, leads us # to the magic number of 41496, or 40000 for a nice round number def split_ids_for_sqlite(role_ids): - for i in xrange(0, len(role_ids), 40000): + for i in range(0, len(role_ids), 40000): yield role_ids[i:i + 40000] diff --git a/awx/main/models/schedules.py b/awx/main/models/schedules.py index 617c436534..62ec12c206 100644 --- a/awx/main/models/schedules.py +++ b/awx/main/models/schedules.py @@ -209,7 +209,7 @@ class Schedule(CommonModel, LaunchTimeConfig): pass return x - def __unicode__(self): + def __str__(self): return u'%s_t%s_%s_%s' % (self.name, self.unified_job_template.id, self.id, self.next_run) def get_absolute_url(self, request=None): diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 16c82d65c2..c097bec537 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -2,7 +2,7 @@ # All Rights Reserved.
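The lone / to // swap in jobs.py a little above (alongside the 'Integer division intentional' comment) is one of the few true behavior traps in this port: on Python 3, / is true division even between ints, so the sliced-host arithmetic needs floor division to keep its py2 result:

    # e.g. 10 hosts split across 4 slices, computing slice number 1
    count_hosts, job_slice_count, job_slice_number = 10, 4, 1
    assert (count_hosts + job_slice_count - job_slice_number) / job_slice_count == 3.25
    assert (count_hosts + job_slice_count - job_slice_number) // job_slice_count == 3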
# Python -from StringIO import StringIO +from io import StringIO import json import logging import os @@ -353,7 +353,8 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio logger.warn(six.text_type('Fields {} are not allowed as overrides to spawn from {}.').format( six.text_type(', ').join(unallowed_fields), self )) - map(validated_kwargs.pop, unallowed_fields) + for f in unallowed_fields: + validated_kwargs.pop(f) unified_job = copy_model_by_class(self, unified_job_class, fields, validated_kwargs) @@ -735,7 +736,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique def _resources_sufficient_for_launch(self): return True - def __unicode__(self): + def __str__(self): return u'%s-%s-%s' % (self.created, self.id, self.status) @property @@ -900,7 +901,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique parent = getattr(self, self._get_parent_field_name()) if parent is None: return - valid_fields = parent.get_ask_mapping().keys() + valid_fields = list(parent.get_ask_mapping().keys()) # Special cases allowed for workflows if hasattr(self, 'extra_vars'): valid_fields.extend(['survey_passwords', 'extra_vars']) @@ -991,6 +992,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique if not os.path.exists(settings.JOBOUTPUT_ROOT): os.makedirs(settings.JOBOUTPUT_ROOT) fd = tempfile.NamedTemporaryFile( + mode='w', prefix='{}-{}-'.format(self.model_to_str(), self.pk), suffix='.out', dir=settings.JOBOUTPUT_ROOT @@ -1030,10 +1032,16 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique # don't bother actually fetching the data total = self.get_event_queryset().aggregate( total=models.Sum(models.Func(models.F('stdout'), function='LENGTH')) - )['total'] + )['total'] or 0 if total > max_supported: raise StdoutMaxBytesExceeded(total, max_supported) + # psycopg2's copy_expert writes bytes, but callers of this + # function assume a str-based fd will be returned; decode + # .write() calls on the fly to maintain this interface + _write = fd.write + fd.write = lambda s: _write(smart_text(s)) + cursor.copy_expert( "copy (select stdout from {} where {}={} order by start_line) to stdout".format( self._meta.db_table + 'event', @@ -1063,7 +1071,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique return content def _result_stdout_raw(self, redact_sensitive=False, escape_ascii=False): - content = self.result_stdout_raw_handle().read().decode('utf-8') + content = self.result_stdout_raw_handle().read() if redact_sensitive: content = UriCleaner.remove_sensitive(content) if escape_ascii: @@ -1096,7 +1104,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique else: end_actual = len(stdout_lines) - return_buffer = return_buffer.getvalue().decode('utf-8') + return_buffer = return_buffer.getvalue() if redact_sensitive: return_buffer = UriCleaner.remove_sensitive(return_buffer) if escape_ascii: @@ -1314,7 +1322,8 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique def cancel(self, job_explanation=None, is_chain=False): if self.can_cancel: if not is_chain: - map(lambda x: x.cancel(job_explanation=self._build_job_explanation(), is_chain=True), self.get_jobs_fail_chain()) + for x in self.get_jobs_fail_chain(): + x.cancel(job_explanation=self._build_job_explanation(), is_chain=True) if not self.cancel_flag: self.cancel_flag = True diff --git a/awx/main/models/workflow.py 
b/awx/main/models/workflow.py index 1a471a265d..17b869f418 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -2,7 +2,6 @@ # All Rights Reserved. # Python -#import urlparse import logging # Django @@ -37,7 +36,7 @@ from awx.main.redact import REPLACE_STR from awx.main.fields import JSONField from copy import copy -from urlparse import urljoin +from urllib.parse import urljoin __all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode', 'WorkflowJobTemplateNode',] diff --git a/awx/main/notifications/mattermost_backend.py b/awx/main/notifications/mattermost_backend.py index 650848c78c..8d8755f9bf 100644 --- a/awx/main/notifications/mattermost_backend.py +++ b/awx/main/notifications/mattermost_backend.py @@ -35,7 +35,7 @@ class MattermostBackend(AWXBaseEmailBackend): for m in messages: payload = {} for opt, optval in {'mattermost_icon_url':'icon_url', - 'mattermost_channel': 'channel', 'mattermost_username': 'username'}.iteritems(): + 'mattermost_channel': 'channel', 'mattermost_username': 'username'}.items(): optvalue = getattr(self, opt) if optvalue is not None: payload[optval] = optvalue.strip() diff --git a/awx/main/notifications/rocketchat_backend.py b/awx/main/notifications/rocketchat_backend.py index f316bf41c9..c7fe84f0b2 100644 --- a/awx/main/notifications/rocketchat_backend.py +++ b/awx/main/notifications/rocketchat_backend.py @@ -33,7 +33,7 @@ class RocketChatBackend(AWXBaseEmailBackend): for m in messages: payload = {"text": m.subject} for opt, optval in {'rocketchat_icon_url': 'icon_url', - 'rocketchat_username': 'username'}.iteritems(): + 'rocketchat_username': 'username'}.items(): optvalue = getattr(self, opt) if optvalue is not None: payload[optval] = optvalue.strip() diff --git a/awx/main/queue.py b/awx/main/queue.py index d8306c05ba..867cb68a8a 100644 --- a/awx/main/queue.py +++ b/awx/main/queue.py @@ -6,8 +6,6 @@ import json import logging import os -from six.moves import xrange - # Django from django.conf import settings @@ -51,7 +49,7 @@ class CallbackQueueDispatcher(object): if not self.callback_connection or not self.connection_queue: return active_pid = os.getpid() - for retry_count in xrange(4): + for retry_count in range(4): try: if not hasattr(self, 'connection_pid'): self.connection_pid = active_pid diff --git a/awx/main/redact.py b/awx/main/redact.py index 16c8fc0513..ec6211910b 100644 --- a/awx/main/redact.py +++ b/awx/main/redact.py @@ -1,12 +1,12 @@ import re -import urlparse +import urllib.parse as urlparse REPLACE_STR = '$encrypted$' class UriCleaner(object): REPLACE_STR = REPLACE_STR - SENSITIVE_URI_PATTERN = re.compile(ur'(\w+:(\/?\/?)[^\s]+)', re.MULTILINE) # NOQA + SENSITIVE_URI_PATTERN = re.compile(r'(\w+:(\/?\/?)[^\s]+)', re.MULTILINE) # NOQA @staticmethod def remove_sensitive(cleartext): diff --git a/awx/main/scheduler/dag_simple.py b/awx/main/scheduler/dag_simple.py index f3120581d1..71f1ff73c2 100644 --- a/awx/main/scheduler/dag_simple.py +++ b/awx/main/scheduler/dag_simple.py @@ -152,8 +152,8 @@ class SimpleDAG(object): return self._get_dependencies_by_label(this_ord, label) else: nodes = [] - map(lambda l: nodes.extend(self._get_dependencies_by_label(this_ord, l)), - self.node_from_edges_by_label.keys()) + for l in self.node_from_edges_by_label.keys(): + nodes.extend(self._get_dependencies_by_label(this_ord, l)) return nodes def _get_dependents_by_label(self, node_index, label): @@ -168,8 +168,8 @@ class SimpleDAG(object): return self._get_dependents_by_label(this_ord, label) else: nodes = [] 
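The dag_simple.py hunks above belong to a sweep visible throughout this diff (dependency_graph.py and task_manager.py follow): every map() invoked purely for side effects becomes an explicit for loop. On Python 3, map() returns a lazy iterator, so the old calls would quietly do nothing:

    calls = []
    map(calls.append, [1, 2, 3])   # py3: builds an iterator, never runs
    assert calls == []

    for j in [1, 2, 3]:            # the replacement pattern
        calls.append(j)
    assert calls == [1, 2, 3]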
- map(lambda l: nodes.extend(self._get_dependents_by_label(this_ord, l)), - self.node_to_edges_by_label.keys()) + for l in self.node_to_edges_by_label.keys(): + nodes.extend(self._get_dependents_by_label(this_ord, l)) return nodes def get_root_nodes(self): @@ -189,7 +189,7 @@ class SimpleDAG(object): node_obj = stack.pop() children = [node['node_object'] for node in self.get_dependencies(node_obj)] - children_to_add = filter(lambda node_obj: node_obj not in node_objs_visited, children) + children_to_add = list(filter(lambda node_obj: node_obj not in node_objs_visited, children)) if children_to_add: if node_obj in path: diff --git a/awx/main/scheduler/dependency_graph.py b/awx/main/scheduler/dependency_graph.py index 44ab2225e3..bbe2e71eba 100644 --- a/awx/main/scheduler/dependency_graph.py +++ b/awx/main/scheduler/dependency_graph.py @@ -145,4 +145,5 @@ class DependencyGraph(object): self.mark_inventory_update(job.inventory_id) def add_jobs(self, jobs): - map(lambda j: self.add_job(j), jobs) + for j in jobs: + self.add_job(j) diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index d6f3ae14fa..bca1c7dfc6 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -8,7 +8,6 @@ import uuid import json import six import random -from sets import Set # Django from django.db import transaction, connection @@ -77,14 +76,14 @@ class TaskManager(): def get_latest_project_update_tasks(self, all_sorted_tasks): - project_ids = Set() + project_ids = set() for task in all_sorted_tasks: if isinstance(task, Job): project_ids.add(task.project_id) return ProjectUpdate.objects.filter(id__in=project_ids) def get_latest_inventory_update_tasks(self, all_sorted_tasks): - inventory_ids = Set() + inventory_ids = set() for task in all_sorted_tasks: if isinstance(task, Job): inventory_ids.add(task.inventory_id) @@ -96,7 +95,7 @@ class TaskManager(): return graph_workflow_jobs def get_inventory_source_tasks(self, all_sorted_tasks): - inventory_ids = Set() + inventory_ids = set() for task in all_sorted_tasks: if isinstance(task, Job): inventory_ids.add(task.inventory_id) @@ -174,7 +173,8 @@ class TaskManager(): status_changed = True else: workflow_nodes = dag.mark_dnr_nodes() - map(lambda n: n.save(update_fields=['do_not_run']), workflow_nodes) + for n in workflow_nodes: + n.save(update_fields=['do_not_run']) is_done = dag.is_workflow_done() if not is_done: continue @@ -284,7 +284,9 @@ class TaskManager(): connection.on_commit(post_commit) def process_running_tasks(self, running_tasks): - map(lambda task: self.graph[task.instance_group.name]['graph'].add_job(task) if task.instance_group else None, running_tasks) + for task in running_tasks: + if task.instance_group: + self.graph[task.instance_group.name]['graph'].add_job(task) def create_project_update(self, task): project_task = Project.objects.get(id=task.project_id).create_project_update( @@ -323,7 +325,7 @@ class TaskManager(): for dep in dependencies: # Add task + all deps except self - dep.dependent_jobs.add(*([task] + filter(lambda d: d != dep, dependencies))) + dep.dependent_jobs.add(*([task] + [d for d in dependencies if d != dep])) def get_latest_inventory_update(self, inventory_source): latest_inventory_update = InventoryUpdate.objects.filter(inventory_source=inventory_source).order_by("-created") @@ -456,7 +458,7 @@ class TaskManager(): task.log_format, rampart_group.name, execution_instance.hostname)) if execution_instance: self.graph[rampart_group.name]['graph'].add_job(task) - 
tasks_to_fail = filter(lambda t: t != task, dependency_tasks) + tasks_to_fail = [t for t in dependency_tasks if t != task] tasks_to_fail += [dependent_task] self.start_task(task, rampart_group, tasks_to_fail, execution_instance) found_acceptable_queue = True @@ -534,13 +536,13 @@ class TaskManager(): return (self.graph[instance_group]['capacity_total'] - self.graph[instance_group]['consumed_capacity']) def process_tasks(self, all_sorted_tasks): - running_tasks = filter(lambda t: t.status in ['waiting', 'running'], all_sorted_tasks) + running_tasks = [t for t in all_sorted_tasks if t.status in ['waiting', 'running']] self.calculate_capacity_consumed(running_tasks) self.process_running_tasks(running_tasks) - pending_tasks = filter(lambda t: t.status in 'pending', all_sorted_tasks) + pending_tasks = [t for t in all_sorted_tasks if t.status == 'pending'] self.process_pending_tasks(pending_tasks) def _schedule(self): diff --git a/awx/main/signals.py b/awx/main/signals.py index ce21f92e8d..d417a7b695 100644 --- a/awx/main/signals.py +++ b/awx/main/signals.py @@ -20,9 +20,9 @@ from django.db.models.signals import ( ) from django.dispatch import receiver from django.contrib.auth import SESSION_KEY +from django.contrib.contenttypes.models import ContentType from django.contrib.sessions.models import Session from django.utils import timezone -from django.utils.translation import ugettext_lazy as _ # Django-CRUM from crum import get_current_request, get_current_user @@ -32,7 +32,6 @@ import six # AWX from awx.main.models import * # noqa -from awx.api.serializers import * # noqa from awx.main.constants import CENSOR_VALUE from awx.main.utils import model_instance_diff, model_to_dict, camelcase_to_underscore, get_current_apps from awx.main.utils import ignore_inventory_computed_fields, ignore_inventory_group_removal, _inventory_updates @@ -80,23 +79,28 @@ def emit_event_detail(serializer, relation, **kwargs): def emit_job_event_detail(sender, **kwargs): - emit_event_detail(JobEventWebSocketSerializer, 'job_id', **kwargs) + from awx.api import serializers + emit_event_detail(serializers.JobEventWebSocketSerializer, 'job_id', **kwargs) def emit_ad_hoc_command_event_detail(sender, **kwargs): - emit_event_detail(AdHocCommandEventWebSocketSerializer, 'ad_hoc_command_id', **kwargs) + from awx.api import serializers + emit_event_detail(serializers.AdHocCommandEventWebSocketSerializer, 'ad_hoc_command_id', **kwargs) def emit_project_update_event_detail(sender, **kwargs): - emit_event_detail(ProjectUpdateEventWebSocketSerializer, 'project_update_id', **kwargs) + from awx.api import serializers + emit_event_detail(serializers.ProjectUpdateEventWebSocketSerializer, 'project_update_id', **kwargs) def emit_inventory_update_event_detail(sender, **kwargs): - emit_event_detail(InventoryUpdateEventWebSocketSerializer, 'inventory_update_id', **kwargs) + from awx.api import serializers + emit_event_detail(serializers.InventoryUpdateEventWebSocketSerializer, 'inventory_update_id', **kwargs) def emit_system_job_event_detail(sender, **kwargs): - emit_event_detail(SystemJobEventWebSocketSerializer, 'system_job_id', **kwargs) + from awx.api import serializers + emit_event_detail(serializers.SystemJobEventWebSocketSerializer, 'system_job_id', **kwargs) def emit_update_inventory_computed_fields(sender, **kwargs): @@ -347,7 +351,7 @@ class ActivityStreamEnabled(threading.local): def __init__(self): self.enabled = True - def __nonzero__(self): + def __bool__(self): return bool(self.enabled and getattr(settings, 
'ACTIVITY_STREAM_ENABLED', True)) @@ -385,31 +389,38 @@ def disable_computed_fields(): connect_computed_field_signals() -model_serializer_mapping = { - Organization: OrganizationSerializer, - Inventory: InventorySerializer, - Host: HostSerializer, - Group: GroupSerializer, - InstanceGroup: InstanceGroupSerializer, - InventorySource: InventorySourceSerializer, - CustomInventoryScript: CustomInventoryScriptSerializer, - Credential: CredentialSerializer, - Team: TeamSerializer, - Project: ProjectSerializer, - JobTemplate: JobTemplateWithSpecSerializer, - Job: JobSerializer, - AdHocCommand: AdHocCommandSerializer, - NotificationTemplate: NotificationTemplateSerializer, - Notification: NotificationSerializer, - CredentialType: CredentialTypeSerializer, - Schedule: ScheduleSerializer, - Label: LabelSerializer, - WorkflowJobTemplate: WorkflowJobTemplateWithSpecSerializer, - WorkflowJobTemplateNode: WorkflowJobTemplateNodeSerializer, - WorkflowJob: WorkflowJobSerializer, - OAuth2AccessToken: OAuth2TokenSerializer, - OAuth2Application: OAuth2ApplicationSerializer, -} +def model_serializer_mapping(): + from awx.api import serializers + from awx.main import models + + from awx.conf.models import Setting + from awx.conf.serializers import SettingSerializer + return { + Setting: SettingSerializer, + models.Organization: serializers.OrganizationSerializer, + models.Inventory: serializers.InventorySerializer, + models.Host: serializers.HostSerializer, + models.Group: serializers.GroupSerializer, + models.InstanceGroup: serializers.InstanceGroupSerializer, + models.InventorySource: serializers.InventorySourceSerializer, + models.CustomInventoryScript: serializers.CustomInventoryScriptSerializer, + models.Credential: serializers.CredentialSerializer, + models.Team: serializers.TeamSerializer, + models.Project: serializers.ProjectSerializer, + models.JobTemplate: serializers.JobTemplateWithSpecSerializer, + models.Job: serializers.JobSerializer, + models.AdHocCommand: serializers.AdHocCommandSerializer, + models.NotificationTemplate: serializers.NotificationTemplateSerializer, + models.Notification: serializers.NotificationSerializer, + models.CredentialType: serializers.CredentialTypeSerializer, + models.Schedule: serializers.ScheduleSerializer, + models.Label: serializers.LabelSerializer, + models.WorkflowJobTemplate: serializers.WorkflowJobTemplateWithSpecSerializer, + models.WorkflowJobTemplateNode: serializers.WorkflowJobTemplateNodeSerializer, + models.WorkflowJob: serializers.WorkflowJobSerializer, + models.OAuth2AccessToken: serializers.OAuth2TokenSerializer, + models.OAuth2Application: serializers.OAuth2ApplicationSerializer, + } def activity_stream_create(sender, instance, created, **kwargs): @@ -422,7 +433,7 @@ def activity_stream_create(sender, instance, created, **kwargs): if getattr(_type, '_deferred', False): return object1 = camelcase_to_underscore(instance.__class__.__name__) - changes = model_to_dict(instance, model_serializer_mapping) + changes = model_to_dict(instance, model_serializer_mapping()) # Special case where Job survey password variables need to be hidden if type(instance) == Job: changes['credentials'] = [ @@ -461,7 +472,7 @@ def activity_stream_update(sender, instance, **kwargs): return new = instance - changes = model_instance_diff(old, new, model_serializer_mapping) + changes = model_instance_diff(old, new, model_serializer_mapping()) if changes is None: return _type = type(instance) @@ -506,7 +517,7 @@ def activity_stream_delete(sender, instance, **kwargs): _type = 
type(instance) if getattr(_type, '_deferred', False): return - changes.update(model_to_dict(instance, model_serializer_mapping)) + changes.update(model_to_dict(instance, model_serializer_mapping())) object1 = camelcase_to_underscore(instance.__class__.__name__) if type(instance) == OAuth2AccessToken: changes['token'] = CENSOR_VALUE @@ -643,7 +654,7 @@ def save_user_session_membership(sender, **kwargs): if len(expired): consumers.emit_channel_notification( 'control-limit_reached_{}'.format(user.pk), - dict(group_name='control', reason=unicode(_('limit_reached'))) + dict(group_name='control', reason='limit_reached') ) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 5debb8b2fc..14790f6ad0 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -3,8 +3,7 @@ # Python from collections import OrderedDict, namedtuple -import ConfigParser -import cStringIO +import configparser import errno import functools import importlib @@ -18,7 +17,6 @@ import stat import tempfile import time import traceback -import urlparse from distutils.version import LooseVersion as Version import yaml import fcntl @@ -26,6 +24,8 @@ try: import psutil except Exception: psutil = None +from io import StringIO +import urllib.parse as urlparse # Django from django.conf import settings @@ -185,9 +185,9 @@ def apply_cluster_membership_policies(): actual_instances = [Node(obj=i, groups=[]) for i in considered_instances if i.managed_by_policy] logger.info("Total non-isolated instances:{} available for policy: {}".format( total_instances, len(actual_instances))) - for g in sorted(actual_groups, cmp=lambda x,y: len(x.instances) - len(y.instances)): + for g in sorted(actual_groups, key=lambda x: len(x.instances)): policy_min_added = [] - for i in sorted(actual_instances, cmp=lambda x,y: len(x.groups) - len(y.groups)): + for i in sorted(actual_instances, key=lambda x: len(x.groups)): if len(g.instances) >= g.obj.policy_instance_minimum: break if i.obj.id in g.instances: @@ -201,9 +201,9 @@ def apply_cluster_membership_policies(): logger.info(six.text_type("Policy minimum, adding Instances {} to Group {}").format(policy_min_added, g.obj.name)) # Finally, process instance policy percentages - for g in sorted(actual_groups, cmp=lambda x,y: len(x.instances) - len(y.instances)): + for g in sorted(actual_groups, key=lambda x: len(x.instances)): policy_per_added = [] - for i in sorted(actual_instances, cmp=lambda x,y: len(x.groups) - len(y.groups)): + for i in sorted(actual_instances, key=lambda x: len(x.groups)): if i.obj.id in g.instances: # If the instance is already _in_ the group, it was # applied earlier via a minimum policy or policy list @@ -294,7 +294,7 @@ def send_notifications(notification_list, job_id=None): finally: try: notification.save(update_fields=update_fields) - except Exception as e: + except Exception: logger.exception(six.text_type('Error saving notification {} result.').format(notification.id)) @@ -722,12 +722,12 @@ class BaseTask(object): ssh_ver = get_ssh_version() ssh_too_old = True if ssh_ver == "unknown" else Version(ssh_ver) < Version("6.0") openssh_keys_supported = ssh_ver != "unknown" and Version(ssh_ver) >= Version("6.5") - for credential, data in private_data.get('credentials', {}).iteritems(): + for credential, data in private_data.get('credentials', {}).items(): # Bail out now if a private key was provided in OpenSSH format # and we're running an earlier version (<6.5). 
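apply_cluster_membership_policies above also hits the removal of sorted(cmp=...): Python 3 accepts only a key function. The pairwise comparator lambda x, y: len(x.instances) - len(y.instances) translates directly to key=lambda x: len(x.instances), ascending by length, shown here with bare lists:

    groups = [['a', 'b'], [], ['c']]
    # was: sorted(groups, cmp=lambda x, y: len(x) - len(y))
    assert sorted(groups, key=len) == [[], ['c'], ['a', 'b']]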
@@ -722,12 +722,12 @@ class BaseTask(object):
         ssh_ver = get_ssh_version()
         ssh_too_old = True if ssh_ver == "unknown" else Version(ssh_ver) < Version("6.0")
         openssh_keys_supported = ssh_ver != "unknown" and Version(ssh_ver) >= Version("6.5")
-        for credential, data in private_data.get('credentials', {}).iteritems():
+        for credential, data in private_data.get('credentials', {}).items():
             # Bail out now if a private key was provided in OpenSSH format
             # and we're running an earlier version (<6.5).
             if 'OPENSSH PRIVATE KEY' in data and not openssh_keys_supported:
                 raise RuntimeError(OPENSSH_KEY_ERROR)
-        for credential, data in private_data.get('credentials', {}).iteritems():
+        for credential, data in private_data.get('credentials', {}).items():
             # OpenSSH formatted keys must have a trailing newline to be
             # accepted by ssh-add.
             if 'OPENSSH PRIVATE KEY' in data and not data.endswith('\n'):
@@ -831,7 +831,7 @@ class BaseTask(object):
             json_data = json.dumps(script_data)
             handle, path = tempfile.mkstemp(dir=kwargs.get('private_data_dir', None))
             f = os.fdopen(handle, 'w')
-            f.write('#! /usr/bin/env python\n# -*- coding: utf-8 -*-\nprint %r\n' % json_data)
+            f.write('#! /usr/bin/env python\n# -*- coding: utf-8 -*-\nprint(%r)\n' % json_data)
             f.close()
             os.chmod(path, stat.S_IRUSR | stat.S_IXUSR | stat.S_IWUSR)
             return path
@@ -882,7 +882,7 @@ class BaseTask(object):
             if 'uuid' in event_data:
                 cache_event = cache.get('ev-{}'.format(event_data['uuid']), None)
                 if cache_event is not None:
-                    event_data.update(cache_event)
+                    event_data.update(json.loads(cache_event))
             dispatcher.dispatch(event_data)

         return OutputEventFilter(event_callback)
@@ -1588,7 +1588,7 @@ class RunProjectUpdate(BaseTask):

     def build_safe_args(self, project_update, **kwargs):
         pwdict = dict(kwargs.get('passwords', {}).items())
-        for pw_name, pw_val in pwdict.items():
+        for pw_name, pw_val in list(pwdict.items()):
             if pw_name in ('', 'yes', 'no', 'scm_username'):
                 continue
             pwdict[pw_name] = HIDDEN_PASSWORD
@@ -1609,7 +1609,7 @@ class RunProjectUpdate(BaseTask):
         scm_username = kwargs.get('passwords', {}).get('scm_username', '')
         scm_password = kwargs.get('passwords', {}).get('scm_password', '')
         pwdict = dict(kwargs.get('passwords', {}).items())
-        for pw_name, pw_val in pwdict.items():
+        for pw_name, pw_val in list(pwdict.items()):
             if pw_name in ('', 'yes', 'no', 'scm_username'):
                 continue
             pwdict[pw_name] = HIDDEN_PASSWORD
@@ -1850,7 +1850,7 @@ class RunInventoryUpdate(BaseTask):
             )
             return private_data

-        cp = ConfigParser.ConfigParser()
+        cp = configparser.RawConfigParser()

         # Build custom ec2.ini for ec2 inventory script to use.
         if inventory_update.source == 'ec2':
             section = 'ec2'
@@ -1881,14 +1881,14 @@ class RunInventoryUpdate(BaseTask):
                 cache_path = tempfile.mkdtemp(prefix='ec2_cache', dir=kwargs.get('private_data_dir', None))
                 ec2_opts['cache_path'] = cache_path
             ec2_opts.setdefault('cache_max_age', '300')
-            for k,v in ec2_opts.items():
+            for k, v in ec2_opts.items():
                 cp.set(section, k, six.text_type(v))

         # Allow custom options to vmware inventory script.
        elif inventory_update.source == 'vmware':
             section = 'vmware'
             cp.add_section(section)
-            cp.set('vmware', 'cache_max_age', 0)
+            cp.set('vmware', 'cache_max_age', '0')
             cp.set('vmware', 'validate_certs', str(settings.VMWARE_VALIDATE_CERTS))
             cp.set('vmware', 'username', credential.username)
             cp.set('vmware', 'password', decrypt_field(credential, 'password'))
@@ -1900,7 +1900,7 @@ class RunInventoryUpdate(BaseTask):
             if inventory_update.group_by:
                 vmware_opts.setdefault('groupby_patterns', inventory_update.group_by)

-            for k,v in vmware_opts.items():
+            for k, v in vmware_opts.items():
                 cp.set(section, k, six.text_type(v))

         elif inventory_update.source == 'satellite6':
@@ -1913,9 +1913,9 @@ class RunInventoryUpdate(BaseTask):
             foreman_opts = dict(inventory_update.source_vars_dict.items())
             foreman_opts.setdefault('ssl_verify', 'False')
             for k, v in foreman_opts.items():
-                if k == 'satellite6_group_patterns' and isinstance(v, basestring):
+                if k == 'satellite6_group_patterns' and isinstance(v, str):
                     group_patterns = v
-                elif k == 'satellite6_group_prefix' and isinstance(v, basestring):
+                elif k == 'satellite6_group_prefix' and isinstance(v, str):
                     group_prefix = v
                 elif k == 'satellite6_want_hostcollections' and isinstance(v, bool):
                     want_hostcollections = v
@@ -1930,8 +1930,8 @@ class RunInventoryUpdate(BaseTask):
             section = 'ansible'
             cp.add_section(section)
             cp.set(section, 'group_patterns', group_patterns)
-            cp.set(section, 'want_facts', True)
-            cp.set(section, 'want_hostcollections', want_hostcollections)
+            cp.set(section, 'want_facts', 'True')
+            cp.set(section, 'want_hostcollections', str(want_hostcollections))
             cp.set(section, 'group_prefix', group_prefix)

             section = 'cache'
@@ -1952,7 +1952,7 @@ class RunInventoryUpdate(BaseTask):
             cloudforms_opts = dict(inventory_update.source_vars_dict.items())
             for opt in ['version', 'purge_actions', 'clean_group_keys', 'nest_tags', 'suffix', 'prefer_ipv4']:
                 if opt in cloudforms_opts:
-                    cp.set(section, opt, cloudforms_opts[opt])
+                    cp.set(section, opt, str(cloudforms_opts[opt]))

             section = 'cache'
             cp.add_section(section)
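A recurring pattern in the RunInventoryUpdate hunks: Python 3's configparser rejects non-string option values, where Python 2's ConfigParser silently coerced them, so every cp.set() value is converted to str. A runnable illustration:

import configparser

cp = configparser.RawConfigParser()
cp.add_section('vmware')
cp.set('vmware', 'cache_max_age', '0')        # OK: option values must be str
try:
    cp.set('vmware', 'validate_certs', True)  # TypeError on Python 3
except TypeError:
    pass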
@@ -1978,12 +1978,12 @@ class RunInventoryUpdate(BaseTask):
             )

             azure_rm_opts = dict(inventory_update.source_vars_dict.items())
-            for k,v in azure_rm_opts.items():
+            for k, v in azure_rm_opts.items():
                 cp.set(section, k, six.text_type(v))

         # Return INI content.
         if cp.sections():
-            f = cStringIO.StringIO()
+            f = StringIO()
             cp.write(f)
             private_data['credentials'][credential] = f.getvalue()
         return private_data
@@ -2054,7 +2054,7 @@ class RunInventoryUpdate(BaseTask):

             # by default, the GCE inventory source caches results on disk for
             # 5 minutes; disable this behavior
-            cp = ConfigParser.ConfigParser()
+            cp = configparser.ConfigParser()
             cp.add_section('cache')
             cp.set('cache', 'cache_max_age', '0')
             handle, path = tempfile.mkstemp(dir=kwargs.get('private_data_dir', None))
@@ -2134,7 +2134,7 @@ class RunInventoryUpdate(BaseTask):
         f = os.fdopen(handle, 'w')
         if inventory_update.source_script is None:
             raise RuntimeError('Inventory Script does not exist')
-        f.write(inventory_update.source_script.script.encode('utf-8'))
+        f.write(inventory_update.source_script.script)
         f.close()
         os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
         args.append(path)
diff --git a/awx/main/templatetags/swagger.py b/awx/main/templatetags/swagger.py
index 314d599710..62e61abdff 100644
--- a/awx/main/templatetags/swagger.py
+++ b/awx/main/templatetags/swagger.py
@@ -1,5 +1,4 @@
 import re
-from django.utils.encoding import force_unicode
 from django import template

 register = template.Library()
@@ -12,7 +11,6 @@ VOWEL_SOUND = re.compile(r'''[aeio]|u([aeiou]|[^n][^aeiou]|ni[^dmnl]|nil[^l])|h(
 def anora(text):
     # https://pypi.python.org/pypi/anora
     # < 10 lines of BSD-3 code, not worth a dependency
-    text = force_unicode(text)
     anora = 'an' if not CONSONANT_SOUND.match(text) and VOWEL_SOUND.match(text) else 'a'
     return anora + ' ' + text
diff --git a/awx/main/tests/conftest.py b/awx/main/tests/conftest.py
index 1435c4efa1..4cf63bc8ad 100644
--- a/awx/main/tests/conftest.py
+++ b/awx/main/tests/conftest.py
@@ -1,7 +1,7 @@
 # Python
 import pytest
-import mock
+from unittest import mock
 from contextlib import contextmanager

 from awx.main.tests.factories import (
diff --git a/awx/main/tests/docs/test_swagger_generation.py b/awx/main/tests/docs/test_swagger_generation.py
index e85ce45a3c..db74d6280c 100644
--- a/awx/main/tests/docs/test_swagger_generation.py
+++ b/awx/main/tests/docs/test_swagger_generation.py
@@ -7,7 +7,6 @@ from django.core.serializers.json import DjangoJSONEncoder
 from django.utils.functional import Promise
 from django.utils.encoding import force_text

-from coreapi.compat import force_bytes
 from openapi_codec.encode import generate_swagger_object
 import pytest

@@ -18,6 +17,8 @@ class i18nEncoder(DjangoJSONEncoder):
     def default(self, obj):
         if isinstance(obj, Promise):
             return force_text(obj)
+        if type(obj) == bytes:
+            return force_text(obj)
         return super(i18nEncoder, self).default(obj)
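The i18nEncoder hunk above works around json.dumps() refusing to serialize bytes on Python 3 by decoding them to text first (force_text is Django's helper; the plain decode below stands in for it). A runnable sketch of the same idea with only the stdlib:

import json

class BytesFriendlyEncoder(json.JSONEncoder):
    # Decode bytes instead of failing, mirroring the i18nEncoder change.
    def default(self, obj):
        if isinstance(obj, bytes):
            return obj.decode('utf-8')
        return super().default(obj)

assert json.dumps({'k': b'v'}, cls=BytesFriendlyEncoder) == '{"k": "v"}'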
@@ -91,16 +92,16 @@ class TestSwaggerGeneration():
         # for a reasonable number here; if this test starts failing, raise/lower the bounds
         paths = JSON['paths']
         assert 250 < len(paths) < 300
-        assert paths['/api/'].keys() == ['get']
-        assert paths['/api/v2/'].keys() == ['get']
-        assert sorted(
+        assert list(paths['/api/'].keys()) == ['get']
+        assert list(paths['/api/v2/'].keys()) == ['get']
+        assert list(sorted(
             paths['/api/v2/credentials/'].keys()
-        ) == ['get', 'post']
-        assert sorted(
+        )) == ['get', 'post']
+        assert list(sorted(
             paths['/api/v2/credentials/{id}/'].keys()
-        ) == ['delete', 'get', 'patch', 'put']
-        assert paths['/api/v2/settings/'].keys() == ['get']
-        assert paths['/api/v2/settings/{category_slug}/'].keys() == [
+        )) == ['delete', 'get', 'patch', 'put']
+        assert list(paths['/api/v2/settings/'].keys()) == ['get']
+        assert list(paths['/api/v2/settings/{category_slug}/'].keys()) == [
             'get', 'put', 'patch', 'delete'
         ]
@@ -162,9 +163,7 @@ class TestSwaggerGeneration():
     @classmethod
     def teardown_class(cls):
         with open('swagger.json', 'w') as f:
-            data = force_bytes(
-                json.dumps(cls.JSON, cls=i18nEncoder, indent=2)
-            )
+            data = json.dumps(cls.JSON, cls=i18nEncoder, indent=2)
             # replace ISO dates w/ the same value so we don't generate
             # needless diffs
             data = re.sub(
diff --git a/awx/main/tests/factories/objects.py b/awx/main/tests/factories/objects.py
index 7de49d998c..8246a71908 100644
--- a/awx/main/tests/factories/objects.py
+++ b/awx/main/tests/factories/objects.py
@@ -26,7 +26,7 @@ def generate_role_objects(objects):
     combined_objects = {}
     for o in objects:
         if type(o) is dict:
-            for k,v in o.iteritems():
+            for k, v in o.items():
                 if combined_objects.get(k) is not None:
                     raise NotUnique(k, combined_objects)
                 combined_objects[k] = v
diff --git a/awx/main/tests/functional/api/test_activity_streams.py b/awx/main/tests/functional/api/test_activity_streams.py
index 00578753ac..72ae48d9e4 100644
--- a/awx/main/tests/functional/api/test_activity_streams.py
+++ b/awx/main/tests/functional/api/test_activity_streams.py
@@ -1,4 +1,4 @@
-import mock
+from unittest import mock
 import pytest

 from awx.api.versioning import reverse
diff --git a/awx/main/tests/functional/api/test_adhoc.py b/awx/main/tests/functional/api/test_adhoc.py
index c315abb1b2..a081a36cce 100644
--- a/awx/main/tests/functional/api/test_adhoc.py
+++ b/awx/main/tests/functional/api/test_adhoc.py
@@ -1,4 +1,4 @@
-import mock  # noqa
+from unittest import mock  # noqa
 import pytest

 from awx.api.versioning import reverse
@@ -38,7 +38,7 @@ def post_adhoc(post, inventory, machine_credential):
         if 'credential' not in data:
             data['credential'] = machine_credential.id

-        for k,v in data.items():
+        for k, v in list(data.items()):
             if v is None:
                 del data[k]

diff --git a/awx/main/tests/functional/api/test_credential.py b/awx/main/tests/functional/api/test_credential.py
index 2800a70be3..a031114269 100644
--- a/awx/main/tests/functional/api/test_credential.py
+++ b/awx/main/tests/functional/api/test_credential.py
@@ -1,9 +1,11 @@
 import itertools
 import re

-import mock  # noqa
+from unittest import mock  # noqa
 import pytest

+from django.utils.encoding import smart_str
+
 from awx.main.models import (AdHocCommand, Credential, CredentialType, Job, JobTemplate,
                              Inventory, InventorySource, Project, WorkflowJobNode)

@@ -255,7 +257,7 @@ def test_credential_validation_error_with_bad_user(post, admin, version, credent
         admin
     )
     assert response.status_code == 400
-    assert response.data['user'][0] == 'Incorrect type. Expected pk value, received unicode.'
+    assert response.data['user'][0] == 'Incorrect type. Expected pk value, received str.'
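Several of the swagger assertions change because dict.keys() returns a view object on Python 3, not a list, so comparing it to a list always fails. Runnable:

paths = {'get': 1, 'post': 2}
# A view never equals a list, even with the same members in order:
assert paths.keys() != ['get', 'post']
assert list(paths.keys()) == ['get', 'post']
assert sorted(paths.keys()) == ['get', 'post']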

 @pytest.mark.django_db
@@ -799,7 +801,7 @@ def test_field_dependencies(get, post, organization, admin, kind, extraneous):
         admin
     )
     assert response.status_code == 400
-    assert re.search('cannot be set unless .+ is set.', response.content)
+    assert re.search('cannot be set unless .+ is set.', smart_str(response.content))
     assert Credential.objects.count() == 0

diff --git a/awx/main/tests/functional/api/test_deprecated_credential_assignment.py b/awx/main/tests/functional/api/test_deprecated_credential_assignment.py
index 8beaddd391..37ab829e9e 100644
--- a/awx/main/tests/functional/api/test_deprecated_credential_assignment.py
+++ b/awx/main/tests/functional/api/test_deprecated_credential_assignment.py
@@ -1,7 +1,8 @@
 import json

-import mock
+from unittest import mock
 import pytest
+from django.utils.encoding import smart_str

 from awx.main.models import Credential, CredentialType, Job
 from awx.api.versioning import reverse
@@ -48,7 +49,7 @@ def test_ssh_credential_access(get, job_template, admin, machine_credential):
 def test_invalid_credential_update(get, patch, job_template, admin, key):
     url = reverse('api:job_template_detail', kwargs={'pk': job_template.pk, 'version': 'v1'})
     resp = patch(url, {key: 999999}, admin, expect=400)
-    assert 'Credential 999999 does not exist' in json.loads(resp.content)[key]
+    assert 'Credential 999999 does not exist' in json.loads(smart_str(resp.content))[key]

 @pytest.mark.django_db
@@ -63,7 +64,7 @@ def test_ssh_credential_update(get, patch, job_template, admin, machine_credenti
 def test_ssh_credential_update_invalid_kind(get, patch, job_template, admin, vault_credential):
     url = reverse('api:job_template_detail', kwargs={'pk': job_template.pk})
     resp = patch(url, {'credential': vault_credential.pk}, admin, expect=400)
-    assert 'You must provide an SSH credential.' in resp.content
+    assert 'You must provide an SSH credential.' in smart_str(resp.content)

 @pytest.mark.django_db
@@ -89,7 +90,7 @@ def test_vault_credential_update_invalid_kind(get, patch, job_template, admin, machine_credential):
     url = reverse('api:job_template_detail', kwargs={'pk': job_template.pk})
     resp = patch(url, {'vault_credential': machine_credential.pk}, admin, expect=400)
-    assert 'You must provide a vault credential.' in resp.content
+    assert 'You must provide a vault credential.' in smart_str(resp.content)

 @pytest.mark.django_db
@@ -118,7 +119,7 @@ def test_extra_credentials_requires_cloud_or_net(get, post, job_template, admin,
     for cred in (machine_credential, vault_credential):
         resp = post(url, {'associate': True, 'id': cred.pk}, admin, expect=400)
-        assert 'Extra credentials must be network or cloud.' in resp.content
+        assert 'Extra credentials must be network or cloud.' in smart_str(resp.content)

     post(url, {'associate': True, 'id': credential.pk}, admin, expect=204)
     assert get(url, admin).data['count'] == 1
@@ -148,7 +149,7 @@ def test_prevent_multiple_machine_creds(get, post, job_template, admin, machine_
     assert get(url, admin).data['count'] == 1

     resp = post(url, _new_cred('Second Cred'), admin, expect=400)
-    assert 'Cannot assign multiple Machine credentials.' in resp.content
+    assert 'Cannot assign multiple Machine credentials.' in smart_str(resp.content)

 @pytest.mark.django_db
@@ -180,7 +181,7 @@ def test_prevent_multiple_machine_creds_at_launch(get, post, job_template, admin
     creds = [machine_credential.pk, other_cred.pk]
     url = reverse('api:job_template_launch', kwargs={'pk': job_template.pk})
     resp = post(url, {'credentials': creds}, admin)
-    assert 'Cannot assign multiple Machine credentials.' in resp.content
+    assert 'Cannot assign multiple Machine credentials.' in smart_str(resp.content)

 @pytest.mark.django_db
@@ -205,7 +206,7 @@ def test_extra_credentials_unique_by_kind(get, post, job_template, admin,
     assert get(url, admin).data['count'] == 1

     resp = post(url, _new_cred('Second Cred'), admin, expect=400)
-    assert 'Cannot assign multiple Amazon Web Services credentials.' in resp.content
+    assert 'Cannot assign multiple Amazon Web Services credentials.' in smart_str(resp.content)

 @pytest.mark.django_db
@@ -407,14 +408,14 @@ def test_inventory_source_deprecated_credential(get, patch, admin, ec2_source, c
     url = reverse('api:inventory_source_detail', kwargs={'pk': ec2_source.pk})
     patch(url, {'credential': credential.pk}, admin, expect=200)
     resp = get(url, admin, expect=200)
-    assert json.loads(resp.content)['credential'] == credential.pk
+    assert json.loads(smart_str(resp.content))['credential'] == credential.pk

 @pytest.mark.django_db
 def test_inventory_source_invalid_deprecated_credential(patch, admin, ec2_source, credential):
     url = reverse('api:inventory_source_detail', kwargs={'pk': ec2_source.pk})
     resp = patch(url, {'credential': 999999}, admin, expect=400)
-    assert 'Credential 999999 does not exist' in resp.content
+    assert 'Credential 999999 does not exist' in smart_str(resp.content)

 @pytest.mark.django_db
diff --git a/awx/main/tests/functional/api/test_fact_versions.py b/awx/main/tests/functional/api/test_fact_versions.py
index 8fa6178cb6..0c7c43b46c 100644
--- a/awx/main/tests/functional/api/test_fact_versions.py
+++ b/awx/main/tests/functional/api/test_fact_versions.py
@@ -1,9 +1,8 @@
 # Python
-import mock
+from unittest import mock
 import pytest
 from datetime import timedelta
-import urlparse
-import urllib
+import urllib.parse

 # AWX
 from awx.api.versioning import reverse
@@ -33,9 +32,9 @@ def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), get_params=


 def check_url(url1_full, fact_known, module):
-    url1_split = urlparse.urlsplit(url1_full)
+    url1_split = urllib.parse.urlsplit(url1_full)
     url1 = url1_split.path
-    url1_params = urlparse.parse_qsl(url1_split.query)
+    url1_params = urllib.parse.parse_qsl(url1_split.query)

     url2 = reverse('api:host_fact_compare_view', kwargs={'pk': fact_known.host.pk})
     url2_params = [('module', module), ('datetime', timestamp_apiformat(fact_known.timestamp))]
@@ -44,7 +43,7 @@ def check_url(url1_full, fact_known, module):
     # Sort before comparing because urlencode can't be trusted
     url1_params_sorted = sorted(url1_params, key=lambda val: val[0])
     url2_params_sorted = sorted(url2_params, key=lambda val: val[0])
-    assert urllib.urlencode(url1_params_sorted) == urllib.urlencode(url2_params_sorted)
+    assert urllib.parse.urlencode(url1_params_sorted) == urllib.parse.urlencode(url2_params_sorted)


 def check_response_facts(facts_known, response):
diff --git a/awx/main/tests/functional/api/test_fact_view.py b/awx/main/tests/functional/api/test_fact_view.py
index 9a46871973..63fbb06824 100644
--- a/awx/main/tests/functional/api/test_fact_view.py
+++ b/awx/main/tests/functional/api/test_fact_view.py
@@ -1,4 +1,4 @@
-import mock
+from unittest import mock
 import pytest
 import json
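Python 2 split URL handling across urllib and urlparse; Python 3 consolidates all of it into urllib.parse, which is why these hunks rewrite urlsplit, parse_qsl, urlencode, and quote calls. A runnable tour of the consolidated module:

from urllib.parse import urlsplit, parse_qsl, urlencode, quote

parts = urlsplit('https://awx.example.org/api/v2/hosts/?page=2')  # hypothetical URL
assert parts.path == '/api/v2/hosts/'
assert parse_qsl(parts.query) == [('page', '2')]
assert urlencode([('page', '2')]) == 'page=2'
assert quote('name=web server', safe='') == 'name%3Dweb%20server'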
diff --git a/awx/main/tests/functional/api/test_host_filter.py b/awx/main/tests/functional/api/test_host_filter.py
index 45542f6325..f65f737fd7 100644
--- a/awx/main/tests/functional/api/test_host_filter.py
+++ b/awx/main/tests/functional/api/test_host_filter.py
@@ -1,7 +1,7 @@
 # TODO: As of writing this our only concern is ensuring that the fact feature is reflected in the Host endpoint.
 # Other host tests should live here to make this test suite more complete.
 import pytest
-import urllib
+import urllib.parse

 from awx.api.versioning import reverse

@@ -24,7 +24,7 @@ def inventory_structure():
 def test_q1(inventory_structure, get, user):
     def evaluate_query(query, expected_hosts):
         url = reverse('api:host_list')
-        get_params = "?host_filter=%s" % urllib.quote(query, safe='')
+        get_params = "?host_filter=%s" % urllib.parse.quote(query, safe='')

         response = get(url + get_params, user('admin', True))
         hosts = response.data['results']
diff --git a/awx/main/tests/functional/api/test_instance_group.py b/awx/main/tests/functional/api/test_instance_group.py
index 939db63dc2..77c3997bd8 100644
--- a/awx/main/tests/functional/api/test_instance_group.py
+++ b/awx/main/tests/functional/api/test_instance_group.py
@@ -66,15 +66,15 @@ def create_project_update_factory(instance_group, project):

 @pytest.fixture
 def instance_group_jobs_running(instance_group, create_job_factory, create_project_update_factory):
-    jobs_running = [create_job_factory(status='running') for i in xrange(0, 2)]
-    project_updates_running = [create_project_update_factory(status='running') for i in xrange(0, 2)]
+    jobs_running = [create_job_factory(status='running') for i in range(0, 2)]
+    project_updates_running = [create_project_update_factory(status='running') for i in range(0, 2)]
     return jobs_running + project_updates_running


 @pytest.fixture
 def instance_group_jobs_successful(instance_group, create_job_factory, create_project_update_factory):
-    jobs_successful = [create_job_factory(status='successful') for i in xrange(0, 2)]
-    project_updates_successful = [create_project_update_factory(status='successful') for i in xrange(0, 2)]
+    jobs_successful = [create_job_factory(status='successful') for i in range(0, 2)]
+    project_updates_successful = [create_project_update_factory(status='successful') for i in range(0, 2)]
     return jobs_successful + project_updates_successful

diff --git a/awx/main/tests/functional/api/test_inventory.py b/awx/main/tests/functional/api/test_inventory.py
index 56e184a29a..a9cecf8ba0 100644
--- a/awx/main/tests/functional/api/test_inventory.py
+++ b/awx/main/tests/functional/api/test_inventory.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 import pytest
-import mock
+from unittest import mock

 from django.core.exceptions import ValidationError

diff --git a/awx/main/tests/functional/api/test_job.py b/awx/main/tests/functional/api/test_job.py
index 074a1bfcea..a2aef4b525 100644
--- a/awx/main/tests/functional/api/test_job.py
+++ b/awx/main/tests/functional/api/test_job.py
@@ -1,6 +1,6 @@
 # Python
 import pytest
-import mock
+from unittest import mock
 from dateutil.parser import parse
 from dateutil.relativedelta import relativedelta
 from crum import impersonate
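The xrange-to-range rewrites that run through these fixtures rely on Python 3's range() already being lazy, exactly like Python 2's xrange; a list is only materialized when one is explicitly requested:

jobs = ['job-%d' % i for i in range(0, 2)]
assert jobs == ['job-0', 'job-1']
# range() is lazy like Python 2's xrange; wrap it only when a list is needed.
assert list(range(3)) == [0, 1, 2]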
diff --git a/awx/main/tests/functional/api/test_job_runtime_params.py b/awx/main/tests/functional/api/test_job_runtime_params.py
index 4b21e3696b..340e2e3082 100644
--- a/awx/main/tests/functional/api/test_job_runtime_params.py
+++ b/awx/main/tests/functional/api/test_job_runtime_params.py
@@ -1,4 +1,4 @@
-import mock
+from unittest import mock
 import pytest
 import yaml
 import json
diff --git a/awx/main/tests/functional/api/test_oauth.py b/awx/main/tests/functional/api/test_oauth.py
index ad24ecadfb..7b55fcb2d7 100644
--- a/awx/main/tests/functional/api/test_oauth.py
+++ b/awx/main/tests/functional/api/test_oauth.py
@@ -5,12 +5,12 @@ import json
 from django.db import connection
 from django.test.utils import override_settings
 from django.test import Client
+from django.utils.encoding import smart_str, smart_bytes

 from awx.main.utils.encryption import decrypt_value, get_encryption_key
 from awx.api.versioning import reverse, drf_reverse
 from awx.main.models.oauth import (OAuth2Application as Application,
-                                   OAuth2AccessToken as AccessToken,
-                                   )
+                                   OAuth2AccessToken as AccessToken)
 from awx.sso.models import UserEnterpriseAuth
 from oauth2_provider.models import RefreshToken

@@ -22,11 +22,11 @@ def test_personal_access_token_creation(oauth_application, post, alice):
         url,
         data='grant_type=password&username=alice&password=alice&scope=read',
         content_type='application/x-www-form-urlencoded',
-        HTTP_AUTHORIZATION='Basic ' + base64.b64encode(':'.join([
+        HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([
             oauth_application.client_id, oauth_application.client_secret
-        ]))
+        ]))))
     )
-    resp_json = resp._container[0]
+    resp_json = smart_str(resp._container[0])
     assert 'access_token' in resp_json
     assert 'scope' in resp_json
     assert 'refresh_token' in resp_json
@@ -43,15 +43,15 @@ def test_token_creation_disabled_for_external_accounts(oauth_application, post,
         url,
         data='grant_type=password&username=alice&password=alice&scope=read',
         content_type='application/x-www-form-urlencoded',
-        HTTP_AUTHORIZATION='Basic ' + base64.b64encode(':'.join([
+        HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([
             oauth_application.client_id, oauth_application.client_secret
-        ])),
+        ])))),
         status=status
     )
     if allow_oauth:
         assert AccessToken.objects.count() == 1
     else:
-        assert 'OAuth2 Tokens cannot be created by users associated with an external authentication provider' in resp.content
+        assert 'OAuth2 Tokens cannot be created by users associated with an external authentication provider' in smart_str(resp.content)  # noqa
         assert AccessToken.objects.count() == 0

@@ -302,9 +302,9 @@ def test_refresh_accesstoken(oauth_application, post, get, delete, admin):
         refresh_url,
         data='grant_type=refresh_token&refresh_token=' + refresh_token.token,
         content_type='application/x-www-form-urlencoded',
-        HTTP_AUTHORIZATION='Basic ' + base64.b64encode(':'.join([
+        HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([
             oauth_application.client_id, oauth_application.client_secret
-        ]))
+        ]))))
     )
     assert RefreshToken.objects.filter(token=refresh_token).exists()
     original_refresh_token = RefreshToken.objects.get(token=refresh_token)
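The HTTP_AUTHORIZATION hunks above exist because base64.b64encode() takes and returns bytes on Python 3, while the header must be text; hence the smart_bytes/smart_str wrapping. A stdlib-only, runnable equivalent (credential values hypothetical):

import base64

client_id, client_secret = 'abc', 'xyz'  # hypothetical values
pair = ':'.join([client_id, client_secret])
# Encode the str pair to bytes before b64encode, decode the result for the header.
token = base64.b64encode(pair.encode('utf-8')).decode('utf-8')
assert token == 'YWJjOnh5eg=='
header = 'Basic ' + token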
diff --git a/awx/main/tests/functional/api/test_organizations.py b/awx/main/tests/functional/api/test_organizations.py
index 5e4793b6a5..25af588f53 100644
--- a/awx/main/tests/functional/api/test_organizations.py
+++ b/awx/main/tests/functional/api/test_organizations.py
@@ -7,7 +7,7 @@ import os
 from backports.tempfile import TemporaryDirectory
 from django.conf import settings
 import pytest
-import mock
+from unittest import mock

 # AWX
 from awx.main.models import *  # noqa
@@ -38,14 +38,14 @@ def create_project_update_factory(organization, project):

 @pytest.fixture
 def organization_jobs_successful(create_job_factory, create_project_update_factory):
-    return [create_job_factory(status='successful') for i in xrange(0, 2)] + \
-           [create_project_update_factory(status='successful') for i in xrange(0, 2)]
+    return [create_job_factory(status='successful') for i in range(0, 2)] + \
+           [create_project_update_factory(status='successful') for i in range(0, 2)]


 @pytest.fixture
 def organization_jobs_running(create_job_factory, create_project_update_factory):
-    return [create_job_factory(status='running') for i in xrange(0, 2)] + \
-           [create_project_update_factory(status='running') for i in xrange(0, 2)]
+    return [create_job_factory(status='running') for i in range(0, 2)] + \
+           [create_project_update_factory(status='running') for i in range(0, 2)]


 @pytest.mark.django_db
diff --git a/awx/main/tests/functional/api/test_schedules.py b/awx/main/tests/functional/api/test_schedules.py
index f8fb75a5bc..bdb3534fb3 100644
--- a/awx/main/tests/functional/api/test_schedules.py
+++ b/awx/main/tests/functional/api/test_schedules.py
@@ -1,7 +1,8 @@
 import pytest

-from awx.api.versioning import reverse
+from django.utils.encoding import smart_str

+from awx.api.versioning import reverse
 from awx.main.models import JobTemplate, Schedule
 from awx.main.utils.encryption import decrypt_value, get_encryption_key

@@ -153,7 +154,7 @@ def test_invalid_rrules(post, admin_user, project, inventory, rrule, error):
         'name': 'Some Schedule',
         'rrule': rrule,
     }, admin_user, expect=400)
-    assert error in resp.content
+    assert error in smart_str(resp.content)


 @pytest.mark.django_db
@@ -167,7 +168,7 @@ def test_utc_preview(post, admin_user):
     url = reverse('api:schedule_rrule')
     r = post(url, {'rrule': get_rrule()}, admin_user, expect=200)
     assert r.data['utc'] == r.data['local']
-    assert map(str, r.data['utc']) == [
+    assert list(map(str, r.data['utc'])) == [
         '2030-03-08 05:00:00+00:00',
         '2030-03-09 05:00:00+00:00',
         '2030-03-10 05:00:00+00:00',
@@ -182,14 +183,14 @@ def test_nyc_with_dst(post, admin_user):
     r = post(url, {'rrule': get_rrule('America/New_York')}, admin_user, expect=200)

     # March 10, 2030 is when DST takes effect in NYC
-    assert map(str, r.data['local']) == [
+    assert list(map(str, r.data['local'])) == [
         '2030-03-08 05:00:00-05:00',
         '2030-03-09 05:00:00-05:00',
         '2030-03-10 05:00:00-04:00',
         '2030-03-11 05:00:00-04:00',
         '2030-03-12 05:00:00-04:00',
     ]
-    assert map(str, r.data['utc']) == [
+    assert list(map(str, r.data['utc'])) == [
         '2030-03-08 10:00:00+00:00',
         '2030-03-09 10:00:00+00:00',
         '2030-03-10 09:00:00+00:00',
@@ -206,14 +207,14 @@ def test_phoenix_without_dst(post, admin_user):
     r = post(url, {'rrule': get_rrule('America/Phoenix')}, admin_user, expect=200)

     # March 10, 2030 is when DST takes effect in NYC
-    assert map(str, r.data['local']) == [
+    assert list(map(str, r.data['local'])) == [
         '2030-03-08 05:00:00-07:00',
         '2030-03-09 05:00:00-07:00',
         '2030-03-10 05:00:00-07:00',
         '2030-03-11 05:00:00-07:00',
         '2030-03-12 05:00:00-07:00',
     ]
-    assert map(str, r.data['utc']) == [
+    assert list(map(str, r.data['utc'])) == [
         '2030-03-08 12:00:00+00:00',
         '2030-03-09 12:00:00+00:00',
         '2030-03-10 12:00:00+00:00',
@@ -229,14 +230,14 @@ def test_interval_by_local_day(post, admin_user):
     r = post(url, {'rrule': rrule}, admin_user, expect=200)

     # March 10, 2030 is when DST takes effect in NYC
-    assert map(str, r.data['local']) == [
+    assert list(map(str, r.data['local'])) == [
         '2030-02-02 21:00:00-05:00',
         '2030-03-02 21:00:00-05:00',
         '2030-04-06 21:00:00-04:00',
         '2030-05-04 21:00:00-04:00',
     ]

-    assert map(str, r.data['utc']) == [
+    assert list(map(str, r.data['utc'])) == [
         '2030-02-03 02:00:00+00:00',
         '2030-03-03 02:00:00+00:00',
         '2030-04-07 01:00:00+00:00',
@@ -250,13 +251,13 @@ def test_weekday_timezone_boundary(post, admin_user):
     rrule = 'DTSTART;TZID=America/New_York:20300101T210000 RRULE:FREQ=WEEKLY;BYDAY=TU;INTERVAL=1;COUNT=3'
     r = post(url, {'rrule': rrule}, admin_user, expect=200)

-    assert map(str, r.data['local']) == [
+    assert list(map(str, r.data['local'])) == [
         '2030-01-01 21:00:00-05:00',
         '2030-01-08 21:00:00-05:00',
         '2030-01-15 21:00:00-05:00',
     ]

-    assert map(str, r.data['utc']) == [
+    assert list(map(str, r.data['utc'])) == [
         '2030-01-02 02:00:00+00:00',
         '2030-01-09 02:00:00+00:00',
         '2030-01-16 02:00:00+00:00',
@@ -269,13 +270,13 @@ def test_first_monthly_weekday_timezone_boundary(post, admin_user):
     rrule = 'DTSTART;TZID=America/New_York:20300101T210000 RRULE:FREQ=MONTHLY;BYDAY=SU;BYSETPOS=1;INTERVAL=1;COUNT=3'
     r = post(url, {'rrule': rrule}, admin_user, expect=200)

-    assert map(str, r.data['local']) == [
+    assert list(map(str, r.data['local'])) == [
         '2030-01-06 21:00:00-05:00',
         '2030-02-03 21:00:00-05:00',
         '2030-03-03 21:00:00-05:00',
     ]

-    assert map(str, r.data['utc']) == [
+    assert list(map(str, r.data['utc'])) == [
         '2030-01-07 02:00:00+00:00',
         '2030-02-04 02:00:00+00:00',
         '2030-03-04 02:00:00+00:00',
@@ -288,13 +289,13 @@ def test_annual_timezone_boundary(post, admin_user):
     rrule = 'DTSTART;TZID=America/New_York:20301231T230000 RRULE:FREQ=YEARLY;INTERVAL=1;COUNT=3'
     r = post(url, {'rrule': rrule}, admin_user, expect=200)

-    assert map(str, r.data['local']) == [
+    assert list(map(str, r.data['local'])) == [
         '2030-12-31 23:00:00-05:00',
         '2031-12-31 23:00:00-05:00',
         '2032-12-31 23:00:00-05:00',
     ]

-    assert map(str, r.data['utc']) == [
+    assert list(map(str, r.data['utc'])) == [
         '2031-01-01 04:00:00+00:00',
         '2032-01-01 04:00:00+00:00',
         '2033-01-01 04:00:00+00:00',
@@ -312,12 +313,12 @@ def test_dst_phantom_hour(post, admin_user):
     rrule = 'DTSTART;TZID=America/New_York:20300303T023000 RRULE:FREQ=WEEKLY;BYDAY=SU;INTERVAL=1;COUNT=3'
     r = post(url, {'rrule': rrule}, admin_user, expect=200)

-    assert map(str, r.data['local']) == [
+    assert list(map(str, r.data['local'])) == [
         '2030-03-03 02:30:00-05:00',
         '2030-03-17 02:30:00-04:00',  # Skip 3/10 because 3/10 @ 2:30AM isn't a real date
     ]

-    assert map(str, r.data['utc']) == [
+    assert list(map(str, r.data['utc'])) == [
         '2030-03-03 07:30:00+00:00',
         '2030-03-17 06:30:00+00:00',  # Skip 3/10 because 3/10 @ 2:30AM isn't a real date
     ]
@@ -330,7 +331,7 @@ def test_months_with_31_days(post, admin_user):
     r = post(url, {'rrule': rrule}, admin_user, expect=200)

     # 30 days have September, April, June, and November...
-    assert map(str, r.data['local']) == [
+    assert list(map(str, r.data['local'])) == [
         '2030-01-31 00:00:00-05:00',
         '2030-03-31 00:00:00-04:00',
         '2030-05-31 00:00:00-04:00',
@@ -350,7 +351,7 @@ def test_dst_rollback_duplicates(post, admin_user):
     rrule = 'DTSTART;TZID=America/New_York:20301102T233000 RRULE:FREQ=HOURLY;INTERVAL=1;COUNT=5'
     r = post(url, {'rrule': rrule}, admin_user, expect=200)

-    assert map(str, r.data['local']) == [
+    assert list(map(str, r.data['local'])) == [
         '2030-11-02 23:30:00-04:00',
         '2030-11-03 00:30:00-04:00',
         '2030-11-03 01:30:00-04:00',
diff --git a/awx/main/tests/functional/api/test_settings.py b/awx/main/tests/functional/api/test_settings.py
index e9679f568b..b2103c0777 100644
--- a/awx/main/tests/functional/api/test_settings.py
+++ b/awx/main/tests/functional/api/test_settings.py
@@ -10,7 +10,7 @@ from django.conf import settings
 from kombu.utils.url import parse_url

 # Mock
-import mock
+from unittest import mock

 # AWX
 from awx.api.versioning import reverse
diff --git a/awx/main/tests/functional/api/test_survey_spec.py b/awx/main/tests/functional/api/test_survey_spec.py
index 7eac251132..ef05f220ca 100644
--- a/awx/main/tests/functional/api/test_survey_spec.py
+++ b/awx/main/tests/functional/api/test_survey_spec.py
@@ -1,4 +1,4 @@
-import mock
+from unittest import mock
 import pytest
 import json

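All of the schedule assertions above gain a list() wrapper because map() returns a lazy iterator on Python 3, and an iterator never compares equal to a list. Runnable:

stamps = ['2030-03-08 05:00:00+00:00']
# A map object is not a list, so the bare comparison always fails:
assert map(str, stamps) != stamps
assert list(map(str, stamps)) == stamps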
diff --git a/awx/main/tests/functional/api/test_unified_jobs_stdout.py b/awx/main/tests/functional/api/test_unified_jobs_stdout.py
index b465f92606..64a71c91d3 100644
--- a/awx/main/tests/functional/api/test_unified_jobs_stdout.py
+++ b/awx/main/tests/functional/api/test_unified_jobs_stdout.py
@@ -5,7 +5,8 @@ import json
 import re

 from django.conf import settings
-import mock
+from django.utils.encoding import smart_str
+from unittest import mock
 import pytest

 from awx.api.versioning import reverse
@@ -43,7 +44,7 @@ def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
     url = reverse(view, kwargs={'pk': job.pk}) + '?format=txt'

     response = get(url, user=admin, expect=200)
-    assert response.content.splitlines() == ['Testing %d' % i for i in range(3)]
+    assert smart_str(response.content).splitlines() == ['Testing %d' % i for i in range(3)]


 @pytest.mark.django_db
@@ -69,14 +70,14 @@ def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation,
     # ansi codes in ?format=txt should get filtered
     fmt = "?format={}".format("txt_download" if download else "txt")
     response = get(url + fmt, user=admin, expect=200)
-    assert response.content.splitlines() == ['Testing %d' % i for i in range(3)]
+    assert smart_str(response.content).splitlines() == ['Testing %d' % i for i in range(3)]
     has_download_header = response.has_header('Content-Disposition')
     assert has_download_header if download else not has_download_header

     # ask for ansi and you'll get it
     fmt = "?format={}".format("ansi_download" if download else "ansi")
     response = get(url + fmt, user=admin, expect=200)
-    assert response.content.splitlines() == ['\x1B[0;36mTesting %d\x1B[0m' % i for i in range(3)]
+    assert smart_str(response.content).splitlines() == ['\x1B[0;36mTesting %d\x1B[0m' % i for i in range(3)]
     has_download_header = response.has_header('Content-Disposition')
     assert has_download_header if download else not has_download_header

@@ -100,9 +101,9 @@ def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view
     url = reverse(view, kwargs={'pk': job.pk}) + '?format=html'

     response = get(url, user=admin, expect=200)
-    assert '.ansi36 { color: #2dbaba; }' in response.content
+    assert '.ansi36 { color: #2dbaba; }' in smart_str(response.content)
     for i in range(3):
-        assert '<span class="ansi36">Testing {}</span>'.format(i) in response.content
+        assert '<span class="ansi36">Testing {}</span>'.format(i) in smart_str(response.content)


 @pytest.mark.django_db
@@ -120,7 +121,7 @@ def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
     url = reverse(view, kwargs={'pk': job.pk}) + '?format=html&start_line=5&end_line=10'

     response = get(url, user=admin, expect=200)
-    assert re.findall('Testing [0-9]+', response.content) == ['Testing %d' % i for i in range(5, 10)]
+    assert re.findall('Testing [0-9]+', smart_str(response.content)) == ['Testing %d' % i for i in range(5, 10)]


 @pytest.mark.django_db
@@ -131,7 +132,7 @@ def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin):
         SystemJobEvent(system_job=job, stdout='Testing {}\n'.format(i), start_line=i).save()
     url = reverse('api:system_job_detail', kwargs={'pk': job.pk})
     response = get(url, user=admin, expect=200)
-    assert response.data['result_stdout'].splitlines() == ['Testing %d' % i for i in range(3)]
+    assert smart_str(response.data['result_stdout']).splitlines() == ['Testing %d' % i for i in range(3)]


 @pytest.mark.django_db
@@ -170,7 +171,7 @@ def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin):
     url = reverse(view, kwargs={'pk': job.pk})

     response = get(url + '?format={}'.format(fmt), user=admin, expect=200)
-    assert response.content == (
+    assert smart_str(response.content) == (
         'Standard Output too large to display ({actual} bytes), only download '
         'supported for sizes over {max} bytes.'.format(
             actual=total_bytes,
@@ -179,7 +180,7 @@ def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin):
     )

     response = get(url + '?format={}_download'.format(fmt), user=admin, expect=200)
-    assert response.content == large_stdout
+    assert smart_str(response.content) == large_stdout


 @pytest.mark.django_db
@@ -199,7 +200,7 @@ def test_legacy_result_stdout_text_fallback(Cls, view, fmt, get, admin):
     url = reverse(view, kwargs={'pk': job.pk})

     response = get(url + '?format={}'.format(fmt), user=admin, expect=200)
-    assert response.content == 'LEGACY STDOUT!'
+    assert smart_str(response.content) == 'LEGACY STDOUT!'
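The smart_str() wrapping repeated throughout these tests exists because HttpResponse.content is bytes on Python 3, so substring checks and splitlines() comparisons against str values stop matching. A runnable illustration, assuming Django is installed (smart_str lives in django.utils.encoding):

from django.utils.encoding import smart_str

assert smart_str(b'LEGACY STDOUT!') == 'LEGACY STDOUT!'
assert smart_str(b'Testing 0\nTesting 1').splitlines() == ['Testing 0', 'Testing 1']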

 @pytest.mark.django_db
@@ -219,7 +220,7 @@ def test_legacy_result_stdout_with_max_bytes(Cls, view, fmt, get, admin):
     url = reverse(view, kwargs={'pk': job.pk})

     response = get(url + '?format={}'.format(fmt), user=admin, expect=200)
-    assert response.content == (
+    assert smart_str(response.content) == (
         'Standard Output too large to display ({actual} bytes), only download '
         'supported for sizes over {max} bytes.'.format(
             actual=total_bytes,
@@ -228,7 +229,7 @@ def test_legacy_result_stdout_with_max_bytes(Cls, view, fmt, get, admin):
     )

     response = get(url + '?format={}'.format(fmt + '_download'), user=admin, expect=200)
-    assert response.content == large_stdout
+    assert smart_str(response.content) == large_stdout


 @pytest.mark.django_db
@@ -248,7 +249,7 @@ def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin):
     url = reverse(view, kwargs={'pk': job.pk}) + '?format=' + fmt

     response = get(url, user=admin, expect=200)
-    assert response.content.splitlines() == ['オ%d' % i for i in range(3)]
+    assert smart_str(response.content).splitlines() == ['オ%d' % i for i in range(3)]


 @pytest.mark.django_db
@@ -256,12 +257,12 @@ def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin):
     job = Job()
     job.save()
     for i in range(3):
-        JobEvent(job=job, stdout=u'オ{}\n'.format(i), start_line=i).save()
+        JobEvent(job=job, stdout='オ{}\n'.format(i), start_line=i).save()
     url = reverse(
         'api:job_stdout',
         kwargs={'pk': job.pk}
     ) + '?format=json&content_encoding=base64'
     response = get(url, user=admin, expect=200)
-    content = base64.b64decode(json.loads(response.content)['content'])
-    assert content.splitlines() == ['オ%d' % i for i in range(3)]
+    content = base64.b64decode(json.loads(smart_str(response.content))['content'])
+    assert smart_str(content).splitlines() == ['オ%d' % i for i in range(3)]
diff --git a/awx/main/tests/functional/api/test_unified_jobs_view.py b/awx/main/tests/functional/api/test_unified_jobs_view.py
index 391cb57d18..711d8fedf4 100644
--- a/awx/main/tests/functional/api/test_unified_jobs_view.py
+++ b/awx/main/tests/functional/api/test_unified_jobs_view.py
@@ -1,5 +1,7 @@
 import pytest

+from django.utils.encoding import smart_str
+
 from awx.api.versioning import reverse
 from awx.main.models import UnifiedJob, ProjectUpdate, InventoryUpdate
 from awx.main.tests.URI import URI
@@ -70,6 +72,7 @@ def test_project_update_redaction_enabled(get, format, content_type, test_cases, admin):
     assert content_type in response['CONTENT-TYPE']
     assert response.data is not None
     content = response.data['content'] if format == 'json' else response.data
+    content = smart_str(content)
     assert test_data['uri'].username not in content
     assert test_data['uri'].password not in content
     assert content.count(test_data['uri'].host) == test_data['occurrences']
@@ -82,6 +85,7 @@ def test_job_redaction_disabled(get, format, content_type, negative_test_cases, admin):
     job = test_data['job']
     response = get(reverse("api:job_stdout", kwargs={'pk': job.pk}) + "?format=" + format, user=admin, expect=200, format=format)
     content = response.data['content'] if format == 'json' else response.data
+    content = smart_str(content)
     assert response.data is not None
     assert test_data['uri'].username in content
     assert test_data['uri'].password in content
diff --git a/awx/main/tests/functional/commands/test_cleanup_facts.py b/awx/main/tests/functional/commands/test_cleanup_facts.py
index 35a531fe86..dbfae7a0c5 100644
--- a/awx/main/tests/functional/commands/test_cleanup_facts.py
+++ b/awx/main/tests/functional/commands/test_cleanup_facts.py
@@ -3,7 +3,7 @@

 # Python
 import pytest
-import mock
+from unittest import mock
 from dateutil.relativedelta import relativedelta
 from datetime import timedelta

@@ -48,7 +48,7 @@ def test_cleanup_older_than(fact_scans, hosts, monkeypatch_jsonbfield_get_db_prep_save):
     hosts(5)
     fact_scans(28, timestamp_epoch=epoch)

     qs = Fact.objects.all().order_by('-timestamp')
-    fact_middle = qs[qs.count() / 2]
+    fact_middle = qs[int(qs.count() / 2)]

     granularity = relativedelta()
     cleanup_facts = CleanupFacts()
@@ -92,7 +92,7 @@ def test_cleanup_logic(fact_scans, hosts, monkeypatch_jsonbfield_get_db_prep_save):
         facts = Fact.objects.filter(host__id=host_id, module=module, timestamp__lt=timestamp_middle).order_by('-timestamp')
         host_facts[host_id] = facts

-    for host_id, facts in host_facts.iteritems():
+    for host_id, facts in host_facts.items():
         assert 15 == len(facts)

     timestamp_pivot = timestamp_middle
@@ -108,7 +108,7 @@ def test_system_tracking_feature_disabled(mocker):
     cmd = Command()
     with pytest.raises(CommandError) as err:
         cmd.handle(None)
-    assert 'The System Tracking feature is not enabled for your instance' in err.value
+    assert 'The System Tracking feature is not enabled for your instance' in str(err.value)


 @mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_enabled)
@@ -206,4 +206,4 @@ def test_parameters_fail(mocker):
         cmd = Command()
         with pytest.raises(CommandError) as err:
             cmd.handle(None, older_than=kv['older_than'], granularity=kv['granularity'])
-        assert kv['msg'] in err.value
+        assert kv['msg'] in str(err.value)
diff --git a/awx/main/tests/functional/commands/test_commands.py b/awx/main/tests/functional/commands/test_commands.py
index 0b3b582279..471e5f1be6 100644
--- a/awx/main/tests/functional/commands/test_commands.py
+++ b/awx/main/tests/functional/commands/test_commands.py
@@ -1,10 +1,7 @@
 import sys

 import pytest
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
+from io import StringIO

 from django.core.management import call_command

diff --git a/awx/main/tests/functional/commands/test_expire_sessions.py b/awx/main/tests/functional/commands/test_expire_sessions.py
index 91e811bdcf..e13fe527a9 100644
--- a/awx/main/tests/functional/commands/test_expire_sessions.py
+++ b/awx/main/tests/functional/commands/test_expire_sessions.py
@@ -49,7 +49,7 @@ class TestExpireSessionsCommand:
         fake_username = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
         with pytest.raises(CommandError) as excinfo:
             self.run_command(fake_username)
-        assert excinfo.value.message.strip() == 'The user does not exist.'
+        assert str(excinfo.value).strip() == 'The user does not exist.'

     def test_expire_one_user(self):
         # alice should be logged out, but bob should not.
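The err.value.message rewrites in these command tests are needed because BaseException lost its message attribute in Python 3; str(exc) is the portable spelling. Runnable:

try:
    raise ValueError('The user does not exist.')
except ValueError as exc:
    # .message was removed in Python 3; str(exc) works on both versions.
    assert str(exc) == 'The user does not exist.'
    assert not hasattr(exc, 'message')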
diff --git a/awx/main/tests/functional/commands/test_inventory_import.py b/awx/main/tests/functional/commands/test_inventory_import.py
index 23e80b7403..c3b002c9aa 100644
--- a/awx/main/tests/functional/commands/test_inventory_import.py
+++ b/awx/main/tests/functional/commands/test_inventory_import.py
@@ -3,7 +3,7 @@

 # Python
 import pytest
-import mock
+from unittest import mock

 # Django
 from django.core.management.base import CommandError
@@ -91,25 +91,25 @@ class TestInvalidOptionsFunctional:
         # Give invalid file to the command
         cmd = inventory_import.Command()
         with mock.patch('django.db.transaction.rollback'):
-            with pytest.raises(IOError) as err:
+            with pytest.raises(OSError) as err:
                 cmd.handle(
                     inventory_id=inventory.id,
                     source='/tmp/pytest-of-root/pytest-7/inv_files0-invalid')
-        assert 'Source does not exist' in err.value.message
+        assert 'Source does not exist' in str(err.value)

     def test_invalid_inventory_id(self):
         cmd = inventory_import.Command()
         with pytest.raises(CommandError) as err:
             cmd.handle(inventory_id=42, source='/notapath/shouldnotmatter')
-        assert 'id = 42' in err.value.message
-        assert 'cannot be found' in err.value.message
+        assert 'id = 42' in str(err.value)
+        assert 'cannot be found' in str(err.value)

     def test_invalid_inventory_name(self):
         cmd = inventory_import.Command()
         with pytest.raises(CommandError) as err:
             cmd.handle(inventory_name='fooservers', source='/notapath/shouldnotmatter')
-        assert 'name = fooservers' in err.value.message
-        assert 'cannot be found' in err.value.message
+        assert 'name = fooservers' in str(err.value)
+        assert 'cannot be found' in str(err.value)


 @pytest.mark.django_db
diff --git a/awx/main/tests/functional/commands/test_oauth2_token_create.py b/awx/main/tests/functional/commands/test_oauth2_token_create.py
index 7b3ceba8e0..5c7a138137 100644
--- a/awx/main/tests/functional/commands/test_oauth2_token_create.py
+++ b/awx/main/tests/functional/commands/test_oauth2_token_create.py
@@ -2,7 +2,7 @@ import pytest
 import string
 import random

-import StringIO
+from io import StringIO

 # Django
 from django.contrib.auth.models import User
@@ -13,30 +13,29 @@ from django.core.management.base import CommandError
 from awx.main.models.oauth import OAuth2AccessToken

-
 @pytest.mark.django_db
 @pytest.mark.inventory_import
 class TestOAuth2CreateCommand:
     def test_no_user_option(self):
-        out = StringIO.StringIO()
+        out = StringIO()
         with pytest.raises(CommandError) as excinfo:
             call_command('create_oauth2_token', stdout=out)
-        assert 'Username not supplied.' in excinfo.value.message
+        assert 'Username not supplied.' in str(excinfo.value)
         out.close()

     def test_non_existing_user(self):
-        out = StringIO.StringIO()
+        out = StringIO()
         fake_username = ''
         while fake_username == '' or User.objects.filter(username=fake_username).exists():
             fake_username = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
         arg = '--user=' + fake_username
         with pytest.raises(CommandError) as excinfo:
             call_command('create_oauth2_token', arg, stdout=out)
-        assert 'The user does not exist.' in excinfo.value.message
+        assert 'The user does not exist.' in str(excinfo.value)
        out.close()

     def test_correct_user(self, alice):
-        out = StringIO.StringIO()
+        out = StringIO()
         arg = '--user=' + 'alice'
         call_command('create_oauth2_token', arg, stdout=out)
         generated_token = out.getvalue().strip()
diff --git a/awx/main/tests/functional/commands/test_oauth2_token_revoke.py b/awx/main/tests/functional/commands/test_oauth2_token_revoke.py
index 0bb355da24..4e576e5558 100644
--- a/awx/main/tests/functional/commands/test_oauth2_token_revoke.py
+++ b/awx/main/tests/functional/commands/test_oauth2_token_revoke.py
@@ -3,7 +3,7 @@ import datetime
 import pytest
 import string
 import random
-import StringIO
+from io import StringIO

 # Django
 from django.core.management import call_command
@@ -19,12 +19,12 @@ from awx.api.versioning import reverse
 class TestOAuth2RevokeCommand:
     def test_non_existing_user(self):
-        out = StringIO.StringIO()
+        out = StringIO()
         fake_username = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
         arg = '--user=' + fake_username
         with pytest.raises(CommandError) as excinfo:
             call_command('revoke_oauth2_tokens', arg, stdout=out)
-        assert 'A user with that username does not exist' in excinfo.value.message
+        assert 'A user with that username does not exist' in str(excinfo.value)
         out.close()

     def test_revoke_all_access_tokens(self, post, admin, alice):
diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py
index 1b90225adb..6972428960 100644
--- a/awx/main/tests/functional/conftest.py
+++ b/awx/main/tests/functional/conftest.py
@@ -1,14 +1,13 @@
 # Python
 import pytest
-import mock
+from unittest import mock
 import json
 import os
 import six
 import tempfile
 import shutil
 from datetime import timedelta
-from six.moves import xrange
-from mock import PropertyMock
+from unittest.mock import PropertyMock

 # Django
 from django.core.urlresolvers import resolve
@@ -426,7 +425,7 @@ def org_member(user, organization):
 def organizations(instance):
     def rf(organization_count=1):
         orgs = []
-        for i in xrange(0, organization_count):
+        for i in range(0, organization_count):
             o = Organization.objects.create(name="test-org-%d" % i, description="test-org-desc")
             orgs.append(o)
         return orgs
@@ -449,7 +448,7 @@ def hosts(group_factory):

     def rf(host_count=1):
         hosts = []
-        for i in xrange(0, host_count):
+        for i in range(0, host_count):
             name = '%s-host-%s' % (group1.name, i)
             (host, created) = group1.inventory.hosts.get_or_create(name=name)
             if created:
@@ -613,7 +612,7 @@ def fact_scans(group_factory, fact_ansible_json, fact_packages_json, fact_servic
     facts_json['packages'] = fact_packages_json
     facts_json['services'] = fact_services_json

-    for i in xrange(0, fact_scans):
+    for i in range(0, fact_scans):
         for host in group1.hosts.all():
             for module_name in module_names:
                 facts.append(Fact.objects.create(host=host, timestamp=timestamp_current, module=module_name, facts=facts_json[module_name]))
@@ -764,7 +763,7 @@ def sqlite_copy_expert(request):
                     InventoryUpdateEvent, SystemJobEvent):
             if cls._meta.db_table == tablename:
                 for event in cls.objects.order_by('start_line').all():
-                    fd.write(event.stdout.encode('utf-8'))
+                    fd.write(event.stdout)

     setattr(SQLiteCursorWrapper, 'copy_expert', write_stdout)
     request.addfinalizer(lambda: shutil.rmtree(path))
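These command tests also swap the Python 2 StringIO/cStringIO modules for io.StringIO, the single Python 3 home for in-memory text streams (io.BytesIO is the bytes counterpart). Runnable:

from io import StringIO

out = StringIO()
out.write('generated-token\n')
assert out.getvalue().strip() == 'generated-token'
out.close()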
+++ b/awx/main/tests/functional/models/fact/test_get_timeline.py
@@ -1,7 +1,6 @@
 import pytest
 from datetime import timedelta

-from six.moves import xrange

 from django.utils import timezone

@@ -45,7 +44,7 @@ def test_all_ansible(hosts, fact_scans, monkeypatch_jsonbfield_get_db_prep_save):
     assert 3 == len(facts_known)
     assert 3 == len(fact_objs)

-    for i in xrange(len(facts_known) - 1, 0):
+    for i in range(len(facts_known) - 1, 0):
         assert facts_known[i].id == fact_objs[i].id


@@ -103,12 +102,12 @@ def test_to_lte(hosts, fact_scans, monkeypatch_jsonbfield_get_db_prep_save):
     ts_to = epoch + timedelta(days=1)
     (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=ts_to, epoch=epoch)

-    facts_known_subset = filter(lambda x: x.timestamp <= ts_to, facts_known)
+    facts_known_subset = list(filter(lambda x: x.timestamp <= ts_to, facts_known))

     assert 2 == len(facts_known_subset)
     assert 2 == len(fact_objs)

-    for i in xrange(0, len(fact_objs)):
+    for i in range(0, len(fact_objs)):
         assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id


@@ -118,12 +117,12 @@ def test_from_gt(hosts, fact_scans, monkeypatch_jsonbfield_get_db_prep_save):
     ts_from = epoch
     (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=ts_from, ts_to=None, epoch=epoch)

-    facts_known_subset = filter(lambda x: x.timestamp > ts_from, facts_known)
+    facts_known_subset = list(filter(lambda x: x.timestamp > ts_from, facts_known))

     assert 2 == len(facts_known_subset)
     assert 2 == len(fact_objs)

-    for i in xrange(0, len(fact_objs)):
+    for i in range(0, len(fact_objs)):
         assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id

@@ -135,5 +134,5 @@ def test_no_ts(hosts, fact_scans, monkeypatch_jsonbfield_get_db_prep_save):
     assert 3 == len(facts_known)
     assert 3 == len(fact_objs)

-    for i in xrange(len(facts_known) - 1, 0):
+    for i in range(len(facts_known) - 1, 0):
         assert facts_known[i].id == fact_objs[i].id
diff --git a/awx/main/tests/functional/models/test_activity_stream.py b/awx/main/tests/functional/models/test_activity_stream.py
index ed0bb78e0e..707b86645c 100644
--- a/awx/main/tests/functional/models/test_activity_stream.py
+++ b/awx/main/tests/functional/models/test_activity_stream.py
@@ -1,5 +1,5 @@
 import pytest
-import mock
+from unittest import mock

 import json

@@ -158,7 +158,7 @@ class TestUserModels:
 def test_missing_related_on_delete(inventory_source):
     old_is = InventorySource.objects.get(name=inventory_source.name)
     inventory_source.inventory.delete()
-    d = model_to_dict(old_is, serializer_mapping=model_serializer_mapping)
+    d = model_to_dict(old_is, serializer_mapping=model_serializer_mapping())
     assert d['inventory'] == '<missing inventory source>-{}'.format(old_is.inventory_id)


@@ -218,7 +218,7 @@ def test_modified_not_allowed_field(somecloud_type):
     from awx.main.registrar import activity_stream_registrar
     for Model in activity_stream_registrar.models:
-        assert 'modified' not in get_allowed_fields(Model(), model_serializer_mapping), Model
+        assert 'modified' not in get_allowed_fields(Model(), model_serializer_mapping()), Model


 @pytest.mark.django_db
@@ -233,7 +233,7 @@ def test_survey_spec_create_entry(job_template, survey_spec_factory):
 def test_survey_create_diff(job_template, survey_spec_factory):
     old = JobTemplate.objects.get(pk=job_template.pk)
     job_template.survey_spec = survey_spec_factory('foo')
-    before, after = model_instance_diff(old, job_template, model_serializer_mapping)['survey_spec']
+    before, after = model_instance_diff(old, job_template, model_serializer_mapping())['survey_spec']
     assert before == '{}'
     assert json.loads(after) == survey_spec_factory('foo')
diff --git a/awx/main/tests/functional/models/test_events.py b/awx/main/tests/functional/models/test_events.py
index b5f16e75ce..29c17e1ba7 100644
--- a/awx/main/tests/functional/models/test_events.py
+++ b/awx/main/tests/functional/models/test_events.py
@@ -1,4 +1,4 @@
-import mock
+from unittest import mock
 import pytest

 from awx.main.models import (Job, JobEvent, ProjectUpdate, ProjectUpdateEvent,
diff --git a/awx/main/tests/functional/models/test_inventory.py b/awx/main/tests/functional/models/test_inventory.py
index 537d6d0751..de99b193df 100644
--- a/awx/main/tests/functional/models/test_inventory.py
+++ b/awx/main/tests/functional/models/test_inventory.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-

 import pytest
-import mock
+from unittest import mock
 import six

 from django.core.exceptions import ValidationError
diff --git a/awx/main/tests/functional/models/test_project.py b/awx/main/tests/functional/models/test_project.py
index f150dbe00a..719c37436e 100644
--- a/awx/main/tests/functional/models/test_project.py
+++ b/awx/main/tests/functional/models/test_project.py
@@ -1,5 +1,5 @@
 import pytest
-import mock
+from unittest import mock

 from awx.main.models import Project
 from awx.main.models.organization import Organization
diff --git a/awx/main/tests/functional/models/test_schedule.py b/awx/main/tests/functional/models/test_schedule.py
index b03e3aab8b..1ccd748f92 100644
--- a/awx/main/tests/functional/models/test_schedule.py
+++ b/awx/main/tests/functional/models/test_schedule.py
@@ -1,7 +1,7 @@
 from datetime import datetime

 from django.utils.timezone import now
-import mock
+from unittest import mock
 import pytest
 import pytz

diff --git a/awx/main/tests/functional/task_management/test_rampart_groups.py b/awx/main/tests/functional/task_management/test_rampart_groups.py
index c58869ebf3..22ac1ff111 100644
--- a/awx/main/tests/functional/task_management/test_rampart_groups.py
+++ b/awx/main/tests/functional/task_management/test_rampart_groups.py
@@ -1,5 +1,5 @@
 import pytest
-import mock
+from unittest import mock
 from datetime import timedelta
 from awx.main.scheduler import TaskManager
 from awx.main.models import InstanceGroup, WorkflowJob
diff --git a/awx/main/tests/functional/task_management/test_scheduler.py b/awx/main/tests/functional/task_management/test_scheduler.py
index 299c47a7c6..6a554380be 100644
--- a/awx/main/tests/functional/task_management/test_scheduler.py
+++ b/awx/main/tests/functional/task_management/test_scheduler.py
@@ -1,5 +1,5 @@
 import pytest
-import mock
+from unittest import mock
 import json
 from datetime import timedelta
diff --git a/awx/main/tests/functional/test_copy.py b/awx/main/tests/functional/test_copy.py
index 1b017c8e10..ed03e0f61b 100644
--- a/awx/main/tests/functional/test_copy.py
+++ b/awx/main/tests/functional/test_copy.py
@@ -1,5 +1,5 @@
 import pytest
-import mock
+from unittest import mock

 from awx.api.versioning import reverse
 from awx.main.utils import decrypt_field
diff --git a/awx/main/tests/functional/test_credential.py b/awx/main/tests/functional/test_credential.py
index 45daa3eaac..f0b752f1ab 100644
--- a/awx/main/tests/functional/test_credential.py
+++ b/awx/main/tests/functional/test_credential.py
@@ -206,10 +206,10 @@ def test_vault_validation(organization, inputs, valid):


 @pytest.mark.django_db
-@pytest.mark.parametrize('become_method, valid', zip(
+@pytest.mark.parametrize('become_method, valid', list(zip(
     dict(V1Credential.FIELDS['become_method'].choices).keys(),
     itertools.repeat(True)
-) + [('invalid-choice', False)])
+)) + [('invalid-choice', False)])
 def test_choices_validity(become_method, valid, organization):
     inputs = {'become_method': become_method}
     cred_type = CredentialType.defaults['ssh']()
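The parametrize hunk above is another iterator-protocol fix: zip() is lazy on Python 3, and a zip object cannot be concatenated to a list with +, so it must be realized first. Runnable:

methods, flags = ['sudo', 'su'], [True, True]
# zip() returns an iterator on Python 3; realize it before concatenating.
params = list(zip(methods, flags)) + [('invalid-choice', False)]
assert params[0] == ('sudo', True) and params[-1] == ('invalid-choice', False)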
dict(V1Credential.FIELDS['become_method'].choices).keys(), itertools.repeat(True) -) + [('invalid-choice', False)]) +)) + [('invalid-choice', False)]) def test_choices_validity(become_method, valid, organization): inputs = {'become_method': become_method} cred_type = CredentialType.defaults['ssh']() diff --git a/awx/main/tests/functional/test_dispatch.py b/awx/main/tests/functional/test_dispatch.py index e73f226bbc..a734688512 100644 --- a/awx/main/tests/functional/test_dispatch.py +++ b/awx/main/tests/functional/test_dispatch.py @@ -277,7 +277,7 @@ class TestTaskDispatcher: 'args': [2, 2] }) assert isinstance(result, ValueError) - assert result.message == 'awx.main.tests.functional.test_dispatch.restricted is not decorated with @task()' # noqa + assert str(result) == 'awx.main.tests.functional.test_dispatch.restricted is not decorated with @task()' # noqa def test_method_dispatch(self): result = self.tm.perform_work({ @@ -292,7 +292,7 @@ class TestTaskDispatcher: 'args': [2, 2] }) assert isinstance(result, ValueError) - assert result.message == 'awx.main.tests.functional.test_dispatch.Restricted is not decorated with @task()' # noqa + assert str(result) == 'awx.main.tests.functional.test_dispatch.Restricted is not decorated with @task()' # noqa def test_python_function_cannot_be_imported(self): result = self.tm.perform_work({ @@ -300,14 +300,14 @@ class TestTaskDispatcher: 'args': ['ls'], }) assert isinstance(result, ValueError) - assert result.message == 'os.system is not a valid awx task' # noqa + assert str(result) == 'os.system is not a valid awx task' # noqa def test_undefined_function_cannot_be_imported(self): result = self.tm.perform_work({ 'task': 'awx.foo.bar' }) - assert isinstance(result, ImportError) - assert result.message == 'No module named foo' # noqa + assert isinstance(result, ModuleNotFoundError) + assert str(result) == "No module named 'awx.foo'" # noqa class TestTaskPublisher: diff --git a/awx/main/tests/functional/test_instances.py b/awx/main/tests/functional/test_instances.py index ec38c0598a..404b557227 100644 --- a/awx/main/tests/functional/test_instances.py +++ b/awx/main/tests/functional/test_instances.py @@ -1,5 +1,5 @@ import pytest -import mock +from unittest import mock from awx.main.models import AdHocCommand, InventoryUpdate, Job, JobTemplate, ProjectUpdate from awx.main.models.ha import Instance, InstanceGroup @@ -71,10 +71,10 @@ def test_instance_dup(org_admin, organization, project, instance_factory, instan project.organization.instance_groups.add(ig_all, ig_dup) actual_num_instances = Instance.objects.active_count() list_response = get(reverse('api:instance_list'), user=system_auditor) - api_num_instances_auditor = list_response.data.items()[0][1] + api_num_instances_auditor = list(list_response.data.items())[0][1] list_response2 = get(reverse('api:instance_list'), user=org_admin) - api_num_instances_oa = list_response2.data.items()[0][1] + api_num_instances_oa = list(list_response2.data.items())[0][1] assert actual_num_instances == api_num_instances_auditor # Note: The org_admin will not see the default 'tower' node because it is not in it's group, as expected diff --git a/awx/main/tests/functional/test_jobs.py b/awx/main/tests/functional/test_jobs.py index fd8070aab7..b04813b278 100644 --- a/awx/main/tests/functional/test_jobs.py +++ b/awx/main/tests/functional/test_jobs.py @@ -1,5 +1,5 @@ import pytest -import mock +from unittest import mock import json from awx.main.models import Job, Instance diff --git a/awx/main/tests/functional/test_ldap.py 
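The test_dispatch.py changes reflect two Python 3 behaviors at once: exceptions no longer carry a `.message` attribute (use `str(exc)` instead), and importing a missing submodule raises `ModuleNotFoundError`, a Python 3.6+ subclass of `ImportError` whose text quotes the module name. A standalone check (the package name is hypothetical):

```python
import importlib

try:
    importlib.import_module('no_such_package.sub')  # hypothetical missing module
except ModuleNotFoundError as exc:
    assert isinstance(exc, ImportError)             # still an ImportError subclass
    assert str(exc) == "No module named 'no_such_package'"
    assert not hasattr(exc, 'message')              # .message is gone in Python 3
```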
b/awx/main/tests/functional/test_ldap.py index 79e3a8e2ed..9b463da664 100644 --- a/awx/main/tests/functional/test_ldap.py +++ b/awx/main/tests/functional/test_ldap.py @@ -92,7 +92,7 @@ def ldap_settings_generator(): if prefix: data_new = dict() - for k,v in data.iteritems(): + for k,v in data.items(): k_new = k.replace('AUTH_LDAP', 'AUTH_LDAP{}'.format(prefix)) data_new[k_new] = v else: diff --git a/awx/main/tests/functional/test_licenses.py b/awx/main/tests/functional/test_licenses.py index c53dc84ae3..ec4da1ad91 100644 --- a/awx/main/tests/functional/test_licenses.py +++ b/awx/main/tests/functional/test_licenses.py @@ -86,10 +86,10 @@ def test_python_and_js_licenses(): def remediate_licenses_and_requirements(licenses, requirements): errors = [] - items = licenses.keys() + items = list(licenses.keys()) items.sort() for item in items: - if item not in requirements.keys() and item != 'awx': + if item not in [r.lower() for r in requirements.keys()] and item != 'awx': errors.append(" license file %s does not correspond to an existing requirement; it should be removed." % (licenses[item]['filename'],)) continue # uWSGI has a linking exception @@ -101,10 +101,10 @@ def test_python_and_js_licenses(): errors.append(" embedded source for %s is %s instead of the required version %s" % (item, licenses[item]['source_version'], version)) elif licenses[item]['source_version']: errors.append(" embedded source version %s for %s is included despite not being needed" % (licenses[item]['source_version'],item)) - items = requirements.keys() + items = list(requirements.keys()) items.sort() for item in items: - if item not in licenses.keys(): + if item.lower() not in licenses.keys(): errors.append(" license for requirement %s is missing" %(item,)) return errors diff --git a/awx/main/tests/functional/test_notifications.py b/awx/main/tests/functional/test_notifications.py index 9e659b0adc..b9fc394d12 100644 --- a/awx/main/tests/functional/test_notifications.py +++ b/awx/main/tests/functional/test_notifications.py @@ -1,12 +1,10 @@ -import mock +from unittest import mock import pytest from requests.adapters import HTTPAdapter from requests.utils import select_proxy from requests.exceptions import ConnectionError -from six.moves import xrange - from awx.api.versioning import reverse from awx.main.models.notifications import NotificationTemplate, Notification from awx.main.models.inventory import Inventory, InventorySource @@ -77,7 +75,7 @@ def test_inherited_notification_templates(get, post, user, organization, project u = user('admin-poster', True) url = reverse('api:notification_template_list') notification_templates = [] - for nfiers in xrange(3): + for nfiers in range(3): response = post(url, dict(name="test-webhook-{}".format(nfiers), description="test webhook {}".format(nfiers), diff --git a/awx/main/tests/functional/test_projects.py b/awx/main/tests/functional/test_projects.py index f8466355f1..5bd8e749ec 100644 --- a/awx/main/tests/functional/test_projects.py +++ b/awx/main/tests/functional/test_projects.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -import mock # noqa +from unittest import mock # noqa import pytest from awx.api.versioning import reverse diff --git a/awx/main/tests/functional/test_rbac_api.py b/awx/main/tests/functional/test_rbac_api.py index c4114a81b0..725227d5ae 100644 --- a/awx/main/tests/functional/test_rbac_api.py +++ b/awx/main/tests/functional/test_rbac_api.py @@ -1,4 +1,4 @@ -import mock # noqa +from unittest import mock # noqa import pytest from django.db import transaction diff 
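`dict.iteritems()` is removed in Python 3, so the LDAP settings loop switches to `items()`, which returns a view. Views are also why the test_licenses.py hunk copies `keys()` into a list before calling `.sort()` (views have no `sort` method); the same hunk additionally lower-cases requirement names so license files match case-insensitively. Both view behaviors in brief:

```python
data = {'AUTH_LDAP_SERVER_URI': 'ldap://ldap.example.org',
        'AUTH_LDAP_BIND_DN': 'cn=admin'}

# items() replaces Python 2's iteritems() and returns a live view
data_new = {k.replace('AUTH_LDAP', 'AUTH_LDAP_1'): v for k, v in data.items()}
assert 'AUTH_LDAP_1_SERVER_URI' in data_new

# keys() is a view too; it has no .sort(), so copy it first
items = list(data.keys())
items.sort()
assert items == sorted(data)
```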
--git a/awx/main/tests/functional/test_rbac_credential.py b/awx/main/tests/functional/test_rbac_credential.py index 2b134c18f5..b114f1b8ba 100644 --- a/awx/main/tests/functional/test_rbac_credential.py +++ b/awx/main/tests/functional/test_rbac_credential.py @@ -1,6 +1,6 @@ import pytest -import mock +from unittest import mock from awx.main.access import CredentialAccess from awx.main.models.credential import Credential diff --git a/awx/main/tests/functional/test_rbac_job_templates.py b/awx/main/tests/functional/test_rbac_job_templates.py index f26843c881..558e1f41f6 100644 --- a/awx/main/tests/functional/test_rbac_job_templates.py +++ b/awx/main/tests/functional/test_rbac_job_templates.py @@ -1,4 +1,4 @@ -import mock +from unittest import mock import pytest from rest_framework.exceptions import PermissionDenied diff --git a/awx/main/tests/functional/test_rbac_organization.py b/awx/main/tests/functional/test_rbac_organization.py index cdf8f446f0..c3c78cf09e 100644 --- a/awx/main/tests/functional/test_rbac_organization.py +++ b/awx/main/tests/functional/test_rbac_organization.py @@ -1,4 +1,4 @@ -import mock +from unittest import mock import pytest from awx.main.access import ( diff --git a/awx/main/tests/functional/test_rbac_team.py b/awx/main/tests/functional/test_rbac_team.py index 0ea0851adc..7178769906 100644 --- a/awx/main/tests/functional/test_rbac_team.py +++ b/awx/main/tests/functional/test_rbac_team.py @@ -1,5 +1,5 @@ import pytest -import mock +from unittest import mock from awx.main.access import TeamAccess from awx.main.models import Project, Organization, Team diff --git a/awx/main/tests/functional/test_rbac_user.py b/awx/main/tests/functional/test_rbac_user.py index 15035740d3..96cb5524d0 100644 --- a/awx/main/tests/functional/test_rbac_user.py +++ b/awx/main/tests/functional/test_rbac_user.py @@ -1,5 +1,5 @@ import pytest -import mock +from unittest import mock from django.test import TransactionTestCase diff --git a/awx/main/tests/functional/test_reencrypt_migration.py b/awx/main/tests/functional/test_reencrypt_migration.py deleted file mode 100644 index 330cab8b8c..0000000000 --- a/awx/main/tests/functional/test_reencrypt_migration.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2017 Ansible, Inc. -# All Rights Reserved. -import json -import pytest -import mock - -from django.apps import apps - -from awx.main.models import ( - UnifiedJob, - NotificationTemplate, - Credential, -) -from awx.main.models.credential import ssh - -from awx.conf.migrations._reencrypt import encrypt_field -from awx.main.migrations._reencrypt import ( - _notification_templates, - _credentials, - _unified_jobs -) - -from awx.main.utils import decrypt_field - - -@pytest.mark.django_db -def test_notification_template_migration(): - # Doesn't get tagged as UTF8 because the the internal save call explicitly sets skip_utf8=True - with mock.patch('awx.main.models.notifications.encrypt_field', encrypt_field): - nt = NotificationTemplate.objects.create(notification_type='slack', notification_configuration=dict(token='test')) - - - assert nt.notification_configuration['token'].startswith('$encrypted$AES$') - - _notification_templates(apps) - nt.refresh_from_db() - - assert nt.notification_configuration['token'].startswith('$encrypted$AESCBC$') - assert decrypt_field(nt, 'notification_configuration', subfield='token') == 'test' - - # This is here for a side-effect. 
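Nearly every test module in this stretch swaps the third-party `mock` package for the standard-library `unittest.mock` that ships with Python 3.3+. The two expose the same API, so only the import line changes; a transitional shim, if one were wanted during a dual-version window (an assumption, not something this diff adds), would look like:

```python
try:
    from unittest import mock   # Python 3.3+: standard library
except ImportError:             # Python 2 fallback: the third-party backport
    import mock

import os

with mock.patch('os.getcwd', return_value='/tmp'):
    assert os.getcwd() == '/tmp'   # identical API either way
```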
- # Exception if the encryption type of AESCBC is not properly skipped, ensures - # our `startswith` calls don't have typos - _notification_templates(apps) - - -@pytest.mark.django_db -@pytest.mark.parametrize("old_enc, new_enc, value", [ - ('$encrypted$UTF8$AES', '$encrypted$UTF8$AESCBC$', u'Iñtërnâtiônà lizætiøn'), - ('$encrypted$AES$', '$encrypted$AESCBC$', 'test'), -]) -def test_credential_migration(old_enc, new_enc, value): - with mock.patch('awx.main.models.credential.encrypt_field', encrypt_field): - cred_type = ssh() - cred_type.save() - - cred = Credential.objects.create(credential_type=cred_type, inputs=dict(password=value)) - - assert cred.password.startswith(old_enc) - - _credentials(apps) - cred.refresh_from_db() - - assert cred.password.startswith(new_enc) - assert decrypt_field(cred, 'password') == value - - # This is here for a side-effect. - # Exception if the encryption type of AESCBC is not properly skipped, ensures - # our `startswith` calls don't have typos - _credentials(apps) - - -@pytest.mark.django_db -@pytest.mark.parametrize("old_enc, new_enc, value", [ - ('$encrypted$AES$', '$encrypted$AESCBC$', u'Iñtërnâtiônà lizætiøn'), - ('$encrypted$AES$', '$encrypted$AESCBC$', 'test'), -]) -def test_unified_job_migration(old_enc, new_enc, value): - with mock.patch('awx.main.models.base.encrypt_field', encrypt_field): - uj = UnifiedJob.objects.create(launch_type='manual', start_args=json.dumps({'test':value})) - - assert uj.start_args.startswith(old_enc) - - _unified_jobs(apps) - uj.refresh_from_db() - - assert uj.start_args.startswith(new_enc) - assert json.loads(decrypt_field(uj, 'start_args')) == {'test':value} - - # This is here for a side-effect. - # Exception if the encryption type of AESCBC is not properly skipped, ensures - # our `startswith` calls don't have typos - _unified_jobs(apps) diff --git a/awx/main/tests/functional/test_session.py b/awx/main/tests/functional/test_session.py index 5ce9bfffd6..b30c5cb523 100644 --- a/awx/main/tests/functional/test_session.py +++ b/awx/main/tests/functional/test_session.py @@ -7,7 +7,7 @@ from django.test.utils import override_settings from django.contrib.sessions.middleware import SessionMiddleware from django.contrib.sessions.models import Session from django.contrib.auth import SESSION_KEY -import mock +from unittest import mock from awx.api.versioning import reverse diff --git a/awx/main/tests/functional/test_tasks.py b/awx/main/tests/functional/test_tasks.py index cbb3e281d3..fc7e556460 100644 --- a/awx/main/tests/functional/test_tasks.py +++ b/awx/main/tests/functional/test_tasks.py @@ -1,5 +1,5 @@ import pytest -import mock +from unittest import mock import os from django.utils.timezone import now, timedelta diff --git a/awx/main/tests/unit/api/serializers/conftest.py b/awx/main/tests/unit/api/serializers/conftest.py index 137a0083f4..7c26664954 100644 --- a/awx/main/tests/unit/api/serializers/conftest.py +++ b/awx/main/tests/unit/api/serializers/conftest.py @@ -1,4 +1,4 @@ -import mock +from unittest import mock import pytest diff --git a/awx/main/tests/unit/api/serializers/test_inventory_serializers.py b/awx/main/tests/unit/api/serializers/test_inventory_serializers.py index d5ee5d906c..664f374483 100644 --- a/awx/main/tests/unit/api/serializers/test_inventory_serializers.py +++ b/awx/main/tests/unit/api/serializers/test_inventory_serializers.py @@ -1,7 +1,7 @@ # Python import pytest -import mock -from mock import PropertyMock +from unittest import mock +from unittest.mock import PropertyMock # AWX from 
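The deleted test_reencrypt_migration.py exercised the old `$encrypted$AES$` to `$encrypted$AESCBC$` re-encryption path and leaned on the `skip_utf8` flag, which the encryption.py rewrite at the end of this diff removes outright. For reference, the marker format those tests asserted against splits like this (the helper is hypothetical; the marker strings are the ones visible in the hunks):

```python
def split_encrypted_value(value):
    """Split '$encrypted$UTF8$AESCBC$<b64>' into marker tokens and payload."""
    assert value.startswith('$encrypted$')
    tokens = value.split('$')      # ['', 'encrypted', 'UTF8', 'AESCBC', '<b64>']
    return tokens[2:-1], tokens[-1]

markers, payload = split_encrypted_value('$encrypted$UTF8$AESCBC$aGVsbG8=')
assert markers == ['UTF8', 'AESCBC']
assert payload == 'aGVsbG8='
```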
awx.api.serializers import ( diff --git a/awx/main/tests/unit/api/serializers/test_job_serializers.py b/awx/main/tests/unit/api/serializers/test_job_serializers.py index 5688a845ec..e7b0ee7792 100644 --- a/awx/main/tests/unit/api/serializers/test_job_serializers.py +++ b/awx/main/tests/unit/api/serializers/test_job_serializers.py @@ -1,11 +1,9 @@ # Python from collections import namedtuple import pytest -import mock +from unittest import mock import json -from six.moves import xrange - # AWX from awx.api.serializers import ( JobDetailSerializer, @@ -47,12 +45,12 @@ def job(mocker, job_template, project_update): @pytest.fixture def labels(mocker): - return [Label(id=x, name='label-%d' % x) for x in xrange(0, 25)] + return [Label(id=x, name='label-%d' % x) for x in range(0, 25)] @pytest.fixture def jobs(mocker): - return [Job(id=x, name='job-%d' % x) for x in xrange(0, 25)] + return [Job(id=x, name='job-%d' % x) for x in range(0, 25)] @mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) diff --git a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py index dee880f416..437f6d9404 100644 --- a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py +++ b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py @@ -1,8 +1,6 @@ # Python import pytest -import mock - -from six.moves import xrange +from unittest import mock # AWX from awx.api.serializers import ( @@ -41,7 +39,7 @@ def job(mocker, job_template): @pytest.fixture def jobs(mocker): - return [Job(id=x, name='job-%d' % x) for x in xrange(0, 25)] + return [Job(id=x, name='job-%d' % x) for x in range(0, 25)] @mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) diff --git a/awx/main/tests/unit/api/serializers/test_workflow_serializers.py b/awx/main/tests/unit/api/serializers/test_workflow_serializers.py index 4753e343cb..6cec577129 100644 --- a/awx/main/tests/unit/api/serializers/test_workflow_serializers.py +++ b/awx/main/tests/unit/api/serializers/test_workflow_serializers.py @@ -1,6 +1,6 @@ # Python import pytest -import mock +from unittest import mock # AWX from awx.api.serializers import ( diff --git a/awx/main/tests/unit/api/test_filters.py b/awx/main/tests/unit/api/test_filters.py index cc53234e97..913413a35f 100644 --- a/awx/main/tests/unit/api/test_filters.py +++ b/awx/main/tests/unit/api/test_filters.py @@ -66,7 +66,7 @@ def test_invalid_field(): field_lookup = FieldLookupBackend() with pytest.raises(ValueError) as excinfo: field_lookup.value_to_python(WorkflowJobTemplate, invalid_field, 'foo') - assert 'is not an allowed field name. Must be ascii encodable.' in excinfo.value.message + assert 'is not an allowed field name. Must be ascii encodable.' 
in str(excinfo.value) @pytest.mark.parametrize('lookup_suffix', ['', 'contains', 'startswith', 'in']) diff --git a/awx/main/tests/unit/api/test_generics.py b/awx/main/tests/unit/api/test_generics.py index 5ec2f8980c..de7f8ab4c8 100644 --- a/awx/main/tests/unit/api/test_generics.py +++ b/awx/main/tests/unit/api/test_generics.py @@ -1,7 +1,7 @@ # Python import pytest -import mock +from unittest import mock # DRF from rest_framework import status @@ -12,7 +12,6 @@ from rest_framework.exceptions import PermissionDenied from awx.api.generics import ( ParentMixin, SubListCreateAttachDetachAPIView, SubListAttachDetachAPIView, - DeleteLastUnattachLabelMixin, ResourceAccessList, ListAPIView ) @@ -31,13 +30,6 @@ def get_object_or_400(mocker): @pytest.fixture -def mock_response_new(mocker): - m = mocker.patch('awx.api.generics.Response.__new__') - m.return_value = m - return m - - -@pytest.fixture def mock_organization(): return Organization(pk=4, name="Unsaved Org") @@ -76,33 +68,26 @@ class TestSubListCreateAttachDetachAPIView: assert type(res) is Response - def test_attach_create_and_associate(self, mocker, get_object_or_400, parent_relationship_factory, mock_response_new): + def test_attach_create_and_associate(self, mocker, get_object_or_400, parent_relationship_factory): (serializer, mock_parent_relationship) = parent_relationship_factory(SubListCreateAttachDetachAPIView, 'wife') create_return_value = mocker.MagicMock(status_code=status.HTTP_201_CREATED) serializer.create = mocker.Mock(return_value=create_return_value) mock_request = mocker.MagicMock(data=dict()) - ret = serializer.attach(mock_request, None, None) + serializer.attach(mock_request, None, None) - assert ret == mock_response_new serializer.create.assert_called_with(mock_request, None, None) mock_parent_relationship.wife.add.assert_called_with(get_object_or_400.return_value) - mock_response_new.assert_called_with( - Response, create_return_value.data, status=status.HTTP_201_CREATED, - headers={'Location': create_return_value['Location']} - ) - def test_attach_associate_only(self, mocker, get_object_or_400, parent_relationship_factory, mock_response_new): + def test_attach_associate_only(self, mocker, get_object_or_400, parent_relationship_factory): (serializer, mock_parent_relationship) = parent_relationship_factory(SubListCreateAttachDetachAPIView, 'wife') serializer.create = mocker.Mock(return_value=mocker.MagicMock()) mock_request = mocker.MagicMock(data=dict(id=1)) - ret = serializer.attach(mock_request, None, None) + serializer.attach(mock_request, None, None) - assert ret == mock_response_new serializer.create.assert_not_called() mock_parent_relationship.wife.add.assert_called_with(get_object_or_400.return_value) - mock_response_new.assert_called_with(Response, status=status.HTTP_204_NO_CONTENT) def test_unattach_validate_ok(self, mocker): mock_request = mocker.MagicMock(data=dict(id=1)) @@ -183,44 +168,6 @@ def test_attach_detatch_only(mocker): assert 'field is missing' in resp.data['msg'] -class TestDeleteLastUnattachLabelMixin: - @mock.patch('__builtin__.super') - def test_unattach_ok(self, super, mocker): - mock_request = mocker.MagicMock() - mock_sub_id = mocker.MagicMock() - super.return_value = super - super.unattach_validate = mocker.MagicMock(return_value=(mock_sub_id, None)) - super.unattach_by_id = mocker.MagicMock() - - mock_model = mocker.MagicMock() - mock_model.objects.get.return_value = mock_model - mock_model.is_detached.return_value = True - - view = DeleteLastUnattachLabelMixin() - view.model = mock_model - 
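The deleted `TestDeleteLastUnattachLabelMixin` tests patched `'__builtin__.super'`, a module name that only exists on Python 2; Python 3 renames it to `builtins`, so the old patch target cannot even be imported. The rename in miniature:

```python
from unittest import mock
import builtins

assert builtins.len is len   # Python 3 name for the __builtin__ module

# a Python 2 target like mock.patch('__builtin__.super') would need to become
# mock.patch('builtins.super'); patching any builtin works the same way:
with mock.patch('builtins.len', return_value=42):
    assert len('abc') == 42
```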
- view.unattach(mock_request, None, None) - - super.unattach_validate.assert_called_with(mock_request) - super.unattach_by_id.assert_called_with(mock_request, mock_sub_id) - mock_model.is_detached.assert_called_with() - mock_model.objects.get.assert_called_with(id=mock_sub_id) - mock_model.delete.assert_called_with() - - @mock.patch('__builtin__.super') - def test_unattach_fail(self, super, mocker): - mock_request = mocker.MagicMock() - mock_response = mocker.MagicMock() - super.return_value = super - super.unattach_validate = mocker.MagicMock(return_value=(None, mock_response)) - view = DeleteLastUnattachLabelMixin() - - res = view.unattach(mock_request, None, None) - - super.unattach_validate.assert_called_with(mock_request) - assert mock_response == res - - class TestParentMixin: def test_get_parent_object(self, mocker, get_object_or_404): parent_mixin = ParentMixin() diff --git a/awx/main/tests/unit/api/test_parsers.py b/awx/main/tests/unit/api/test_parsers.py index 7b3ee6db6b..c2d221af36 100644 --- a/awx/main/tests/unit/api/test_parsers.py +++ b/awx/main/tests/unit/api/test_parsers.py @@ -1,5 +1,5 @@ import pytest -import StringIO +from io import StringIO # AWX from awx.api.parsers import JSONParser @@ -16,14 +16,14 @@ from rest_framework.exceptions import ParseError ] ) def test_jsonparser_valid_input(input_, output): - input_stream = StringIO.StringIO(input_) + input_stream = StringIO(input_) assert JSONParser().parse(input_stream) == output input_stream.close() @pytest.mark.parametrize('invalid_input', ['1', '"foobar"', '3.14', '{"foo": "bar",}']) def test_json_parser_invalid_input(invalid_input): - input_stream = StringIO.StringIO(invalid_input) + input_stream = StringIO(invalid_input) with pytest.raises(ParseError): JSONParser().parse(input_stream) input_stream.close() diff --git a/awx/main/tests/unit/api/test_views.py b/awx/main/tests/unit/api/test_views.py index fb4ce2e301..369dbc6581 100644 --- a/awx/main/tests/unit/api/test_views.py +++ b/awx/main/tests/unit/api/test_views.py @@ -1,10 +1,9 @@ # -*- coding: utf-8 -*- import re - -import mock import pytest import requests from copy import deepcopy +from unittest import mock from collections import namedtuple @@ -24,13 +23,6 @@ from awx.main.views import handle_error from rest_framework.test import APIRequestFactory -@pytest.fixture -def mock_response_new(mocker): - m = mocker.patch('awx.api.views.Response.__new__') - m.return_value = m - return m - - def test_handle_error(): # Assure that templating of error does not raise errors request = APIRequestFactory().get('/fooooo/') @@ -38,7 +30,7 @@ def test_handle_error(): class TestApiRootView: - def test_get_endpoints(self, mocker, mock_response_new): + def test_get_endpoints(self, mocker): endpoints = [ 'ping', 'config', @@ -72,11 +64,9 @@ class TestApiRootView: ] view = ApiVersionRootView() ret = view.get(mocker.MagicMock()) - - assert ret == mock_response_new - data_arg = mock_response_new.mock_calls[0][1][1] + assert ret.status_code == 200 for endpoint in endpoints: - assert endpoint in data_arg + assert endpoint in ret.data class TestJobTemplateLabelList: diff --git a/awx/main/tests/unit/commands/test_inventory_import.py b/awx/main/tests/unit/commands/test_inventory_import.py index 8bbe219011..105086bcb8 100644 --- a/awx/main/tests/unit/commands/test_inventory_import.py +++ b/awx/main/tests/unit/commands/test_inventory_import.py @@ -20,8 +20,8 @@ class TestInvalidOptions: cmd = Command() with pytest.raises(CommandError) as err: cmd.handle() - assert 'inventory-id' in 
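test_parsers.py illustrates the `StringIO` module split: Python 3 replaces `StringIO.StringIO` with `io.StringIO` for text and `io.BytesIO` for bytes, and the two are not interchangeable:

```python
from io import BytesIO, StringIO

text_stream = StringIO('{"foo": "bar"}')   # holds str
assert text_stream.read() == '{"foo": "bar"}'

byte_stream = BytesIO(b'{"foo": "bar"}')   # holds bytes
assert byte_stream.read() == b'{"foo": "bar"}'

# mixing them fails: StringIO().write(b'x') raises TypeError
```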
err.value.message - assert 'required' in err.value.message + assert 'inventory-id' in str(err.value) + assert 'required' in str(err.value) def test_invalid_options_name_and_id(self): # You can not specify both name and if of the inventory @@ -30,8 +30,8 @@ class TestInvalidOptions: cmd.handle( inventory_id=42, inventory_name='my-inventory' ) - assert 'inventory-id' in err.value.message - assert 'exclusive' in err.value.message + assert 'inventory-id' in str(err.value) + assert 'exclusive' in str(err.value) def test_invalid_options_id_and_keep_vars(self): # You can't overwrite and keep_vars at the same time, that wouldn't make sense @@ -40,8 +40,8 @@ class TestInvalidOptions: cmd.handle( inventory_id=42, overwrite=True, keep_vars=True ) - assert 'overwrite-vars' in err.value.message - assert 'exclusive' in err.value.message + assert 'overwrite-vars' in str(err.value) + assert 'exclusive' in str(err.value) def test_invalid_options_id_but_no_source(self): # Need a source to import @@ -50,19 +50,19 @@ class TestInvalidOptions: cmd.handle( inventory_id=42, overwrite=True, keep_vars=True ) - assert 'overwrite-vars' in err.value.message - assert 'exclusive' in err.value.message + assert 'overwrite-vars' in str(err.value) + assert 'exclusive' in str(err.value) with pytest.raises(CommandError) as err: cmd.handle( inventory_id=42, overwrite_vars=True, keep_vars=True ) - assert 'overwrite-vars' in err.value.message - assert 'exclusive' in err.value.message + assert 'overwrite-vars' in str(err.value) + assert 'exclusive' in str(err.value) def test_invalid_options_missing_source(self): cmd = Command() with pytest.raises(CommandError) as err: cmd.handle(inventory_id=42) - assert '--source' in err.value.message - assert 'required' in err.value.message + assert '--source' in str(err.value) + assert 'required' in str(err.value) diff --git a/awx/main/tests/unit/commands/test_replay_job_events.py b/awx/main/tests/unit/commands/test_replay_job_events.py index 36b668a7bf..35042dd840 100644 --- a/awx/main/tests/unit/commands/test_replay_job_events.py +++ b/awx/main/tests/unit/commands/test_replay_job_events.py @@ -3,7 +3,7 @@ # Python import pytest -import mock +from unittest import mock from datetime import timedelta # Django diff --git a/awx/main/tests/unit/conftest.py b/awx/main/tests/unit/conftest.py index cb73ee8c55..a411e3e085 100644 --- a/awx/main/tests/unit/conftest.py +++ b/awx/main/tests/unit/conftest.py @@ -1,7 +1,7 @@ import pytest import logging -from mock import PropertyMock +from unittest.mock import PropertyMock from awx.api.urls import urlpatterns as api_patterns diff --git a/awx/main/tests/unit/expect/test_expect.py b/awx/main/tests/unit/expect/test_expect.py index 5f7a76b139..f3dd395307 100644 --- a/awx/main/tests/unit/expect/test_expect.py +++ b/awx/main/tests/unit/expect/test_expect.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- -import StringIO -import mock import os import pytest import re @@ -10,10 +8,13 @@ import stat import tempfile import time from collections import OrderedDict +from io import StringIO +from unittest import mock from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization +from django.utils.encoding import smart_str, smart_bytes from awx.main.expect import run, isolated_manager @@ -32,11 +33,11 @@ def rsa_key(request): backend=default_backend() ) return ( - key.private_bytes( + smart_str(key.private_bytes( encoding=serialization.Encoding.PEM, 
format=serialization.PrivateFormat.TraditionalOpenSSL, - encryption_algorithm=serialization.BestAvailableEncryption(passphrase) - ), + encryption_algorithm=serialization.BestAvailableEncryption(smart_bytes(passphrase)) + )), passphrase ) @@ -58,7 +59,7 @@ def mock_sleep(request): def test_simple_spawn(): - stdout = StringIO.StringIO() + stdout = StringIO() status, rc = run.run_pexpect( ['ls', '-la'], HERE, @@ -72,7 +73,7 @@ def test_simple_spawn(): def test_error_rc(): - stdout = StringIO.StringIO() + stdout = StringIO() status, rc = run.run_pexpect( ['ls', '-nonsense'], HERE, @@ -86,7 +87,7 @@ def test_error_rc(): def test_cancel_callback_error(): - stdout = StringIO.StringIO() + stdout = StringIO() def bad_callback(): raise Exception('unique exception') @@ -108,7 +109,7 @@ def test_cancel_callback_error(): @pytest.mark.timeout(3) # https://github.com/ansible/tower/issues/2391#issuecomment-401946895 @pytest.mark.parametrize('value', ['abc123', six.u('Iñtërnâtiônà lizætiøn')]) def test_env_vars(value): - stdout = StringIO.StringIO() + stdout = StringIO() status, rc = run.run_pexpect( ['python', '-c', 'import os; print os.getenv("X_MY_ENV")'], HERE, @@ -122,7 +123,7 @@ def test_env_vars(value): def test_password_prompt(): - stdout = StringIO.StringIO() + stdout = StringIO() expect_passwords = OrderedDict() expect_passwords[re.compile(r'Password:\s*?$', re.M)] = 'secret123' status, rc = run.run_pexpect( @@ -139,7 +140,7 @@ def test_password_prompt(): def test_job_timeout(): - stdout = StringIO.StringIO() + stdout = StringIO() extra_update_fields={} status, rc = run.run_pexpect( ['python', '-c', 'import time; time.sleep(5)'], @@ -156,7 +157,7 @@ def test_job_timeout(): def test_manual_cancellation(): - stdout = StringIO.StringIO() + stdout = StringIO() status, rc = run.run_pexpect( ['python', '-c', 'print raw_input("Password: ")'], HERE, @@ -172,7 +173,7 @@ def test_manual_cancellation(): def test_build_isolated_job_data(private_data_dir, rsa_key): pem, passphrase = rsa_key mgr = isolated_manager.IsolatedManager( - ['ls', '-la'], HERE, {}, StringIO.StringIO(), '' + ['ls', '-la'], HERE, {}, StringIO(), '' ) mgr.private_data_dir = private_data_dir mgr.build_isolated_job_data() @@ -209,7 +210,7 @@ def test_run_isolated_job(private_data_dir, rsa_key): env = {'JOB_ID': '1'} pem, passphrase = rsa_key mgr = isolated_manager.IsolatedManager( - ['ls', '-la'], HERE, env, StringIO.StringIO(), '' + ['ls', '-la'], HERE, env, StringIO(), '' ) mgr.private_data_dir = private_data_dir secrets = { @@ -220,7 +221,7 @@ def test_run_isolated_job(private_data_dir, rsa_key): 'ssh_key_data': pem } mgr.build_isolated_job_data() - stdout = StringIO.StringIO() + stdout = StringIO() # Mock environment variables for callback module with mock.patch('os.getenv') as env_mock: env_mock.return_value = '/path/to/awx/lib' @@ -239,7 +240,7 @@ def test_run_isolated_adhoc_command(private_data_dir, rsa_key): env = {'AD_HOC_COMMAND_ID': '1'} pem, passphrase = rsa_key mgr = isolated_manager.IsolatedManager( - ['pwd'], HERE, env, StringIO.StringIO(), '' + ['pwd'], HERE, env, StringIO(), '' ) mgr.private_data_dir = private_data_dir secrets = { @@ -250,7 +251,7 @@ def test_run_isolated_adhoc_command(private_data_dir, rsa_key): 'ssh_key_data': pem } mgr.build_isolated_job_data() - stdout = StringIO.StringIO() + stdout = StringIO() # Mock environment variables for callback module with mock.patch('os.getenv') as env_mock: env_mock.return_value = '/path/to/awx/lib' @@ -270,7 +271,7 @@ def test_run_isolated_adhoc_command(private_data_dir, 
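In test_expect.py, the RSA fixture gains `smart_str`/`smart_bytes` conversions because `cryptography` works strictly in `bytes` on Python 3: `private_bytes()` returns `bytes` and `BestAvailableEncryption` rejects a `str` passphrase. The same boundary handled with the standard library alone (assuming UTF-8, which is what `smart_str` defaults to):

```python
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
pem_bytes = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.TraditionalOpenSSL,
    encryption_algorithm=serialization.BestAvailableEncryption(b'passphrase'),
)
assert isinstance(pem_bytes, bytes)
pem_text = pem_bytes.decode('utf-8')   # the conversion smart_str performs
assert pem_text.startswith('-----BEGIN')
```

One loose end worth flagging: the commands these tests spawn still use Python 2 syntax (`print os.getenv(...)`, `raw_input`), so they only pass where the `python` on PATH is Python 2.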
rsa_key): def test_check_isolated_job(private_data_dir, rsa_key): pem, passphrase = rsa_key - stdout = StringIO.StringIO() + stdout = StringIO() mgr = isolated_manager.IsolatedManager(['ls', '-la'], HERE, {}, stdout, '') mgr.private_data_dir = private_data_dir mgr.instance = mock.Mock(id=123, pk=123, verbosity=5, spec_set=['id', 'pk', 'verbosity']) @@ -318,51 +319,9 @@ def test_check_isolated_job(private_data_dir, rsa_key): ) -def test_check_isolated_job_with_multibyte_unicode(private_data_dir): - """ - Ensure that multibyte unicode is properly synced when stdout only - contains the first part of the multibyte character - - see: https://github.com/ansible/tower/issues/2315 - """ - def raw_output(): - yield ('failed', '\xe8\xb5\xb7\xe5') # 起 <partial byte> - yield ('successful', '\xe8\xb5\xb7\xe5\x8b\x95') # 起動 - raw_output = raw_output() - stdout = StringIO.StringIO() - mgr = isolated_manager.IsolatedManager(['ls', '-la'], HERE, {}, stdout, '') - mgr.private_data_dir = private_data_dir - mgr.instance = mock.Mock(id=123, pk=123, verbosity=5, spec_set=['id', 'pk', 'verbosity']) - mgr.started_at = time.time() - mgr.host = 'isolated-host' - - os.mkdir(os.path.join(private_data_dir, 'artifacts')) - with mock.patch('awx.main.expect.run.run_pexpect') as run_pexpect: - - def _synchronize_job_artifacts(args, cwd, env, buff, **kw): - buff.write('checking job status...') - status, out = next(raw_output) - for filename, data in ( - ['status', status], - ['rc', '0'], - ['stdout', out] - ): - with open(os.path.join(private_data_dir, 'artifacts', filename), 'w') as f: - f.write(data) - f.flush() - return (status, 0) - - run_pexpect.side_effect = _synchronize_job_artifacts - with mock.patch.object(mgr, '_missing_artifacts') as missing_artifacts: - missing_artifacts.return_value = False - status, rc = mgr.check(interval=0) - - assert stdout.getvalue() == '起動' - - def test_check_isolated_job_timeout(private_data_dir, rsa_key): pem, passphrase = rsa_key - stdout = StringIO.StringIO() + stdout = StringIO() extra_update_fields = {} mgr = isolated_manager.IsolatedManager(['ls', '-la'], HERE, {}, stdout, '', job_timeout=1, diff --git a/awx/main/tests/unit/models/test_events.py b/awx/main/tests/unit/models/test_events.py index 3176e1945c..734d38f449 100644 --- a/awx/main/tests/unit/models/test_events.py +++ b/awx/main/tests/unit/models/test_events.py @@ -1,6 +1,6 @@ from datetime import datetime from django.utils.timezone import utc -import mock +from unittest import mock import pytest from awx.main.models import (JobEvent, ProjectUpdateEvent, AdHocCommandEvent, diff --git a/awx/main/tests/unit/models/test_ha.py b/awx/main/tests/unit/models/test_ha.py index 4ceb83c77a..0e29caf8aa 100644 --- a/awx/main/tests/unit/models/test_ha.py +++ b/awx/main/tests/unit/models/test_ha.py @@ -1,6 +1,6 @@ import pytest -import mock -from mock import Mock +from unittest import mock +from unittest.mock import Mock from awx.main.models import ( Job, diff --git a/awx/main/tests/unit/models/test_inventory.py b/awx/main/tests/unit/models/test_inventory.py index 04afa15894..26ef5e1fa9 100644 --- a/awx/main/tests/unit/models/test_inventory.py +++ b/awx/main/tests/unit/models/test_inventory.py @@ -1,5 +1,5 @@ import pytest -import mock +from unittest import mock import json from django.core.exceptions import ValidationError diff --git a/awx/main/tests/unit/models/test_job_template_unit.py b/awx/main/tests/unit/models/test_job_template_unit.py index 1f45705ba4..aafae9ebc9 100644 --- a/awx/main/tests/unit/models/test_job_template_unit.py 
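The deleted multibyte-unicode test fed raw UTF-8 fragments such as `'\xe8\xb5\xb7\xe5'` into a text buffer, which only made sense under Python 2's byte-oriented `str`. On Python 3 a partial character has to be handled at the `bytes` layer, for example with an incremental decoder:

```python
import codecs

decoder = codecs.getincrementaldecoder('utf-8')()

# first fragment: '起' plus the leading byte of '動'
assert decoder.decode(b'\xe8\xb5\xb7\xe5') == '起'
# the dangling byte stays buffered until its continuation arrives
assert decoder.decode(b'\x8b\x95') == '動'
```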
+++ b/awx/main/tests/unit/models/test_job_template_unit.py @@ -3,7 +3,7 @@ import pytest # AWX from awx.main.models.jobs import JobTemplate -import mock +from unittest import mock def test_missing_project_error(job_template_factory): diff --git a/awx/main/tests/unit/models/test_label.py b/awx/main/tests/unit/models/test_label.py index ecbdcb94fb..94bd9e9d29 100644 --- a/awx/main/tests/unit/models/test_label.py +++ b/awx/main/tests/unit/models/test_label.py @@ -1,5 +1,5 @@ import pytest -import mock +from unittest import mock from awx.main.models.label import Label from awx.main.models.unified_jobs import UnifiedJobTemplate, UnifiedJob diff --git a/awx/main/tests/unit/models/test_survey_models.py b/awx/main/tests/unit/models/test_survey_models.py index e63f428922..30eecff523 100644 --- a/awx/main/tests/unit/models/test_survey_models.py +++ b/awx/main/tests/unit/models/test_survey_models.py @@ -85,6 +85,7 @@ def job(mocker): 'pk': 1, 'job_template.pk': 1, 'job_template.name': '', 'created_by.pk': 1, 'created_by.username': 'admin', 'launch_type': 'manual', + 'verbosity': 1, 'awx_meta_vars.return_value': {}, 'inventory.get_script_data.return_value': {}}) ret.project = mocker.MagicMock(scm_revision='asdf1234') diff --git a/awx/main/tests/unit/models/test_unified_job_unit.py b/awx/main/tests/unit/models/test_unified_job_unit.py index b1525767a9..ba9274d819 100644 --- a/awx/main/tests/unit/models/test_unified_job_unit.py +++ b/awx/main/tests/unit/models/test_unified_job_unit.py @@ -1,5 +1,5 @@ import pytest -import mock +from unittest import mock from awx.main.models import ( UnifiedJob, diff --git a/awx/main/tests/unit/models/test_workflow_unit.py b/awx/main/tests/unit/models/test_workflow_unit.py index 7f427493e1..f904cf3b95 100644 --- a/awx/main/tests/unit/models/test_workflow_unit.py +++ b/awx/main/tests/unit/models/test_workflow_unit.py @@ -6,7 +6,7 @@ from awx.main.models.workflow import ( WorkflowJobTemplate, WorkflowJobTemplateNode, WorkflowJobOptions, WorkflowJob, WorkflowJobNode ) -import mock +from unittest import mock @pytest.fixture @@ -236,4 +236,4 @@ class TestWorkflowJobNodeJobKWARGS: def test_get_ask_mapping_integrity(): - assert WorkflowJobTemplate.get_ask_mapping().keys() == ['extra_vars', 'inventory'] + assert list(WorkflowJobTemplate.get_ask_mapping().keys()) == ['extra_vars', 'inventory'] diff --git a/awx/main/tests/unit/notifications/test_rocketchat.py b/awx/main/tests/unit/notifications/test_rocketchat.py index d621f74ba2..6bf528a93e 100644 --- a/awx/main/tests/unit/notifications/test_rocketchat.py +++ b/awx/main/tests/unit/notifications/test_rocketchat.py @@ -1,4 +1,6 @@ -import mock +import json + +from unittest import mock from django.core.mail.message import EmailMessage import awx.main.notifications.rocketchat_backend as rocketchat_backend @@ -6,6 +8,7 @@ import awx.main.notifications.rocketchat_backend as rocketchat_backend def test_send_messages(): with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock: + requests_mock.post.return_value.status_code = 201 backend = rocketchat_backend.RocketChatBackend() message = EmailMessage('test subject', 'test body', [], ['http://example.com', ]) sent_messages = backend.send_messages([message, ]) @@ -15,24 +18,41 @@ def test_send_messages(): def test_send_messages_with_username(): with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock: + requests_mock.post.return_value.status_code = 201 backend = rocketchat_backend.RocketChatBackend(rocketchat_username='testuser') 
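The RocketChat tests stop comparing the POSTed JSON byte-for-byte and instead parse it back with `json.loads` (they also stub a 201 response, which the backend evidently checks). That is the robust pattern, because the key order `json.dumps` emits follows dict insertion order and is not a stable contract:

```python
import json

payload_a = json.dumps({'username': 'testuser', 'text': 'test subject'})
payload_b = json.dumps({'text': 'test subject', 'username': 'testuser'})

assert payload_a != payload_b                          # string compare is brittle
assert json.loads(payload_a) == json.loads(payload_b)  # parsed compare is not
```

The nearby `get_ask_mapping()` assertion changes for a related reason: a Python 3 `dict_keys` view never compares equal to a list, so it is wrapped in `list()` first.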
message = EmailMessage('test subject', 'test body', [], ['http://example.com', ]) sent_messages = backend.send_messages([message, ]) - requests_mock.post.assert_called_once_with('http://example.com', data='{"username": "testuser", "text": "test subject"}', verify=True) + + calls = requests_mock.post.call_args_list + assert len(calls) == 1 + args, kwargs = calls[0] + assert args[0] == 'http://example.com' + assert json.loads(kwargs['data'])['text'] == 'test subject' + assert json.loads(kwargs['data'])['username'] == 'testuser' + assert kwargs['verify'] is True assert sent_messages == 1 def test_send_messages_with_icon_url(): with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock: + requests_mock.post.return_value.status_code = 201 backend = rocketchat_backend.RocketChatBackend(rocketchat_icon_url='http://example.com') message = EmailMessage('test subject', 'test body', [], ['http://example.com', ]) sent_messages = backend.send_messages([message, ]) - requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject", "icon_url": "http://example.com"}', verify=True) + + calls = requests_mock.post.call_args_list + assert len(calls) == 1 + args, kwargs = calls[0] + assert args[0] == 'http://example.com' + assert json.loads(kwargs['data'])['text'] == 'test subject' + assert json.loads(kwargs['data'])['icon_url'] == 'http://example.com' + assert kwargs['verify'] is True assert sent_messages == 1 def test_send_messages_with_no_verify_ssl(): with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock: + requests_mock.post.return_value.status_code = 201 backend = rocketchat_backend.RocketChatBackend(rocketchat_no_verify_ssl=True) message = EmailMessage('test subject', 'test body', [], ['http://example.com', ]) sent_messages = backend.send_messages([message, ]) diff --git a/awx/main/tests/unit/scheduler/test_dag_simple.py b/awx/main/tests/unit/scheduler/test_dag_simple.py index 3494f98f27..f5b8303a9a 100644 --- a/awx/main/tests/unit/scheduler/test_dag_simple.py +++ b/awx/main/tests/unit/scheduler/test_dag_simple.py @@ -14,7 +14,8 @@ def node_generator(): def simple_cycle_1(node_generator): g = SimpleDAG() nodes = [node_generator() for i in range(4)] - map(lambda n: g.add_node(n), nodes) + for n in nodes: + g.add_node(n) r''' 0 diff --git a/awx/main/tests/unit/scheduler/test_dag_workflow.py b/awx/main/tests/unit/scheduler/test_dag_workflow.py index 648a089a79..7f85ba2f85 100644 --- a/awx/main/tests/unit/scheduler/test_dag_workflow.py +++ b/awx/main/tests/unit/scheduler/test_dag_workflow.py @@ -36,7 +36,8 @@ def wf_node_generator(mocker): def workflow_dag_1(wf_node_generator): g = WorkflowDAG() nodes = [wf_node_generator() for i in range(4)] - map(lambda n: g.add_node(n), nodes) + for n in nodes: + g.add_node(n) r''' 0 @@ -67,7 +68,8 @@ class TestWorkflowDAG(): wf_root_nodes = [wf_node_generator() for i in range(0, 10)] wf_leaf_nodes = [wf_node_generator() for i in range(0, 10)] - map(lambda n: g.add_node(n), wf_root_nodes + wf_leaf_nodes) + for n in wf_root_nodes + wf_leaf_nodes: + g.add_node(n) ''' Pair up a root node with a single child via an edge @@ -77,7 +79,8 @@ class TestWorkflowDAG(): | | | C1 C2 Cx ''' - map(lambda (i, n): g.add_edge(wf_root_nodes[i], n, 'label'), enumerate(wf_leaf_nodes)) + for i, n in enumerate(wf_leaf_nodes): + g.add_edge(wf_root_nodes[i], n, 'label') return (g, wf_root_nodes, wf_leaf_nodes) @@ -185,7 +188,8 @@ class TestIsWorkflowDone(): def workflow_dag_canceled(self, 
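`map(lambda n: g.add_node(n), nodes)` was being used purely for its side effects; on Python 3 `map()` is lazy, so nothing runs until the iterator is consumed, and the tuple-unpacking parameter in `lambda (i, n): ...` is a syntax error outright (removed by PEP 3113). The plain loops the diff substitutes are the idiomatic fix:

```python
nodes = ['n0', 'n1', 'n2']
added = []

lazy = map(added.append, nodes)   # Python 3: lazy and never consumed here
assert added == []                # so no side effect has happened

for n in nodes:                   # the explicit loop runs immediately
    added.append(n)
assert added == nodes

for i, n in enumerate(nodes):     # replaces lambda (i, n): ..., now invalid syntax
    assert nodes[i] is n
```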
wf_node_generator): g = WorkflowDAG() nodes = [wf_node_generator() for i in range(1)] - map(lambda n: g.add_node(n), nodes) + for n in nodes: + g.add_node(n) r''' F0 ''' @@ -252,7 +256,8 @@ class TestBFSNodesToRun(): def workflow_dag_canceled(self, wf_node_generator): g = WorkflowDAG() nodes = [wf_node_generator() for i in range(4)] - map(lambda n: g.add_node(n), nodes) + for n in nodes: + g.add_node(n) r''' C0 / | \ @@ -279,7 +284,8 @@ class TestDocsExample(): def complex_dag(self, wf_node_generator): g = WorkflowDAG() nodes = [wf_node_generator() for i in range(10)] - map(lambda n: g.add_node(n), nodes) + for n in nodes: + g.add_node(n) g.add_edge(nodes[0], nodes[1], "failure_nodes") g.add_edge(nodes[0], nodes[2], "success_nodes") diff --git a/awx/main/tests/unit/test_access.py b/awx/main/tests/unit/test_access.py index 02e863f47f..fb2eb7d403 100644 --- a/awx/main/tests/unit/test_access.py +++ b/awx/main/tests/unit/test_access.py @@ -1,5 +1,5 @@ import pytest -import mock +from unittest import mock from django.contrib.auth.models import User from django.forms.models import model_to_dict diff --git a/awx/main/tests/unit/test_ha.py b/awx/main/tests/unit/test_ha.py index ebd86cd3a2..f88850895c 100644 --- a/awx/main/tests/unit/test_ha.py +++ b/awx/main/tests/unit/test_ha.py @@ -1,7 +1,7 @@ # Copyright (c) 2016 Ansible, Inc. # Python -import mock +from unittest import mock # AWX from awx.main.ha import is_ha_environment diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index 5a9c673312..d5256bf220 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -3,7 +3,7 @@ from contextlib import contextmanager from datetime import datetime from functools import partial -import ConfigParser +import configparser import json import os import re @@ -12,7 +12,7 @@ import tempfile from backports.tempfile import TemporaryDirectory import fcntl -import mock +from unittest import mock import pytest import six import yaml @@ -427,11 +427,11 @@ class TestExtraVarSanitation(TestJobExecution): class TestGenericRun(TestJobExecution): def test_generic_failure(self): - self.task.build_private_data_files = mock.Mock(side_effect=IOError()) + self.task.build_private_data_files = mock.Mock(side_effect=OSError()) with pytest.raises(Exception): self.task.run(self.pk) update_model_call = self.task.update_model.call_args[1] - assert 'IOError' in update_model_call['result_traceback'] + assert 'OSError' in update_model_call['result_traceback'] assert update_model_call['status'] == 'error' assert update_model_call['emitted_events'] == 0 @@ -1131,7 +1131,7 @@ class TestJobCredentials(TestJobExecution): def run_pexpect_side_effect(*args, **kwargs): args, cwd, env, stdout = args - shade_config = open(env['OS_CLIENT_CONFIG_FILE'], 'rb').read() + shade_config = open(env['OS_CLIENT_CONFIG_FILE'], 'r').read() assert shade_config == '\n'.join([ 'clouds:', ' devstack:', @@ -1167,7 +1167,7 @@ class TestJobCredentials(TestJobExecution): def run_pexpect_side_effect(*args, **kwargs): args, cwd, env, stdout = args - config = ConfigParser.ConfigParser() + config = configparser.ConfigParser() config.read(env['OVIRT_INI_PATH']) assert config.get('ovirt', 'ovirt_url') == 'some-ovirt-host.example.org' assert config.get('ovirt', 'ovirt_username') == 'bob' @@ -1175,7 +1175,7 @@ class TestJobCredentials(TestJobExecution): if ca_file: assert config.get('ovirt', 'ovirt_ca_file') == ca_file else: - with pytest.raises(ConfigParser.NoOptionError): + with 
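The test_tasks.py hunks above fold in three more renames: the `ConfigParser` module is lowercase `configparser` on Python 3, `IOError` is now just an alias of `OSError` (so tracebacks name `OSError`), and files whose contents are compared against `str` must be opened in text mode, since `'rb'` yields `bytes`. For instance:

```python
import configparser   # Python 3 name for the ConfigParser module

config = configparser.ConfigParser()
config.read_string('[ovirt]\novirt_url = https://ovirt.example.org\n')
assert config.get('ovirt', 'ovirt_url') == 'https://ovirt.example.org'

assert IOError is OSError                               # folded together in Python 3
assert isinstance(open(__file__, 'rb').read(), bytes)   # binary mode gives bytes
```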
pytest.raises(configparser.NoOptionError): config.get('ovirt', 'ovirt_ca_file') return ['successful', 0] @@ -1209,7 +1209,7 @@ class TestJobCredentials(TestJobExecution): assert env['ANSIBLE_NET_AUTHORIZE'] == expected_authorize if authorize: assert env['ANSIBLE_NET_AUTH_PASS'] == 'authorizeme' - assert open(env['ANSIBLE_NET_SSH_KEYFILE'], 'rb').read() == self.EXAMPLE_PRIVATE_KEY + assert open(env['ANSIBLE_NET_SSH_KEYFILE'], 'r').read() == self.EXAMPLE_PRIVATE_KEY return ['successful', 0] self.run_pexpect.side_effect = run_pexpect_side_effect @@ -1549,7 +1549,7 @@ class TestJobCredentials(TestJobExecution): def run_pexpect_side_effect(*args, **kwargs): args, cwd, env, stdout = args - assert open(env['MY_CLOUD_INI_FILE'], 'rb').read() == '[mycloud]\nABC123' + assert open(env['MY_CLOUD_INI_FILE'], 'r').read() == '[mycloud]\nABC123' return ['successful', 0] self.run_pexpect.side_effect = run_pexpect_side_effect @@ -1576,7 +1576,7 @@ class TestJobCredentials(TestJobExecution): def run_pexpect_side_effect(*args, **kwargs): args, cwd, env, stdout = args - assert open(env['MY_CLOUD_INI_FILE'], 'rb').read() == value.encode('utf-8') + assert open(env['MY_CLOUD_INI_FILE'], 'r').read() == value return ['successful', 0] self.run_pexpect.side_effect = run_pexpect_side_effect @@ -1619,8 +1619,8 @@ class TestJobCredentials(TestJobExecution): def run_pexpect_side_effect(*args, **kwargs): args, cwd, env, stdout = args - assert open(env['MY_CERT_INI_FILE'], 'rb').read() == '[mycert]\nCERT123' - assert open(env['MY_KEY_INI_FILE'], 'rb').read() == '[mykey]\nKEY123' + assert open(env['MY_CERT_INI_FILE'], 'r').read() == '[mycert]\nCERT123' + assert open(env['MY_KEY_INI_FILE'], 'r').read() == '[mykey]\nKEY123' return ['successful', 0] self.run_pexpect.side_effect = run_pexpect_side_effect @@ -1820,7 +1820,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert 'AWS_SECRET_ACCESS_KEY' not in env assert 'EC2_INI_PATH' in env - config = ConfigParser.ConfigParser() + config = configparser.ConfigParser() config.read(env['EC2_INI_PATH']) assert 'ec2' in config.sections() return ['successful', 0] @@ -1895,7 +1895,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['AWS_SECRET_ACCESS_KEY'] == 'secret' assert 'EC2_INI_PATH' in env - config = ConfigParser.ConfigParser() + config = configparser.ConfigParser() config.read(env['EC2_INI_PATH']) assert 'ec2' in config.sections() return ['successful', 0] @@ -1921,7 +1921,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): def run_pexpect_side_effect(*args, **kwargs): args, cwd, env, stdout = args - config = ConfigParser.ConfigParser() + config = configparser.ConfigParser() config.read(env['VMWARE_INI_PATH']) assert config.get('vmware', 'username') == 'bob' assert config.get('vmware', 'password') == 'secret' @@ -1963,7 +1963,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['AZURE_SUBSCRIPTION_ID'] == 'some-subscription' assert env['AZURE_CLOUD_ENVIRONMENT'] == 'foobar' - config = ConfigParser.ConfigParser() + config = configparser.ConfigParser() config.read(env['AZURE_INI_PATH']) assert config.get('azure', 'include_powerstate') == 'yes' assert config.get('azure', 'group_by_resource_group') == 'no' @@ -2008,7 +2008,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['AZURE_PASSWORD'] == 'secret' assert env['AZURE_CLOUD_ENVIRONMENT'] == 'foobar' - config = ConfigParser.ConfigParser() + config = configparser.ConfigParser() config.read(env['AZURE_INI_PATH']) assert config.get('azure', 
'include_powerstate') == 'yes' assert config.get('azure', 'group_by_resource_group') == 'no' @@ -2054,7 +2054,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert json_data['client_email'] == 'bob' assert json_data['project_id'] == 'some-project' - config = ConfigParser.ConfigParser() + config = configparser.ConfigParser() config.read(env['GCE_INI_PATH']) assert 'cache' in config.sections() assert config.getint('cache', 'cache_max_age') == 0 @@ -2091,7 +2091,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): def run_pexpect_side_effect(*args, **kwargs): args, cwd, env, stdout = args - shade_config = open(env['OS_CLIENT_CONFIG_FILE'], 'rb').read() + shade_config = open(env['OS_CLIENT_CONFIG_FILE'], 'r').read() assert '\n'.join([ 'clouds:', ' devstack:', @@ -2131,7 +2131,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): def run_pexpect_side_effect(*args, **kwargs): args, cwd, env, stdout = args - config = ConfigParser.ConfigParser() + config = configparser.ConfigParser() config.read(env['FOREMAN_INI_PATH']) assert config.get('foreman', 'url') == 'https://example.org' assert config.get('foreman', 'user') == 'bob' @@ -2168,7 +2168,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): def run_pexpect_side_effect(*args, **kwargs): args, cwd, env, stdout = args - config = ConfigParser.ConfigParser() + config = configparser.ConfigParser() config.read(env['CLOUDFORMS_INI_PATH']) assert config.get('cloudforms', 'url') == 'https://example.org' assert config.get('cloudforms', 'username') == 'bob' @@ -2276,7 +2276,7 @@ def test_os_open_oserror(): def test_fcntl_ioerror(): - with pytest.raises(IOError): + with pytest.raises(OSError): fcntl.flock(99999, fcntl.LOCK_EX) diff --git a/awx/main/tests/unit/test_views.py b/awx/main/tests/unit/test_views.py index 1cab239698..0454b23d7d 100644 --- a/awx/main/tests/unit/test_views.py +++ b/awx/main/tests/unit/test_views.py @@ -1,5 +1,5 @@ import pytest -import mock +from unittest import mock # Django REST Framework from rest_framework import exceptions diff --git a/awx/main/tests/unit/utils/test_common.py b/awx/main/tests/unit/utils/test_common.py index b8255faa5d..d2a1fd8b0e 100644 --- a/awx/main/tests/unit/utils/test_common.py +++ b/awx/main/tests/unit/utils/test_common.py @@ -7,7 +7,7 @@ import pytest from uuid import uuid4 import json import yaml -import mock +from unittest import mock from backports.tempfile import TemporaryDirectory from django.conf import settings diff --git a/awx/main/tests/unit/utils/test_encryption.py b/awx/main/tests/unit/utils/test_encryption.py index b269c42b27..f8fa26755b 100644 --- a/awx/main/tests/unit/utils/test_encryption.py +++ b/awx/main/tests/unit/utils/test_encryption.py @@ -10,14 +10,14 @@ def test_encrypt_field(): field = Setting(pk=123, value='ANSIBLE') encrypted = field.value = encryption.encrypt_field(field, 'value') assert encryption.decrypt_field(field, 'value') == 'ANSIBLE' - assert encrypted.startswith('$encrypted$AESCBC$') + assert encrypted.startswith('$encrypted$UTF8$AESCBC$') def test_encrypt_field_without_pk(): field = Setting(value='ANSIBLE') encrypted = field.value = encryption.encrypt_field(field, 'value') assert encryption.decrypt_field(field, 'value') == 'ANSIBLE' - assert encrypted.startswith('$encrypted$AESCBC$') + assert encrypted.startswith('$encrypted$UTF8$AESCBC$') def test_encrypt_field_with_unicode_string(): @@ -28,19 +28,11 @@ def test_encrypt_field_with_unicode_string(): assert encrypted.startswith('$encrypted$UTF8$AESCBC$') -def 
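The expected prefixes in test_encryption.py change from `$encrypted$AESCBC$` to `$encrypted$UTF8$AESCBC$`: with the encryption.py rewrite below, every value is coerced to text and encoded as UTF-8 before encryption, so the `UTF8` marker is emitted unconditionally instead of only for `unicode` inputs (and the `skip_utf8` escape hatch, with its test, is deleted). The tail of the new `encrypt_field` builds the token like this:

```python
import base64

def build_token(encrypted):
    # mirrors the new encrypt_field tail: the UTF8 marker is always present
    b64data = base64.b64encode(encrypted).decode('ascii')
    return '$'.join(['$encrypted', 'UTF8', 'AESCBC', b64data])

assert build_token(b'ciphertext').startswith('$encrypted$UTF8$AESCBC$')
```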
test_encrypt_field_force_disable_unicode(): - value = u"NothingSpecial" - field = Setting(value=value) - encrypted = field.value = encryption.encrypt_field(field, 'value', skip_utf8=True) - assert "UTF8" not in encrypted - assert encryption.decrypt_field(field, 'value') == value - - def test_encrypt_subfield(): field = Setting(value={'name': 'ANSIBLE'}) encrypted = field.value = encryption.encrypt_field(field, 'value', subfield='name') assert encryption.decrypt_field(field, 'value', subfield='name') == 'ANSIBLE' - assert encrypted.startswith('$encrypted$AESCBC$') + assert encrypted.startswith('$encrypted$UTF8$AESCBC$') def test_encrypt_field_with_ask(): diff --git a/awx/main/tests/unit/utils/test_event_filter.py b/awx/main/tests/unit/utils/test_event_filter.py index fb8f4fa144..e69dd3c507 100644 --- a/awx/main/tests/unit/utils/test_event_filter.py +++ b/awx/main/tests/unit/utils/test_event_filter.py @@ -1,10 +1,9 @@ import pytest import base64 import json -from StringIO import StringIO - -from six.moves import xrange +from io import StringIO +from django.utils.encoding import smart_bytes, smart_str from awx.main.utils import OutputEventFilter, OutputVerboseFilter MAX_WIDTH = 78 @@ -12,14 +11,14 @@ EXAMPLE_UUID = '890773f5-fe6d-4091-8faf-bdc8021d65dd' def write_encoded_event_data(fileobj, data): - b64data = base64.b64encode(json.dumps(data)) + b64data = smart_str(base64.b64encode(smart_bytes(json.dumps(data)))) # pattern corresponding to OutputEventFilter expectation - fileobj.write(u'\x1b[K') - for offset in xrange(0, len(b64data), MAX_WIDTH): + fileobj.write('\x1b[K') + for offset in range(0, len(b64data), MAX_WIDTH): chunk = b64data[offset:offset + MAX_WIDTH] - escaped_chunk = u'{}\x1b[{}D'.format(chunk, len(chunk)) + escaped_chunk = '{}\x1b[{}D'.format(chunk, len(chunk)) fileobj.write(escaped_chunk) - fileobj.write(u'\x1b[K') + fileobj.write('\x1b[K') @pytest.fixture diff --git a/awx/main/tests/unit/utils/test_filters.py b/awx/main/tests/unit/utils/test_filters.py index cd82e0f1f0..374dfbd86c 100644 --- a/awx/main/tests/unit/utils/test_filters.py +++ b/awx/main/tests/unit/utils/test_filters.py @@ -1,7 +1,7 @@ # Python import pytest -import mock +from unittest import mock # AWX from awx.main.utils.filters import SmartFilter, ExternalLoggerEnabled @@ -120,7 +120,7 @@ class TestSmartFilterQueryFromString(): def test_invalid_filter_strings(self, mock_get_host_model, filter_string): with pytest.raises(RuntimeError) as e: SmartFilter.query_from_string(filter_string) - assert e.value.message == u"Invalid query " + filter_string + assert str(e.value) == u"Invalid query " + filter_string @pytest.mark.parametrize("filter_string", [ 'created_by__password__icontains=pbkdf2' diff --git a/awx/main/tests/unit/utils/test_handlers.py b/awx/main/tests/unit/utils/test_handlers.py index 9dd06189c0..6fa9b1f992 100644 --- a/awx/main/tests/unit/utils/test_handlers.py +++ b/awx/main/tests/unit/utils/test_handlers.py @@ -1,15 +1,16 @@ # -*- coding: utf-8 -*- import base64 -import cStringIO import logging import socket import datetime from dateutil.tz import tzutc +from io import StringIO from uuid import uuid4 -import mock +from unittest import mock from django.conf import LazySettings +from django.utils.encoding import smart_str import pytest import requests from requests_futures.sessions import FuturesSession @@ -52,7 +53,7 @@ def connection_error_adapter(): @pytest.fixture def fake_socket(tmpdir_factory, request): - sok = socket._socketobject + sok = socket.socket sok.send = mock.MagicMock() sok.connect = 
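test_event_filter.py shows the `base64` boundary: on Python 3 `b64encode` accepts and returns `bytes`, hence the `smart_bytes`/`smart_str` round-trip before writing into a text stream. The standard-library equivalent:

```python
import base64
import json

data = {'uuid': '890773f5-fe6d-4091-8faf-bdc8021d65dd'}

raw = json.dumps(data).encode('utf-8')           # smart_bytes: str -> bytes
b64data = base64.b64encode(raw).decode('ascii')  # smart_str: bytes -> str
assert json.loads(base64.b64decode(b64data)) == data
```

The `fake_socket` fixture change is the same kind of rename as elsewhere: Python 3 dropped the private `socket._socketobject` in favor of `socket.socket`.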
mock.MagicMock() sok.setblocking = mock.MagicMock() @@ -255,7 +256,7 @@ def test_https_logging_handler_connection_error(connection_error_adapter, handler.setFormatter(LogstashFormatter()) handler.session.mount('http://', connection_error_adapter) - buff = cStringIO.StringIO() + buff = StringIO() logging.getLogger('awx.main.utils.handlers').addHandler( logging.StreamHandler(buff) ) @@ -308,7 +309,7 @@ def test_https_logging_handler_emit_logstash_with_creds(https_adapter, assert len(https_adapter.requests) == 1 request = https_adapter.requests[0] - assert request.headers['Authorization'] == 'Basic %s' % base64.b64encode("user:pass") + assert request.headers['Authorization'] == 'Basic %s' % smart_str(base64.b64encode(b"user:pass")) def test_https_logging_handler_emit_splunk_with_creds(https_adapter, @@ -331,7 +332,7 @@ def test_https_logging_handler_emit_splunk_with_creds(https_adapter, ({u'测试键': u'测试值'}, '{"测试键": "测试值"}'), ]) def test_encode_payload_for_socket(payload, encoded_payload): - assert _encode_payload_for_socket(payload) == encoded_payload + assert _encode_payload_for_socket(payload).decode('utf-8') == encoded_payload def test_udp_handler_create_socket_at_init(): diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index 984f6c0d88..77f3c8e879 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -10,15 +10,14 @@ import os import re import subprocess import stat -import urllib -import urlparse +import urllib.parse import threading import contextlib import tempfile import six import psutil from functools import reduce, wraps -from StringIO import StringIO +from io import StringIO from decimal import Decimal @@ -95,11 +94,12 @@ def to_python_boolean(value, allow_none=False): def region_sorting(region): - if region[1].lower() == 'all': - return -1 - elif region[1].lower().startswith('us'): - return 0 + # python3's removal of sorted(cmp=...) is _stupid_ + if region[1].lower() == 'all': + return '' + elif region[1].lower().startswith('us'): return region[1] + return 'ZZZ' + str(region[1]) def camelcase_to_underscore(s): @@ -177,7 +177,7 @@ def get_ansible_version(): try: proc = subprocess.Popen(['ansible', '--version'], stdout=subprocess.PIPE) - result = proc.communicate()[0] + result = smart_str(proc.communicate()[0]) return result.split('\n')[0].replace('ansible', '').strip() except Exception: return 'unknown' @@ -191,7 +191,7 @@ def get_ssh_version(): try: proc = subprocess.Popen(['ssh', '-V'], stderr=subprocess.PIPE) - result = proc.communicate()[1] + result = smart_str(proc.communicate()[1]) return result.split(" ")[0].split("_")[1] except Exception: return 'unknown' @@ -255,7 +255,7 @@ def update_scm_url(scm_type, url, username=True, password=True, raise ValueError(_('Unsupported SCM type "%s"') % str(scm_type)) if not url.strip(): return '' - parts = urlparse.urlsplit(url) + parts = urllib.parse.urlsplit(url) try: parts.port except ValueError: @@ -281,14 +281,14 @@ def update_scm_url(scm_type, url, username=True, password=True, modified_url = '@'.join(filter(None, [userpass, hostpath])) # git+ssh scheme identifies URLs that should be converted back to # SCP style before passed to git module. - parts = urlparse.urlsplit('git+ssh://%s' % modified_url) + parts = urllib.parse.urlsplit('git+ssh://%s' % modified_url) # Handle local paths specified without file scheme (e.g. /path/to/foo). # Only supported by git and hg. 
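In utils/common.py, `region_sorting` is recast as a key function because Python 3 removed `sorted(cmp=...)`: mapping 'all' to `''` sorts it first, US regions keep their own label, and the `'ZZZ'` prefix pushes everything else after them (which works as long as the real labels compare above the US ones, as the uppercase provider labels do). A standalone rendering with made-up regions:

```python
def region_key(region):
    # region is a (value, label) pair; sort 'All' first, then US, then the rest
    label = region[1].lower()
    if label == 'all':
        return ''
    if label.startswith('us'):
        return region[1]
    return 'ZZZ' + str(region[1])

regions = [('eu-1', 'EU (Ireland)'), ('all', 'All'), ('us-1', 'US East')]
ordered = [label for _, label in sorted(regions, key=region_key)]
assert ordered == ['All', 'US East', 'EU (Ireland)']
```

The `smart_str` wrappers around `proc.communicate()` in the same hunk exist because subprocess pipes hand back `bytes` on Python 3.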
     elif scm_type in ('git', 'hg'):
         if not url.startswith('/'):
-            parts = urlparse.urlsplit('file:///%s' % url)
+            parts = urllib.parse.urlsplit('file:///%s' % url)
         else:
-            parts = urlparse.urlsplit('file://%s' % url)
+            parts = urllib.parse.urlsplit('file://%s' % url)
     else:
         raise ValueError(_('Invalid %s URL') % scm_type)
@@ -334,14 +334,14 @@ def update_scm_url(scm_type, url, username=True, password=True,
         netloc_password = ''

     if netloc_username and parts.scheme != 'file' and scm_type != "insights":
-        netloc = u':'.join([urllib.quote(x,safe='') for x in (netloc_username, netloc_password) if x])
+        netloc = u':'.join([urllib.parse.quote(x,safe='') for x in (netloc_username, netloc_password) if x])
     else:
         netloc = u''
     netloc = u'@'.join(filter(None, [netloc, parts.hostname]))
     if parts.port:
         netloc = u':'.join([netloc, six.text_type(parts.port)])
-    new_url = urlparse.urlunsplit([parts.scheme, netloc, parts.path,
-                                   parts.query, parts.fragment])
+    new_url = urllib.parse.urlunsplit([parts.scheme, netloc, parts.path,
+                                       parts.query, parts.fragment])
     if scp_format and parts.scheme == 'git+ssh':
         new_url = new_url.replace('git+ssh://', '', 1).replace('/', ':', 1)
     return new_url
@@ -387,7 +387,7 @@ def _convert_model_field_for_display(obj, field_name, password_fields=None):
             field_val = json.dumps(field_val, ensure_ascii=False)
         except Exception:
             pass
-    if type(field_val) not in (bool, int, type(None), long):
+    if type(field_val) not in (bool, int, type(None)):
         field_val = smart_str(field_val)
     return field_val
@@ -558,7 +558,7 @@ def prefetch_page_capabilities(model, page, prefetch_list, user):
         display_method = None
         if type(prefetch_entry) is dict:
-            display_method = prefetch_entry.keys()[0]
+            display_method = list(prefetch_entry.keys())[0]
             paths = prefetch_entry[display_method]
         else:
             paths = prefetch_entry
@@ -699,7 +699,7 @@ def get_mem_capacity():
         forkmem = 100

     mem = psutil.virtual_memory().total
-    return (mem, max(1, ((mem / 1024 / 1024) - 2048) / forkmem))
+    return (mem, max(1, ((mem // 1024 // 1024) - 2048) // forkmem))


 def get_system_task_capacity(scale=Decimal(1.0), cpu_capacity=None, mem_capacity=None):
@@ -966,7 +966,7 @@ def get_custom_venv_choices():
     custom_venv_path = settings.BASE_VENV_PATH
     if os.path.exists(custom_venv_path):
         return [
-            os.path.join(custom_venv_path, x.decode('utf-8'), '')
+            os.path.join(custom_venv_path, x, '')
             for x in os.listdir(custom_venv_path)
             if x != 'awx' and
             os.path.isdir(os.path.join(custom_venv_path, x)) and
@@ -999,6 +999,7 @@ class OutputEventFilter(object):
         pass

     def write(self, data):
+        data = smart_str(data)
         self._buffer.write(data)

         # keep a sliding window of the last chunk written so we can detect
@@ -1093,7 +1094,7 @@ def is_ansible_variable(key):
 def extract_ansible_vars(extra_vars):
     extra_vars = parse_yaml_or_json(extra_vars)
     ansible_vars = set([])
-    for key in extra_vars.keys():
+    for key in list(extra_vars.keys()):
         if is_ansible_variable(key):
             extra_vars.pop(key)
             ansible_vars.add(key)
diff --git a/awx/main/utils/encryption.py b/awx/main/utils/encryption.py
index b8c0cc45a0..92278e94a7 100644
--- a/awx/main/utils/encryption.py
+++ b/awx/main/utils/encryption.py
@@ -6,7 +6,7 @@ from collections import namedtuple
 import six
 from cryptography.fernet import Fernet, InvalidToken
 from cryptography.hazmat.backends import default_backend
-from django.utils.encoding import smart_str
+from django.utils.encoding import smart_str, smart_bytes


 __all__ = ['get_encryption_key',
@@ -47,10 +47,10 @@ def get_encryption_key(field_name, pk=None):
     '''
     from django.conf import settings
     h = hashlib.sha512()
-    h.update(settings.SECRET_KEY)
+    h.update(smart_bytes(settings.SECRET_KEY))
     if pk is not None:
-        h.update(str(pk))
-    h.update(field_name)
+        h.update(smart_bytes(str(pk)))
+    h.update(smart_bytes(field_name))
     return base64.urlsafe_b64encode(h.digest())
@@ -59,29 +59,23 @@ def encrypt_value(value, pk=None):
     return encrypt_field(TransientField(pk=pk, value=value), 'value')


-def encrypt_field(instance, field_name, ask=False, subfield=None, skip_utf8=False):
+def encrypt_field(instance, field_name, ask=False, subfield=None):
     '''
     Return content of the given instance and field name encrypted.
     '''
     value = getattr(instance, field_name)
     if isinstance(value, dict) and subfield is not None:
         value = value[subfield]
+    if value is None:
+        return None
+    value = smart_str(value)
     if not value or value.startswith('$encrypted$') or (ask and value == 'ASK'):
         return value
-    if skip_utf8:
-        utf8 = False
-    else:
-        utf8 = type(value) == six.text_type
-    value = smart_str(value)
     key = get_encryption_key(field_name, getattr(instance, 'pk', None))
     f = Fernet256(key)
-    encrypted = f.encrypt(value)
-    b64data = base64.b64encode(encrypted)
-    tokens = ['$encrypted', 'AESCBC', b64data]
-    if utf8:
-        # If the value to encrypt is utf-8, we need to add a marker so we
-        # know to decode the data when it's decrypted later
-        tokens.insert(1, 'UTF8')
+    encrypted = f.encrypt(smart_bytes(value))
+    b64data = smart_str(base64.b64encode(encrypted))
+    tokens = ['$encrypted', 'UTF8', 'AESCBC', b64data]
     return '$'.join(tokens)
@@ -97,10 +91,7 @@ def decrypt_value(encryption_key, value):
     encrypted = base64.b64decode(b64data)
     f = Fernet256(encryption_key)
     value = f.decrypt(encrypted)
-    # If the encrypted string contained a UTF8 marker, decode the data
-    if utf8:
-        value = value.decode('utf-8')
-    return value
+    return smart_str(value)


 def decrypt_field(instance, field_name, subfield=None):
@@ -110,12 +101,13 @@ def decrypt_field(instance, field_name, subfield=None):
     value = getattr(instance, field_name)
     if isinstance(value, dict) and subfield is not None:
         value = value[subfield]
+    value = smart_str(value)
     if not value or not value.startswith('$encrypted$'):
         return value
     key = get_encryption_key(field_name, getattr(instance, 'pk', None))
     try:
-        return decrypt_value(key, value)
+        return smart_str(decrypt_value(key, value))
     except InvalidToken:
         logger.exception(
             "Failed to decrypt `%s(pk=%s).%s`; if you've recently restored from "
diff --git a/awx/main/utils/filters.py b/awx/main/utils/filters.py
index eaf8c805b6..9cb1c11416 100644
--- a/awx/main/utils/filters.py
+++ b/awx/main/utils/filters.py
@@ -8,7 +8,7 @@ from pyparsing import (
     CharsNotIn,
     ParseException,
 )
-from logging import Filter, _levelNames
+from logging import Filter, _nameToLevel

 import six
@@ -91,8 +91,7 @@ class ExternalLoggerEnabled(Filter):
             return False

         # Level enablement
-        if record.levelno < _levelNames[self.lvl]:
-            # logging._levelNames -> logging._nameToLevel in python 3
+        if record.levelno < _nameToLevel[self.lvl]:
             return False

         # Logger type enablement
@@ -206,7 +205,7 @@ class SmartFilter(object):
             elif type(last_v) is list:
                 last_v.append(new_kv)
             elif type(last_v) is dict:
-                last_kv[last_kv.keys()[0]] = new_kv
+                last_kv[list(last_kv.keys())[0]] = new_kv
             last_v = new_v
             last_kv = new_kv
@@ -216,7 +215,7 @@ class SmartFilter(object):
             if type(last_v) is list:
                 last_v.append(v)
             elif type(last_v) is dict:
-                last_kv[last_kv.keys()[0]] = v
+                last_kv[list(last_kv.keys())[0]] = v

         return (assembled_k, assembled_v)
diff --git a/awx/main/utils/handlers.py b/awx/main/utils/handlers.py
index 3972815dc5..0c77e65f09 100644
--- a/awx/main/utils/handlers.py
+++ b/awx/main/utils/handlers.py
@@ -6,10 +6,10 @@ import logging
 import json
 import requests
 import time
-import urlparse
 import socket
 import select
 import six
+from urllib import parse as urlparse
 from concurrent.futures import ThreadPoolExecutor
 from requests.exceptions import RequestException
@@ -334,7 +334,7 @@ class AWXProxyHandler(logging.Handler):
         handler = self.get_handler(custom_settings=custom_settings, force_create=True)
         handler.setFormatter(LogstashFormatter())
         logger = logging.getLogger(__file__)
-        fn, lno, func = logger.findCaller()
+        fn, lno, func, _ = logger.findCaller()
         record = logger.makeRecord('awx', 10, fn, lno, 'AWX Connection Test', tuple(), None, func)
diff --git a/awx/main/utils/mem_inventory.py b/awx/main/utils/mem_inventory.py
index b2d1c0d691..fd406b91d9 100644
--- a/awx/main/utils/mem_inventory.py
+++ b/awx/main/utils/mem_inventory.py
@@ -171,7 +171,7 @@ class MemInventory(object):
             return self.all_group.all_groups[name]

     def delete_empty_groups(self):
-        for name, group in self.all_group.all_groups.items():
+        for name, group in list(self.all_group.all_groups.items()):
             if not group.children and not group.hosts and not group.variables:
                 logger.debug('Removing empty group %s', name)
                 for parent in group.parents:
@@ -236,7 +236,7 @@ def dict_to_mem_data(data, inventory=None):

     _meta = data.pop('_meta', {})

-    for k,v in data.iteritems():
+    for k,v in data.items():
         group = inventory.get_group(k)
         if not group:
             continue
@@ -246,7 +246,7 @@ def dict_to_mem_data(data, inventory=None):
         # Process hosts within a group.
         hosts = v.get('hosts', {})
         if isinstance(hosts, dict):
-            for hk, hv in hosts.iteritems():
+            for hk, hv in hosts.items():
                 host = inventory.get_host(hk)
                 if not host:
                     continue
@@ -303,7 +303,7 @@ def dict_to_mem_data(data, inventory=None):
             inventory.all_group.add_child_group(group)

     if _meta:
-        for k,v in inventory.all_group.all_hosts.iteritems():
+        for k,v in inventory.all_group.all_hosts.items():
             meta_hostvars = _meta['hostvars'].get(k, {})
             if isinstance(meta_hostvars, dict):
                 v.variables.update(meta_hostvars)
diff --git a/awx/main/utils/named_url_graph.py b/awx/main/utils/named_url_graph.py
index 1f813d8269..d48f14cfc7 100644
--- a/awx/main/utils/named_url_graph.py
+++ b/awx/main/utils/named_url_graph.py
@@ -125,8 +125,8 @@ class GraphNode(object):
             evolving_prefix = '__'.join(prefixes)
             for attr_name, attr_value in zip(stack[-1].fields, named_url_parts):
                 attr_name = ("__%s" % attr_name) if evolving_prefix else attr_name
-                if isinstance(attr_value, six.binary_type):
-                    attr_value = six.moves.urllib.parse.unquote(attr_value).decode(encoding='utf-8')
+                if isinstance(attr_value, str):
+                    attr_value = six.moves.urllib.parse.unquote(attr_value)
                 kwargs[evolving_prefix + attr_name] = attr_value
             idx += 1
             if stack[-1].counter >= len(stack[-1].adj_list):
@@ -304,7 +304,7 @@ def generate_graph(models):
             candidate_nodes[model].append([fields, fk_names])
         if model not in candidate_nodes:
             dead_ends.add(model)
-    candidate_nodes = candidate_nodes.items()
+    candidate_nodes = list(candidate_nodes.items())
     largest_graph = {}
     for configuration in _generate_configurations(candidate_nodes):
         candidate_graph = _generate_single_graph(configuration, dead_ends)
diff --git a/awx/main/validators.py b/awx/main/validators.py
index 53e15cecf2..879be056e5 100644
--- a/awx/main/validators.py
+++ b/awx/main/validators.py
@@ -117,7 +117,7 @@ def validate_pem(data, min_keys=0, max_keys=None, min_certs=0, max_certs=None):
             # Decoded key data starts with magic string (null-terminated), four byte
             # length field, followed by the ciphername -- if ciphername is anything
             # other than 'none' the key is encrypted.
-            pem_obj_info['key_enc'] = not bool(pem_obj_info['bin'].startswith('openssh-key-v1\x00\x00\x00\x00\x04none'))
+            pem_obj_info['key_enc'] = not bool(pem_obj_info['bin'].startswith(b'openssh-key-v1\x00\x00\x00\x00\x04none'))
         elif match.group('type') == 'ENCRYPTED PRIVATE KEY':
             pem_obj_info['key_enc'] = True
         elif pem_obj_info.get('key_type', ''):
diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py
index b882517b7d..8107ae29e4 100644
--- a/awx/settings/defaults.py
+++ b/awx/settings/defaults.py
@@ -40,7 +40,7 @@ def IS_TESTING(argv=None):

 if "pytest" in sys.modules:
-    import mock
+    from unittest import mock
     with mock.patch('__main__.__builtins__.dir', return_value=[]):
         import ldap
 else:
diff --git a/awx/settings/development.py b/awx/settings/development.py
index 3f219c6fe0..3fd61b899d 100644
--- a/awx/settings/development.py
+++ b/awx/settings/development.py
@@ -19,7 +19,7 @@ import mimetypes
 from split_settings.tools import optional, include

 # Load default settings.
-from defaults import *  # NOQA
+from .defaults import *  # NOQA

 # don't use memcache when running tests
 if "pytest" in sys.modules:
diff --git a/awx/settings/local_settings.py.docker_compose b/awx/settings/local_settings.py.docker_compose
index acc0a5cd39..bea9e4611f 100644
--- a/awx/settings/local_settings.py.docker_compose
+++ b/awx/settings/local_settings.py.docker_compose
@@ -12,39 +12,9 @@
 # MISC PROJECT SETTINGS
 ###############################################################################
 import os
-import urllib
+import urllib.parse
 import sys

-
-def patch_broken_pipe_error():
-    """Monkey Patch BaseServer.handle_error to not write
-    a stacktrace to stderr on broken pipe.
-    http://stackoverflow.com/a/22618740/362702"""
-    import sys
-    from SocketServer import BaseServer
-    from wsgiref import handlers
-
-    handle_error = BaseServer.handle_error
-    log_exception = handlers.BaseHandler.log_exception
-
-    def is_broken_pipe_error():
-        type, err, tb = sys.exc_info()
-        return "Connection reset by peer" in repr(err)
-
-    def my_handle_error(self, request, client_address):
-        if not is_broken_pipe_error():
-            handle_error(self, request, client_address)
-
-    def my_log_exception(self, exc_info):
-        if not is_broken_pipe_error():
-            log_exception(self, exc_info)
-
-    BaseServer.handle_error = my_handle_error
-    handlers.BaseHandler.log_exception = my_log_exception
-
-patch_broken_pipe_error()
-
-
 ADMINS = (
     # ('Your Name', 'your_email@domain.com'),
 )
@@ -83,7 +53,7 @@ if "pytest" in sys.modules:
 BROKER_URL = "amqp://{}:{}@{}/{}".format(os.environ.get("RABBITMQ_USER"),
                                          os.environ.get("RABBITMQ_PASS"),
                                          os.environ.get("RABBITMQ_HOST"),
-                                         urllib.quote(os.environ.get("RABBITMQ_VHOST", "/"), safe=''))
+                                         urllib.parse.quote(os.environ.get("RABBITMQ_VHOST", "/"), safe=''))

 CHANNEL_LAYERS = {
     'default': {'BACKEND': 'asgi_amqp.AMQPChannelLayer',
@@ -236,7 +206,7 @@ LOGGING['handlers']['management_playbooks'] = {'class': 'logging.NullHandler'}

 try:
     path = os.path.expanduser(os.path.expandvars('~/.ssh/id_rsa'))
-    TEST_SSH_KEY_DATA = file(path, 'rb').read()
+    TEST_SSH_KEY_DATA = open(path, 'rb').read()
 except IOError:
     TEST_SSH_KEY_DATA = ''
diff --git a/awx/settings/production.py b/awx/settings/production.py
index 77fe12156d..a03748daff 100644
--- a/awx/settings/production.py
+++ b/awx/settings/production.py
@@ -14,7 +14,7 @@ import traceback
 from split_settings.tools import optional, include

 # Load default settings.
-from defaults import *  # NOQA
+from .defaults import *  # NOQA

 DEBUG = False
 TEMPLATE_DEBUG = DEBUG
@@ -95,7 +95,7 @@ except IOError:
     try:
         e = None
         open(settings_file)
-    except IOError as e:
+    except IOError:
         pass
     if e and e.errno == errno.EACCES:
         SECRET_KEY = 'permission-denied'
diff --git a/awx/sso/__init__.py b/awx/sso/__init__.py
index aa65d65a11..a116066211 100644
--- a/awx/sso/__init__.py
+++ b/awx/sso/__init__.py
@@ -8,19 +8,23 @@ import threading
 xmlsec_init_lock = threading.Lock()
 xmlsec_initialized = False

-import dm.xmlsec.binding  # noqa
-original_xmlsec_initialize = dm.xmlsec.binding.initialize
-
-def xmlsec_initialize(*args, **kwargs):
-    global xmlsec_init_lock, xmlsec_initialized, original_xmlsec_initialize
-    with xmlsec_init_lock:
-        if not xmlsec_initialized:
-            original_xmlsec_initialize(*args, **kwargs)
-            xmlsec_initialized = True
-
-
-dm.xmlsec.binding.initialize = xmlsec_initialize
+#
+# TODO: THIS DOES NOT WORK IN PY3
+#
+#import dm.xmlsec.binding  # noqa
+#original_xmlsec_initialize = dm.xmlsec.binding.initialize
+#
+#
+#def xmlsec_initialize(*args, **kwargs):
+#    global xmlsec_init_lock, xmlsec_initialized, original_xmlsec_initialize
+#    with xmlsec_init_lock:
+#        if not xmlsec_initialized:
+#            original_xmlsec_initialize(*args, **kwargs)
+#            xmlsec_initialized = True
+#
+#
+#dm.xmlsec.binding.initialize = xmlsec_initialize

 default_app_config = 'awx.sso.apps.SSOConfig'
diff --git a/awx/sso/backends.py b/awx/sso/backends.py
index 7b4bcfbafc..4f4f9bea4f 100644
--- a/awx/sso/backends.py
+++ b/awx/sso/backends.py
@@ -40,11 +40,11 @@ logger = logging.getLogger('awx.sso.backends')

 class LDAPSettings(BaseLDAPSettings):

-    defaults = dict(BaseLDAPSettings.defaults.items() + {
+    defaults = dict(list(BaseLDAPSettings.defaults.items()) + list({
         'ORGANIZATION_MAP': {},
         'TEAM_MAP': {},
         'GROUP_TYPE_PARAMS': {},
-    }.items())
+    }.items()))

     def __init__(self, prefix='AUTH_LDAP_', defaults={}):
         super(LDAPSettings, self).__init__(prefix, defaults)
@@ -117,7 +117,7 @@ class LDAPBackend(BaseLDAPBackend):
                     raise ImproperlyConfigured(
                         "{} must be an {} instance.".format(setting_name, type_)
                     )
-            return super(LDAPBackend, self).authenticate(username, password)
+            return super(LDAPBackend, self).authenticate(None, username, password)
         except Exception:
             logger.exception("Encountered an error authenticating to LDAP")
             return None
@@ -198,7 +198,7 @@ class RADIUSBackend(BaseRADIUSBackend):
         if not feature_enabled('enterprise_auth'):
             logger.error("Unable to authenticate, license does not support RADIUS authentication")
             return None
-        return super(RADIUSBackend, self).authenticate(username, password)
+        return super(RADIUSBackend, self).authenticate(None, username, password)

     def get_user(self, user_id):
         if not django_settings.RADIUS_SERVER:
@@ -228,16 +228,16 @@ class TACACSPlusBackend(object):
         try:
             # Upstream TACACS+ client does not accept non-string, so convert if needed.
             auth = tacacs_plus.TACACSClient(
-                django_settings.TACACSPLUS_HOST.encode('utf-8'),
+                django_settings.TACACSPLUS_HOST,
                 django_settings.TACACSPLUS_PORT,
-                django_settings.TACACSPLUS_SECRET.encode('utf-8'),
+                django_settings.TACACSPLUS_SECRET,
                 timeout=django_settings.TACACSPLUS_SESSION_TIMEOUT,
             ).authenticate(
-                username.encode('utf-8'), password.encode('utf-8'),
+                username, password,
                 authen_type=tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL],
             )
         except Exception as e:
-            logger.exception("TACACS+ Authentication Error: %s" % (e.message,))
+            logger.exception("TACACS+ Authentication Error: %s" % str(e))
             return None
         if auth.valid:
             return _get_or_set_enterprise_user(username, password, 'tacacs+')
@@ -341,8 +341,10 @@ def _update_m2m_from_groups(user, ldap_user, rel, opts, remove=True):
             if ldap_user._get_groups().is_member_of(group_dn):
                 should_add = True
     if should_add:
+        user.save()
         rel.add(user)
     elif remove and user in rel.all():
+        user.save()
         rel.remove(user)
@@ -398,6 +400,7 @@ def on_populate_user(sender, **kwargs):
                                remove)

     # Update user profile to store LDAP DN.
+    user.save()
     profile = user.profile
     if profile.ldap_dn != ldap_user.dn:
         profile.ldap_dn = ldap_user.dn
diff --git a/awx/sso/conf.py b/awx/sso/conf.py
index dc9a777782..7938b3aae7 100644
--- a/awx/sso/conf.py
+++ b/awx/sso/conf.py
@@ -1,6 +1,6 @@
 # Python
 import collections
-import urlparse
+import urllib.parse as urlparse

 # Django
 from django.conf import settings
@@ -461,7 +461,7 @@ register(

 register(
     'RADIUS_SECRET',
-    field_class=fields.RADIUSSecretField,
+    field_class=fields.CharField,
     allow_blank=True,
     default='',
     label=_('RADIUS Secret'),
diff --git a/awx/sso/fields.py b/awx/sso/fields.py
index a240e368aa..c378891dc4 100644
--- a/awx/sso/fields.py
+++ b/awx/sso/fields.py
@@ -51,7 +51,7 @@ class DependsOnMixin():
         Then fall back to the raw value from the setting in the DB.
         """
         from django.conf import settings
-        dependent_key = iter(self.depends_on).next()
+        dependent_key = next(iter(self.depends_on))

         if self.context:
             request = self.context.get('request', None)
@@ -160,7 +160,7 @@ class AuthenticationBackendsField(fields.StringListField):
             if not required_feature or feature_enabled(required_feature):
                 if all([getattr(settings, rs, None) for rs in required_settings]):
                     continue
-            backends = filter(lambda x: x != backend, backends)
+            backends = [x for x in backends if x != backend]
         return backends
@@ -198,7 +198,8 @@ class LDAPConnectionOptionsField(fields.DictField):
         valid_options = dict([(v, k) for k, v in ldap.OPT_NAMES_DICT.items()])
         invalid_options = set(data.keys()) - set(valid_options.keys())
         if invalid_options:
-            options_display = json.dumps(list(invalid_options)).lstrip('[').rstrip(']')
+            invalid_options = sorted(list(invalid_options))
+            options_display = json.dumps(invalid_options).lstrip('[').rstrip(']')
             self.fail('invalid_options', invalid_options=options_display)
         # Convert named options to their integer constants.
         internal_data = {}
@@ -224,7 +225,7 @@ class LDAPDNListField(fields.StringListField):

     def __init__(self, **kwargs):
         super(LDAPDNListField, self).__init__(**kwargs)
-        self.validators.append(lambda dn: map(validate_ldap_dn, dn))
+        self.validators.append(lambda dn: list(map(validate_ldap_dn, dn)))

     def run_validation(self, data=empty):
         if not isinstance(data, (list, tuple)):
@@ -338,7 +339,7 @@ class LDAPSearchUnionField(fields.ListField):
         data = super(LDAPSearchUnionField, self).to_internal_value(data)
         if len(data) == 0:
             return None
-        if len(data) == 3 and isinstance(data[0], basestring):
+        if len(data) == 3 and isinstance(data[0], str):
             return self.ldap_search_field_class().run_validation(data)
         else:
             search_args = []
@@ -367,7 +368,8 @@ class LDAPUserAttrMapField(fields.DictField):
         data = super(LDAPUserAttrMapField, self).to_internal_value(data)
         invalid_attrs = (set(data.keys()) - self.valid_user_attrs)
         if invalid_attrs:
-            attrs_display = json.dumps(list(invalid_attrs)).lstrip('[').rstrip(']')
+            invalid_attrs = sorted(list(invalid_attrs))
+            attrs_display = json.dumps(invalid_attrs).lstrip('[').rstrip(']')
             self.fail('invalid_attrs', invalid_attrs=attrs_display)
         return data
@@ -432,7 +434,8 @@ class LDAPGroupTypeParamsField(fields.DictField, DependsOnMixin):
         invalid_keys = set(value.keys()) - set(inspect.getargspec(group_type_cls.__init__).args[1:])
         if invalid_keys:
-            keys_display = json.dumps(list(invalid_keys)).lstrip('[').rstrip(']')
+            invalid_keys = sorted(list(invalid_keys))
+            keys_display = json.dumps(invalid_keys).lstrip('[').rstrip(']')
             self.fail('invalid_keys', invalid_keys=keys_display)
         return value
@@ -491,13 +494,16 @@ class BaseDictWithChildField(fields.DictField):
                 continue
             elif key not in data:
                 missing_keys.add(key)
+        missing_keys = sorted(list(missing_keys))
         if missing_keys and (data or not self.allow_blank):
-            keys_display = json.dumps(list(missing_keys)).lstrip('[').rstrip(']')
+            missing_keys = sorted(list(missing_keys))
+            keys_display = json.dumps(missing_keys).lstrip('[').rstrip(']')
             self.fail('missing_keys', missing_keys=keys_display)
         if not self.allow_unknown_keys:
             invalid_keys = set(data.keys()) - set(self.child_fields.keys())
             if invalid_keys:
-                keys_display = json.dumps(list(invalid_keys)).lstrip('[').rstrip(']')
+                invalid_keys = sorted(list(invalid_keys))
+                keys_display = json.dumps(invalid_keys).lstrip('[').rstrip(']')
                 self.fail('invalid_keys', invalid_keys=keys_display)
         for k, v in data.items():
             child_field = self.child_fields.get(k, None)
@@ -544,21 +550,6 @@ class LDAPTeamMapField(fields.DictField):
     child = LDAPSingleTeamMapField()


-class RADIUSSecretField(fields.CharField):
-
-    def run_validation(self, data=empty):
-        value = super(RADIUSSecretField, self).run_validation(data)
-        if isinstance(value, unicode):
-            value = value.encode('utf-8')
-        return value
-
-    def to_internal_value(self, value):
-        value = super(RADIUSSecretField, self).to_internal_value(value)
-        if isinstance(value, unicode):
-            value = value.encode('utf-8')
-        return value
-
-
 class SocialMapStringRegexField(fields.CharField):

     def to_representation(self, value):
@@ -605,7 +596,7 @@ class SocialMapField(fields.ListField):
             return False
         elif value in fields.NullBooleanField.NULL_VALUES:
             return None
-        elif isinstance(value, (basestring, type(re.compile('')))):
+        elif isinstance(value, (str, type(re.compile('')))):
             return self.child.to_representation(value)
         else:
             self.fail('type_error', input_type=type(value))
@@ -619,7 +610,7 @@ class SocialMapField(fields.ListField):
             return False
         elif data in fields.NullBooleanField.NULL_VALUES:
             return None
-        elif isinstance(data, basestring):
+        elif isinstance(data, str):
             return self.child.run_validation(data)
         else:
             self.fail('type_error', input_type=type(data))
@@ -688,7 +679,8 @@ class SAMLOrgInfoField(fields.DictField):
             if not re.match(r'^[a-z]{2}(?:-[a-z]{2})??$', key, re.I):
                 invalid_keys.add(key)
         if invalid_keys:
-            keys_display = json.dumps(list(invalid_keys)).lstrip('[').rstrip(']')
+            invalid_keys = sorted(list(invalid_keys))
+            keys_display = json.dumps(invalid_keys).lstrip('[').rstrip(']')
             self.fail('invalid_lang_code', invalid_lang_codes=keys_display)
         return data
diff --git a/awx/sso/middleware.py b/awx/sso/middleware.py
index 015e8fdd09..4e1cf1b1aa 100644
--- a/awx/sso/middleware.py
+++ b/awx/sso/middleware.py
@@ -2,7 +2,7 @@
 # All Rights Reserved.
 # Python
-import urllib
+import urllib.parse

 # Six
 import six
@@ -40,7 +40,7 @@ class SocialAuthMiddleware(SocialAuthExceptionMiddleware):
         # see: https://github.com/ansible/tower/issues/1979
         utils.BACKENDS = settings.AUTHENTICATION_BACKENDS
         token_key = request.COOKIES.get('token', '')
-        token_key = urllib.quote(urllib.unquote(token_key).strip('"'))
+        token_key = urllib.parse.quote(urllib.parse.unquote(token_key).strip('"'))

         if not hasattr(request, 'successful_authenticator'):
             request.successful_authenticator = None
diff --git a/awx/sso/migrations/0002_expand_provider_options.py b/awx/sso/migrations/0002_expand_provider_options.py
index aff1b3d6f1..600b244efc 100644
--- a/awx/sso/migrations/0002_expand_provider_options.py
+++ b/awx/sso/migrations/0002_expand_provider_options.py
@@ -14,6 +14,6 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='userenterpriseauth',
             name='provider',
-            field=models.CharField(max_length=32, choices=[(b'radius', 'RADIUS'), (b'tacacs+', 'TACACS+'), (b'saml', 'SAML')]),
+            field=models.CharField(max_length=32, choices=[('radius', 'RADIUS'), ('tacacs+', 'TACACS+'), ('saml', 'SAML')]),
         ),
     ]
diff --git a/awx/sso/tests/functional/test_get_or_set_enterprise_user.py b/awx/sso/tests/functional/test_get_or_set_enterprise_user.py
index 2cc914667c..b15c0b4e9a 100644
--- a/awx/sso/tests/functional/test_get_or_set_enterprise_user.py
+++ b/awx/sso/tests/functional/test_get_or_set_enterprise_user.py
@@ -1,6 +1,6 @@
 # Python
 import pytest
-import mock
+from unittest import mock

 # Tower
 from awx.sso.backends import _get_or_set_enterprise_user
diff --git a/awx/sso/tests/functional/test_pipeline.py b/awx/sso/tests/functional/test_pipeline.py
index 57a6eed7bb..1552aa10f7 100644
--- a/awx/sso/tests/functional/test_pipeline.py
+++ b/awx/sso/tests/functional/test_pipeline.py
@@ -1,7 +1,7 @@
 import pytest
-import mock
 import re

+from unittest import mock

 from awx.sso.pipeline import (
     update_user_orgs,
diff --git a/awx/sso/tests/unit/test_fields.py b/awx/sso/tests/unit/test_fields.py
index f6afabc5ba..fbd3a04781 100644
--- a/awx/sso/tests/unit/test_fields.py
+++ b/awx/sso/tests/unit/test_fields.py
@@ -1,6 +1,6 @@
 import pytest

-import mock
+from unittest import mock

 from rest_framework.exceptions import ValidationError
@@ -37,7 +37,7 @@ class TestSAMLOrgAttrField():
         ({'remove': True, 'saml_attr': False}, ValidationError('Not a valid string.')),
         ({'remove': True, 'saml_attr': False, 'foo': 'bar', 'gig': 'ity'},
-         ValidationError('Invalid key(s): "gig", "foo".')),
+         ValidationError('Invalid key(s): "foo", "gig".')),
         ({'remove_admins': True, 'saml_admin_attr': False}, ValidationError('Not a valid string.')),
         ({'remove_admins': 'blah', 'saml_admin_attr': 'foobar'},
diff --git a/awx/sso/tests/unit/test_tacacsplus.py b/awx/sso/tests/unit/test_tacacsplus.py
index 7776baf3dd..e048078571 100644
--- a/awx/sso/tests/unit/test_tacacsplus.py
+++ b/awx/sso/tests/unit/test_tacacsplus.py
@@ -1,4 +1,4 @@
-import mock
+from unittest import mock


 def test_empty_host_fails_auth(tacacsplus_backend):
diff --git a/awx/sso/views.py b/awx/sso/views.py
index 5fb993a794..9a95767825 100644
--- a/awx/sso/views.py
+++ b/awx/sso/views.py
@@ -2,7 +2,7 @@
 # All Rights Reserved.
 # Python
-import urllib
+import urllib.parse
 import logging

 # Django
@@ -24,7 +24,7 @@ class BaseRedirectView(RedirectView):

     def get_redirect_url(self, *args, **kwargs):
         last_path = self.request.COOKIES.get('lastPath', '')
-        last_path = urllib.quote(urllib.unquote(last_path).strip('"'))
+        last_path = urllib.parse.quote(urllib.parse.unquote(last_path).strip('"'))
         url = reverse('ui:index')
         if last_path:
             return '%s#%s' % (url, last_path)
@@ -45,7 +45,7 @@ class CompleteView(BaseRedirectView):
             response.set_cookie('userLoggedIn', 'true')
             current_user = UserSerializer(self.request.user)
             current_user = JSONRenderer().render(current_user.data)
-            current_user = urllib.quote('%s' % current_user, '')
+            current_user = urllib.parse.quote('%s' % current_user, '')
             response.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None)

         return response
diff --git a/awx/ui/fields.py b/awx/ui/fields.py
index 20bddb39f1..32e08a12c1 100644
--- a/awx/ui/fields.py
+++ b/awx/ui/fields.py
@@ -3,6 +3,7 @@

 # Python
 import base64
+import binascii
 import re

 # Django
@@ -38,6 +39,6 @@ class CustomLogoField(fields.CharField):
         b64data = match.group(1)
         try:
             base64.b64decode(b64data)
-        except TypeError:
+        except (TypeError, binascii.Error):
             self.fail('invalid_data')
         return data
diff --git a/docs/licenses/MarkupSafe.txt b/docs/licenses/MarkupSafe.txt
new file mode 100644
index 0000000000..5d2693890d
--- /dev/null
+++ b/docs/licenses/MarkupSafe.txt
@@ -0,0 +1,33 @@
+Copyright (c) 2010 by Armin Ronacher and contributors. See AUTHORS
+for more details.
+
+Some rights reserved.
+
+Redistribution and use in source and binary forms of the software as well
+as documentation, with or without modification, are permitted provided
+that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above
+  copyright notice, this list of conditions and the following
+  disclaimer in the documentation and/or other materials provided
+  with the distribution.
+
+* The names of the contributors may not be used to endorse or
+  promote products derived from this software without specific
+  prior written permission.
+
+THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.
diff --git a/docs/licenses/backports.functools-lru-cache.txt b/docs/licenses/backports.functools-lru-cache.txt
deleted file mode 100644
index 217d55c0d6..0000000000
--- a/docs/licenses/backports.functools-lru-cache.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-Copyright Jason R. Coombs
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file diff --git a/docs/licenses/functools32.txt b/docs/licenses/functools32.txt deleted file mode 100644 index 43388e7e13..0000000000 --- a/docs/licenses/functools32.txt +++ /dev/null @@ -1,289 +0,0 @@ -A. HISTORY OF THE SOFTWARE -========================== - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. Guido remains Python's -principal author, although it includes many contributions from others. - -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations (now Zope -Corporation, see http://www.zope.com). In 2001, the Python Software -Foundation (PSF, see http://www.python.org/psf/) was formed, a -non-profit organization created specifically to own Python-related -Intellectual Property. Zope Corporation is a sponsoring member of -the PSF. - -All Python releases are Open Source (see http://www.opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases. - - Release Derived Year Owner GPL- - from compatible? (1) - - 0.9.0 thru 1.2 1991-1995 CWI yes - 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes - 1.6 1.5.2 2000 CNRI no - 2.0 1.6 2000 BeOpen.com no - 1.6.1 1.6 2001 CNRI yes (2) - 2.1 2.0+1.6.1 2001 PSF no - 2.0.1 2.0+1.6.1 2001 PSF yes - 2.1.1 2.1+2.0.1 2001 PSF yes - 2.2 2.1.1 2001 PSF yes - 2.1.2 2.1.1 2002 PSF yes - 2.1.3 2.1.2 2002 PSF yes - 2.2.1 2.2 2002 PSF yes - 2.2.2 2.2.1 2002 PSF yes - 2.2.3 2.2.2 2003 PSF yes - 2.3 2.2.2 2002-2003 PSF yes - 2.3.1 2.3 2002-2003 PSF yes - 2.3.2 2.3.1 2002-2003 PSF yes - 2.3.3 2.3.2 2002-2003 PSF yes - 2.3.4 2.3.3 2004 PSF yes - 2.3.5 2.3.4 2005 PSF yes - 2.4 2.3 2004 PSF yes - 2.4.1 2.4 2005 PSF yes - 2.4.2 2.4.1 2005 PSF yes - 2.4.3 2.4.2 2006 PSF yes - 2.4.4 2.4.3 2006 PSF yes - 2.5 2.4 2006 PSF yes - 2.5.1 2.5 2007 PSF yes - 2.5.2 2.5.1 2008 PSF yes - 2.5.3 2.5.2 2008 PSF yes - 2.6 2.5 2008 PSF yes - 2.6.1 2.6 2008 PSF yes - 2.6.2 2.6.1 2009 PSF yes - 2.6.3 2.6.2 2009 PSF yes - 2.6.4 2.6.3 2009 PSF yes - 2.6.5 2.6.4 2010 PSF yes - 3.0 2.6 2008 PSF yes - 3.0.1 3.0 2009 PSF yes - 3.1 3.0.1 2009 PSF yes - 3.1.1 3.1 2009 PSF yes - 3.1.2 3.1.1 2010 PSF yes - 3.1.3 3.1.2 2010 PSF yes - 3.1.4 3.1.3 2011 PSF yes - 3.2 3.1 2011 PSF yes - 3.2.1 3.2 2011 PSF yes - 3.2.2 3.2.1 2011 PSF yes - 3.2.3 3.2.2 2012 PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, - because its license has a choice of law clause. According to - CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 - is "not incompatible" with the GPL. - -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. 
TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python -alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 -------------------------------------------- - -BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 - -1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an -office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the -Individual or Organization ("Licensee") accessing and otherwise using -this software in source or binary form and its associated -documentation ("the Software"). - -2. Subject to the terms and conditions of this BeOpen Python License -Agreement, BeOpen hereby grants Licensee a non-exclusive, -royalty-free, world-wide license to reproduce, analyze, test, perform -and/or display publicly, prepare derivative works, distribute, and -otherwise use the Software alone or in any derivative version, -provided, however, that the BeOpen Python License is retained in the -Software, alone or in any derivative version prepared by Licensee. - -3. 
BeOpen is making the Software available to Licensee on an "AS IS" -basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE -SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS -AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY -DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -5. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -6. This License Agreement shall be governed by and interpreted in all -respects by the law of the State of California, excluding conflict of -law provisions. Nothing in this License Agreement shall be deemed to -create any relationship of agency, partnership, or joint venture -between BeOpen and Licensee. This License Agreement does not grant -permission to use BeOpen trademarks or trade names in a trademark -sense to endorse or promote products or services of Licensee, or any -third party. As an exception, the "BeOpen Python" logos available at -http://www.pythonlabs.com/logos.html may be used according to the -permissions granted on that web page. - -7. By copying, installing or otherwise using the software, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 ---------------------------------------- - -1. This LICENSE AGREEMENT is between the Corporation for National -Research Initiatives, having an office at 1895 Preston White Drive, -Reston, VA 20191 ("CNRI"), and the Individual or Organization -("Licensee") accessing and otherwise using Python 1.6.1 software in -source or binary form and its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, CNRI -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python 1.6.1 -alone or in any derivative version, provided, however, that CNRI's -License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) -1995-2001 Corporation for National Research Initiatives; All Rights -Reserved" are retained in Python 1.6.1 alone or in any derivative -version prepared by Licensee. Alternately, in lieu of CNRI's License -Agreement, Licensee may substitute the following text (omitting the -quotes): "Python 1.6.1 is made available subject to the terms and -conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the Internet using the following -unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the Internet -using the following URL: http://hdl.handle.net/1895.22/1013". - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python 1.6.1 or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python 1.6.1. - -4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" -basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. This License Agreement shall be governed by the federal -intellectual property law of the United States, including without -limitation the federal copyright law, and, to the extent such -U.S. federal law does not apply, by the law of the Commonwealth of -Virginia, excluding Virginia's conflict of law provisions. -Notwithstanding the foregoing, with regard to derivative works based -on Python 1.6.1 that incorporate non-separable material that was -previously distributed under the GNU General Public License (GPL), the -law of the Commonwealth of Virginia shall govern this License -Agreement only as to issues arising under or with respect to -Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this -License Agreement shall be deemed to create any relationship of -agency, partnership, or joint venture between CNRI and Licensee. This -License Agreement does not grant permission to use CNRI trademarks or -trade name in a trademark sense to endorse or promote products or -services of Licensee, or any third party. - -8. By clicking on the "ACCEPT" button where indicated, or by copying, -installing or otherwise using Python 1.6.1, Licensee agrees to be -bound by the terms and conditions of this License Agreement. - - ACCEPT - - -CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 --------------------------------------------------- - -Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, -The Netherlands. All rights reserved. - -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose and without fee is hereby granted, -provided that the above copyright notice appear in all copies and that -both that copyright notice and this permission notice appear in -supporting documentation, and that the name of Stichting Mathematisch -Centrum or CWI not be used in advertising or publicity pertaining to -distribution of the software without specific, written prior -permission. - -STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO -THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE -FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/docs/licenses/futures.txt b/docs/licenses/futures.txt deleted file mode 100644 index ba9ae16a62..0000000000 --- a/docs/licenses/futures.txt +++ /dev/null @@ -1,48 +0,0 @@ -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. 
This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python -alone or in any derivative version, provided, however, that PSF's -License Agreement and PSF's notice of copyright, i.e., "Copyright (c) -2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation; All Rights -Reserved" are retained in Python alone or in any derivative version -prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. diff --git a/docs/licenses/jinja2.txt b/docs/licenses/jinja2.txt new file mode 100644 index 0000000000..10145a2643 --- /dev/null +++ b/docs/licenses/jinja2.txt @@ -0,0 +1,31 @@ +Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+Some rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ * The names of the contributors may not be used to endorse or
+ promote products derived from this software without specific
+ prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
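Annotation (not part of the diff): the three license files deleted in this section — backports.functools-lru-cache.txt, functools32.txt, and futures.txt — belong to Python 2 backport packages whose functionality ships in the Python 3 standard library, so the move to python36 lets those dependencies (and their bundled licenses) be dropped; MarkupSafe.txt and jinja2.txt above cover newly added dependencies. A minimal, illustrative sketch of the stdlib replacements:

    from functools import lru_cache                     # replaces functools32 / backports.functools_lru_cache
    from concurrent.futures import ThreadPoolExecutor   # replaces the `futures` backport (already imported in awx/main/utils/handlers.py above)


    @lru_cache(maxsize=None)  # memoize a pure function with no backport dependency
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)


    with ThreadPoolExecutor(max_workers=2) as pool:
        futures = [pool.submit(fib, n) for n in (10, 20)]
        assert [f.result() for f in futures] == [55, 6765]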
diff --git a/docs/licenses/psphere.txt b/docs/licenses/psphere.txt
deleted file mode 100644
index f49a4e16e6..0000000000
--- a/docs/licenses/psphere.txt
+++ /dev/null
@@ -1,201 +0,0 @@
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!) The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
\ No newline at end of file
diff --git a/docs/licenses/python-saml.txt b/docs/licenses/python3-saml.txt
index 578413f8ea..dbbca9c6cb 100644
--- a/docs/licenses/python-saml.txt
+++ b/docs/licenses/python3-saml.txt
@@ -20,3 +20,4 @@
 HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
 AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/docs/licenses/suds.txt b/docs/licenses/suds.txt
deleted file mode 100644
index 32fa870edc..0000000000
--- a/docs/licenses/suds.txt
+++ /dev/null
@@ -1,165 +0,0 @@
-                   GNU LESSER GENERAL PUBLIC LICENSE
-                       Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
-
-  This version of the GNU Lesser General Public License incorporates
-the terms and conditions of version 3 of the GNU General Public
-License, supplemented by the additional permissions listed below.
-
-  0. Additional Definitions.
-
-  As used herein, "this License" refers to version 3 of the GNU Lesser
-General Public License, and the "GNU GPL" refers to version 3 of the GNU
-General Public License.
-
-  "The Library" refers to a covered work governed by this License,
-other than an Application or a Combined Work as defined below.
-
-  An "Application" is any work that makes use of an interface provided
-by the Library, but which is not otherwise based on the Library.
-Defining a subclass of a class defined by the Library is deemed a mode
-of using an interface provided by the Library.
-
-  A "Combined Work" is a work produced by combining or linking an
-Application with the Library. The particular version of the Library
-with which the Combined Work was made is also called the "Linked
-Version".
-
-  The "Minimal Corresponding Source" for a Combined Work means the
-Corresponding Source for the Combined Work, excluding any source code
-for portions of the Combined Work that, considered in isolation, are
-based on the Application, and not on the Linked Version.
-
-  The "Corresponding Application Code" for a Combined Work means the
-object code and/or source code for the Application, including any data
-and utility programs needed for reproducing the Combined Work from the
-Application, but excluding the System Libraries of the Combined Work.
-
-  1. Exception to Section 3 of the GNU GPL.
-
-  You may convey a covered work under sections 3 and 4 of this License
-without being bound by section 3 of the GNU GPL.
-
-  2. Conveying Modified Versions.
-
-  If you modify a copy of the Library, and, in your modifications, a
-facility refers to a function or data to be supplied by an Application
-that uses the facility (other than as an argument passed when the
-facility is invoked), then you may convey a copy of the modified
-version:
-
-   a) under this License, provided that you make a good faith effort to
-   ensure that, in the event an Application does not supply the
-   function or data, the facility still operates, and performs
-   whatever part of its purpose remains meaningful, or
-
-   b) under the GNU GPL, with none of the additional permissions of
-   this License applicable to that copy.
-
-  3. Object Code Incorporating Material from Library Header Files.
-
-  The object code form of an Application may incorporate material from
-a header file that is part of the Library. You may convey such object
-code under terms of your choice, provided that, if the incorporated
-material is not limited to numerical parameters, data structure
-layouts and accessors, or small macros, inline functions and templates
-(ten or fewer lines in length), you do both of the following:
-
-   a) Give prominent notice with each copy of the object code that the
-   Library is used in it and that the Library and its use are
-   covered by this License.
-
-   b) Accompany the object code with a copy of the GNU GPL and this license
-   document.
-
-  4. Combined Works.
-
-  You may convey a Combined Work under terms of your choice that,
-taken together, effectively do not restrict modification of the
-portions of the Library contained in the Combined Work and reverse
-engineering for debugging such modifications, if you also do each of
-the following:
-
-   a) Give prominent notice with each copy of the Combined Work that
-   the Library is used in it and that the Library and its use are
-   covered by this License.
-
-   b) Accompany the Combined Work with a copy of the GNU GPL and this license
-   document.
-
-   c) For a Combined Work that displays copyright notices during
-   execution, include the copyright notice for the Library among
-   these notices, as well as a reference directing the user to the
-   copies of the GNU GPL and this license document.
-
-   d) Do one of the following:
-
-       0) Convey the Minimal Corresponding Source under the terms of this
-       License, and the Corresponding Application Code in a form
-       suitable for, and under terms that permit, the user to
-       recombine or relink the Application with a modified version of
-       the Linked Version to produce a modified Combined Work, in the
-       manner specified by section 6 of the GNU GPL for conveying
-       Corresponding Source.
-
-       1) Use a suitable shared library mechanism for linking with the
-       Library. A suitable mechanism is one that (a) uses at run time
-       a copy of the Library already present on the user's computer
-       system, and (b) will operate properly with a modified version
-       of the Library that is interface-compatible with the Linked
-       Version.
-
-   e) Provide Installation Information, but only if you would otherwise
-   be required to provide such information under section 6 of the
-   GNU GPL, and only to the extent that such information is
-   necessary to install and execute a modified version of the
-   Combined Work produced by recombining or relinking the
-   Application with a modified version of the Linked Version. (If
-   you use option 4d0, the Installation Information must accompany
-   the Minimal Corresponding Source and Corresponding Application
-   Code. If you use option 4d1, you must provide the Installation
-   Information in the manner specified by section 6 of the GNU GPL
-   for conveying Corresponding Source.)
-
-  5. Combined Libraries.
-
-  You may place library facilities that are a work based on the
-Library side by side in a single library together with other library
-facilities that are not Applications and are not covered by this
-License, and convey such a combined library under terms of your
-choice, if you do both of the following:
-
-   a) Accompany the combined library with a copy of the same work based
-   on the Library, uncombined with any other library facilities,
-   conveyed under the terms of this License.
-
-   b) Give prominent notice with the combined library that part of it
-   is a work based on the Library, and explaining where to find the
-   accompanying uncombined form of the same work.
-
-  6. Revised Versions of the GNU Lesser General Public License.
-
-  The Free Software Foundation may publish revised and/or new versions
-of the GNU Lesser General Public License from time to time. Such new
-versions will be similar in spirit to the present version, but may
-differ in detail to address new problems or concerns.
-
-  Each version is given a distinguishing version number. If the
-Library as you received it specifies that a certain numbered version
-of the GNU Lesser General Public License "or any later version"
-applies to it, you have the option of following the terms and
-conditions either of that published version or of any later version
-published by the Free Software Foundation. If the Library as you
-received it does not specify a version number of the GNU Lesser
-General Public License, you may choose any version of the GNU Lesser
-General Public License ever published by the Free Software Foundation.
-
-  If the Library as you received it specifies that a proxy can decide
-whether future versions of the GNU Lesser General Public License shall
-apply, that proxy's public statement of acceptance of any version is
-permanent authorization for you to choose that version for the
-Library.
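The license file rename above tracks the swap from python-saml to python3-saml pinned in the requirements below. Both OneLogin distributions ship the same onelogin.saml2 package, so import paths are expected to survive the swap unchanged; a minimal sketch to confirm that, assuming python3-saml==1.4.0 is installed (the check itself is illustrative, not part of this change):

    # Hypothetical sanity check: python3-saml keeps the onelogin.saml2
    # namespace that python-saml used, so no import rewrites are needed.
    from onelogin.saml2.utils import OneLogin_Saml2_Utils

    # generate_unique_id() returns an identifier such as 'ONELOGIN_<hex>'.
    print(OneLogin_Saml2_Utils.generate_unique_id())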
diff --git a/requirements/requirements.in b/requirements/requirements.in
index a28a629bce..a170668161 100644
--- a/requirements/requirements.in
+++ b/requirements/requirements.in
@@ -5,6 +5,7 @@ boto==2.47.0
 channels==1.1.8
 celery==4.2.1
 daphne==1.3.0  # Last before backwards-incompatible channels 2 upgrade
+defusedxml==0.5.0  # py36 support https://github.com/tiran/defusedxml/pull/4
 Django==1.11.16
 django-auth-ldap==1.7.0
 django-crum==0.7.2
@@ -13,13 +14,14 @@ django-jsonfield==1.0.1
 django-oauth-toolkit==1.1.3
 django-polymorphic==2.0.2
 django-pglocks==1.0.2
-django-radius==1.1.0
+django-radius==1.3.3
 django-solo==1.1.3
 django-split-settings==0.3.0
 django-taggit==0.22.2
 djangorestframework==3.7.7
 djangorestframework-yaml==1.0.3
 irc==16.2
+jinja2==2.10
 jsonschema==2.6.0
 Markdown==2.6.11  # used for formatting API help
 ordereddict==1.1
@@ -32,7 +34,7 @@ python-dateutil==2.7.2  # contains support for TZINFO= parsing
 python-logstash==0.4.6
 python-memcached==1.59
 python-radius==1.0
-python-saml==2.4.0
+python3-saml==1.4.0
 social-auth-core==1.7.0
 social-auth-app-django==2.1.0
 redbaron==0.6.3
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index 40eeeff1fa..386b10b9da 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -22,7 +22,7 @@ channels==1.1.8
 constantly==15.1.0        # via twisted
 cryptography==2.3.1       # via requests
 daphne==1.3.0
-defusedxml==0.4.1         # via python-saml
+defusedxml==0.5.0         # via python3-saml
 django-auth-ldap==1.7.0
 django-crum==0.7.2
 django-extensions==2.0.0
@@ -30,7 +30,7 @@ django-jsonfield==1.0.1
 django-oauth-toolkit==1.1.3
 django-pglocks==1.0.2
 django-polymorphic==2.0.2
-django-radius==1.1.0
+django-radius==1.3.3
 django-solo==1.1.3
 django-split-settings==0.3.0
 django-taggit==0.22.2
@@ -53,11 +53,13 @@ jaraco.itertools==2.1.1   # via irc
 jaraco.logging==1.5.1     # via irc
 jaraco.stream==1.1.2      # via irc
 jaraco.text==1.10         # via irc, jaraco.collections
+jinja2==2.10
 jsonpickle==0.9.6         # via asgi-amqp
 jsonschema==2.6.0
 kombu==4.2.1              # via asgi-amqp, celery
 lxml==4.2.3
 markdown==2.6.11
+MarkupSafe==1.0           # via jinja2
 more-itertools==4.1.0     # via irc, jaraco.functools, jaraco.itertools
 msgpack-python==0.5.5     # via asgi-amqp
 netaddr==0.7.19           # via pyrad
@@ -73,14 +75,14 @@ pycparser==2.18           # via cffi
 pygerduty==0.37.0
 pyjwt==1.6.0              # via social-auth-core, twilio
 pyparsing==2.2.0
-pyrad==1.2                # via django-radius
+pyrad==2.1                # via django-radius
 python-dateutil==2.7.2
 python-ldap==3.1.0        # via django-auth-ldap
 python-logstash==0.4.6
 python-memcached==1.59
 python-openid==2.2.5      # via social-auth-core
 python-radius==1.0
-python-saml==2.4.0
+python3-saml==1.4.0
 pytz==2018.5              # via celery, django, irc, tempora, twilio
 pyyaml==3.12              # via djangorestframework-yaml
 redbaron==0.6.3
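The new jinja2 pin above drags in MarkupSafe as a transitive dependency, which is why requirements.txt gains the "MarkupSafe==1.0  # via jinja2" entry. A quick smoke test of the pinned pair under the new Python 3.6 virtualenv — a sketch for verification, not part of the change itself:

    # Smoke test (sketch) for the jinja2==2.10 / MarkupSafe==1.0 pins.
    import jinja2

    # Basic rendering behaves the same on Python 2 and 3.
    assert jinja2.Template('Hello {{ name }}!').render(name='AWX') == 'Hello AWX!'

    # Autoescaping is the code path that requires MarkupSafe, hence the
    # new transitive pin in requirements.txt.
    assert jinja2.Template('{{ v }}', autoescape=True).render(v='<b>') == '&lt;b&gt;'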
diff --git a/requirements/requirements_git.txt b/requirements/requirements_git.txt
index 24083287d6..9ba48b14ff 100644
--- a/requirements/requirements_git.txt
+++ b/requirements/requirements_git.txt
@@ -1,4 +1,4 @@
 git+https://github.com/ansible/ansiconv.git@tower_1.0.0#egg=ansiconv
-git+https://github.com/ansible/django-qsstats-magic.git@tower_0.7.2#egg=django-qsstats-magic
+git+https://github.com/ansible/django-qsstats-magic.git@py3#egg=django-qsstats-magic
 git+https://github.com/ryanpetrello/dm.xmlsec.binding.git@master#egg=dm.xmlsec.binding
 git+https://github.com/ansible/django-jsonbfield@fix-sqlite_serialization#egg=jsonbfield
diff --git a/tools/clusterdevel/bootstrap_minishift.sh b/tools/clusterdevel/bootstrap_minishift.sh
index 5f25838f53..d5536664a6 100755
--- a/tools/clusterdevel/bootstrap_minishift.sh
+++ b/tools/clusterdevel/bootstrap_minishift.sh
@@ -16,7 +16,6 @@
 fi
 #make awx-link
 python setup.py develop
-ln -s /awx_devel/tools/rdb.py /venv/awx/lib/python3.6/site-packages/rdb.py || true
 yes | cp -rf /awx_devel/tools/docker-compose/supervisor.conf /supervisor.conf
 
 # AWX bootstrapping
diff --git a/tools/data_generators/rbac_dummy_data_generator.py b/tools/data_generators/rbac_dummy_data_generator.py
index f310b9f2ed..ff2b9bce17 100755
--- a/tools/data_generators/rbac_dummy_data_generator.py
+++ b/tools/data_generators/rbac_dummy_data_generator.py
@@ -147,14 +147,14 @@ def spread(n, m):
     ret = []
     # At least one in each slot, split up the rest exponentially so the first
     # buckets contain a lot of entries
-    for i in xrange(m):
+    for i in range(m):
         if n > 0:
             ret.append(1)
             n -= 1
         else:
             ret.append(0)
 
-    for i in xrange(m):
+    for i in range(m):
         n_in_this_slot = n // 2
         n -= n_in_this_slot
         ret[i] += n_in_this_slot
@@ -239,7 +239,7 @@ def make_the_data():
 
     print('# Creating %d organizations' % n_organizations)
 
-    for i in xrange(n_organizations):
+    for i in range(n_organizations):
         sys.stdout.write('\r%d   ' % (i + 1))
         sys.stdout.flush()
         org, _ = Organization.objects.get_or_create(name='%s Organization %d' % (prefix, i))
diff --git a/tools/docker-compose/Dockerfile b/tools/docker-compose/Dockerfile
index 56382ee0d5..136f531f8a 100644
--- a/tools/docker-compose/Dockerfile
+++ b/tools/docker-compose/Dockerfile
@@ -11,7 +11,7 @@
 RUN yum -y localinstall http://download.postgresql.org/pub/repos/yum/9.4/redhat/
 RUN curl --silent --location https://rpm.nodesource.com/setup_8.x | bash -
 RUN yum -y update && yum -y install openssh-server ansible mg vim tmux \
-    git2u-core mercurial subversion python36 python36-devel python-psycopg2 make postgresql \
+    git2u-core mercurial subversion python-devel python36 python36-devel python-psycopg2 make postgresql \
     postgresql-devel nginx nodejs python-psutil libxml2-devel libxslt-devel \
     libstdc++.so.6 gcc cyrus-sasl-devel cyrus-sasl openldap-devel libffi-devel \
     xmlsec1-devel swig krb5-devel xmlsec1-openssl xmlsec1 \
diff --git a/tox.ini b/tox.ini
--- a/tox.ini
+++ b/tox.ini
@@ -21,6 +21,7 @@ whitelist_externals = make
 ; SKIP_SLOW_TESTS = True
 
 [testenv:api-lint]
+basepython = python3.6
 deps =
     -r{toxinidir}/requirements/requirements_dev.txt
     coverage
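The xrange-to-range changes in the data generator above are mechanical: Python 3's range is the lazy equivalent of Python 2's xrange, and both loops only iterate over it. For reference, spread() reconstructed as a self-contained Python 3 sketch; the hunk ends before the function's tail, so the remainder handling and return below are assumptions made only so the sketch runs:

    # Reconstructed from the hunk above: distribute n items across m buckets,
    # guaranteeing at least one per bucket and front-loading the remainder.
    def spread(n, m):
        ret = []
        # At least one in each slot, split up the rest exponentially so the
        # first buckets contain a lot of entries
        for i in range(m):
            if n > 0:
                ret.append(1)
                n -= 1
            else:
                ret.append(0)

        for i in range(m):
            n_in_this_slot = n // 2
            n -= n_in_this_slot
            ret[i] += n_in_this_slot
        # Assumed tail (not shown in the diff): fold any leftover into the
        # last bucket and return.
        ret[-1] += n
        return ret

    print(spread(100, 5))  # -> [48, 25, 13, 7, 7] with this assumed tail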