Mirror of https://github.com/ansible/awx.git, synced 2024-11-01 08:21:15 +03:00
Merge pull request #1175 from cclauss/unicode-to-six-u
Change unicode() --> six.text_type() for Python 3
commit 31d0e55c2a
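
Why the change is needed: the `unicode` builtin was removed in Python 3, where `str` is already the text type, so every `unicode(...)` call touched below would raise `NameError` there. `six.text_type` is an alias for `unicode` on Python 2 and for `str` on Python 3, which lets the same call sites run under both interpreters. A minimal sketch of the pattern the patch applies throughout (the `to_text` helper is illustrative only, not part of the commit):

    # Illustrative sketch of the six-based compatibility pattern used in the diff below.
    import six

    def to_text(value):
        # six.string_types is (str,) on Python 3 and (basestring,) on Python 2,
        # so values that are already text pass through untouched.
        if not isinstance(value, six.string_types):
            # six.text_type is str on Python 3 and unicode on Python 2.
            value = six.text_type(value)
        return value

    print(to_text(42))    # prints '42' on both Python 2 and Python 3
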
@@ -72,7 +72,7 @@ class PlainTextRenderer(renderers.BaseRenderer):

     def render(self, data, media_type=None, renderer_context=None):
         if not isinstance(data, six.string_types):
-            data = unicode(data)
+            data = six.text_type(data)
         return data.encode(self.charset)


@@ -2090,7 +2090,7 @@ class HostList(HostRelatedSearchMixin, ListCreateAPIView):
         try:
             return super(HostList, self).list(*args, **kwargs)
         except Exception as e:
-            return Response(dict(error=_(unicode(e))), status=status.HTTP_400_BAD_REQUEST)
+            return Response(dict(error=_(six.text_type(e))), status=status.HTTP_400_BAD_REQUEST)


 class HostDetail(ControlledByScmMixin, RetrieveUpdateDestroyAPIView):
@@ -5,6 +5,8 @@
 import datetime
 import logging

+import six
+
 # Django
 from django.core.management.base import BaseCommand
 from django.utils.timezone import now
@@ -41,7 +43,7 @@ class Command(BaseCommand):
         n_deleted_items = 0
         pks_to_delete = set()
         for asobj in ActivityStream.objects.iterator():
-            asobj_disp = '"%s" id: %s' % (unicode(asobj), asobj.id)
+            asobj_disp = '"%s" id: %s' % (six.text_type(asobj), asobj.id)
             if asobj.timestamp >= self.cutoff:
                 if self.dry_run:
                     self.logger.info("would skip %s" % asobj_disp)
@@ -5,6 +5,8 @@
 import datetime
 import logging

+import six
+
 # Django
 from django.core.management.base import BaseCommand, CommandError
 from django.db import transaction
@@ -66,7 +68,7 @@ class Command(BaseCommand):
         jobs = Job.objects.filter(created__lt=self.cutoff)
         for job in jobs.iterator():
             job_display = '"%s" (%d host summaries, %d events)' % \
-                          (unicode(job),
+                          (six.text_type(job),
                            job.job_host_summaries.count(), job.job_events.count())
             if job.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
@@ -87,7 +89,7 @@ class Command(BaseCommand):
         ad_hoc_commands = AdHocCommand.objects.filter(created__lt=self.cutoff)
         for ad_hoc_command in ad_hoc_commands.iterator():
             ad_hoc_command_display = '"%s" (%d events)' % \
-                                     (unicode(ad_hoc_command),
+                                     (six.text_type(ad_hoc_command),
                                       ad_hoc_command.ad_hoc_command_events.count())
             if ad_hoc_command.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
@@ -107,7 +109,7 @@ class Command(BaseCommand):
         skipped, deleted = 0, 0
         project_updates = ProjectUpdate.objects.filter(created__lt=self.cutoff)
         for pu in project_updates.iterator():
-            pu_display = '"%s" (type %s)' % (unicode(pu), unicode(pu.launch_type))
+            pu_display = '"%s" (type %s)' % (six.text_type(pu), six.text_type(pu.launch_type))
             if pu.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s project update %s', action_text, pu.status, pu_display)
@@ -130,7 +132,7 @@ class Command(BaseCommand):
         skipped, deleted = 0, 0
         inventory_updates = InventoryUpdate.objects.filter(created__lt=self.cutoff)
         for iu in inventory_updates.iterator():
-            iu_display = '"%s" (source %s)' % (unicode(iu), unicode(iu.source))
+            iu_display = '"%s" (source %s)' % (six.text_type(iu), six.text_type(iu.source))
             if iu.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s inventory update %s', action_text, iu.status, iu_display)
@@ -153,7 +155,7 @@ class Command(BaseCommand):
         skipped, deleted = 0, 0
         system_jobs = SystemJob.objects.filter(created__lt=self.cutoff)
         for sj in system_jobs.iterator():
-            sj_display = '"%s" (type %s)' % (unicode(sj), unicode(sj.job_type))
+            sj_display = '"%s" (type %s)' % (six.text_type(sj), six.text_type(sj.job_type))
             if sj.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s system_job %s', action_text, sj.status, sj_display)
@@ -183,7 +185,7 @@ class Command(BaseCommand):
         workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff)
         for workflow_job in workflow_jobs.iterator():
             workflow_job_display = '"{}" ({} nodes)'.format(
-                unicode(workflow_job),
+                six.text_type(workflow_job),
                 workflow_job.workflow_nodes.count())
             if workflow_job.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
@@ -204,7 +206,7 @@ class Command(BaseCommand):
         notifications = Notification.objects.filter(created__lt=self.cutoff)
         for notification in notifications.iterator():
             notification_display = '"{}" (started {}, {} type, {} sent)'.format(
-                unicode(notification), unicode(notification.created),
+                six.text_type(notification), six.text_type(notification.created),
                 notification.notification_type, notification.notifications_sent)
             if notification.status in ('pending',):
                 action_text = 'would skip' if self.dry_run else 'skipping'
@@ -246,4 +248,3 @@ class Command(BaseCommand):
                 self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped)
             else:
                 self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped)
-
@@ -3,6 +3,8 @@ import logging
 from django.utils.timezone import now
 from django.utils.text import slugify

+import six
+
 from awx.main.models.base import PERM_INVENTORY_SCAN, PERM_INVENTORY_DEPLOY
 from awx.main import utils

@@ -13,7 +15,7 @@ logger = logging.getLogger('awx.main.migrations')
 def _create_fact_scan_project(ContentType, Project, org):
     ct = ContentType.objects.get_for_model(Project)
     name = u"Tower Fact Scan - {}".format(org.name if org else "No Organization")
-    proj = Project(name=name,
+    proj = Project(name=name,
                    scm_url='https://github.com/ansible/awx-facts-playbooks',
                    scm_type='git',
                    scm_update_on_launch=True,
@@ -24,7 +26,7 @@ def _create_fact_scan_project(ContentType, Project, org):
                    polymorphic_ctype=ct)
     proj.save()

-    slug_name = slugify(unicode(name)).replace(u'-', u'_')
+    slug_name = slugify(six.text_type(name)).replace(u'-', u'_')
     proj.local_path = u'_%d__%s' % (int(proj.pk), slug_name)

     proj.save()
@@ -51,10 +53,10 @@ def _migrate_scan_job_templates(apps):
     Project = apps.get_model('main', 'Project')

     project_no_org = None

     # A scan job template with a custom project will retain the custom project.
     JobTemplate.objects.filter(job_type=PERM_INVENTORY_SCAN, project__isnull=False).update(use_fact_cache=True, job_type=PERM_INVENTORY_DEPLOY)

     # Scan jobs templates using Tower's default scan playbook will now point at
     # the same playbook but in a github repo.
     jts = _get_tower_scan_job_templates(JobTemplate)
@@ -1274,7 +1274,7 @@ class InventorySourceOptions(BaseModel):
     source_vars_dict = VarsDictProperty('source_vars')

     def clean_instance_filters(self):
-        instance_filters = unicode(self.instance_filters or '')
+        instance_filters = six.text_type(self.instance_filters or '')
         if self.source == 'ec2':
             invalid_filters = []
             instance_filter_re = re.compile(r'^((tag:.+)|([a-z][a-z\.-]*[a-z]))=.*$')
@@ -1300,7 +1300,7 @@ class InventorySourceOptions(BaseModel):
             return ''

     def clean_group_by(self):
-        group_by = unicode(self.group_by or '')
+        group_by = six.text_type(self.group_by or '')
         if self.source == 'ec2':
             get_choices = getattr(self, 'get_%s_group_by_choices' % self.source)
             valid_choices = [x[0] for x in get_choices()]
@@ -14,6 +14,8 @@ from django.contrib.auth.models import User
 from django.utils.timezone import now as tz_now
 from django.utils.translation import ugettext_lazy as _

+import six
+
 # AWX
 from awx.api.versioning import reverse
 from awx.main.fields import AutoOneToOneField, ImplicitRoleField
@@ -159,7 +161,7 @@ class AuthToken(BaseModel):
     def reason_long(reason):
         for x in AuthToken.REASON_CHOICES:
             if x[0] == reason:
-                return unicode(x[1])
+                return six.text_type(x[1])
         return None

     @classmethod
@@ -15,6 +15,8 @@ from django.utils.text import slugify
 from django.core.exceptions import ValidationError
 from django.utils.timezone import now, make_aware, get_default_timezone

+import six
+
 # AWX
 from awx.api.versioning import reverse
 from awx.main.models.base import * # noqa
@@ -124,7 +126,7 @@ class ProjectOptions(models.Model):
     def clean_scm_url(self):
         if self.scm_type == 'insights':
             self.scm_url = settings.INSIGHTS_URL_BASE
-        scm_url = unicode(self.scm_url or '')
+        scm_url = six.text_type(self.scm_url or '')
         if not self.scm_type:
             return ''
         try:
@@ -135,7 +137,7 @@ class ProjectOptions(models.Model):
         scm_url_parts = urlparse.urlsplit(scm_url)
         if self.scm_type and not any(scm_url_parts):
             raise ValidationError(_('SCM URL is required.'))
-        return unicode(self.scm_url or '')
+        return six.text_type(self.scm_url or '')

     def clean_credential(self):
         if not self.scm_type:
@@ -328,7 +330,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
                 update_fields.append('scm_delete_on_next_update')
         # Create auto-generated local path if project uses SCM.
         if self.pk and self.scm_type and not self.local_path.startswith('_'):
-            slug_name = slugify(unicode(self.name)).replace(u'-', u'_')
+            slug_name = slugify(six.text_type(self.name)).replace(u'-', u'_')
             self.local_path = u'_%d__%s' % (int(self.pk), slug_name)
             if 'local_path' not in update_fields:
                 update_fields.append('local_path')
@@ -16,6 +16,8 @@ from django.dispatch import receiver
 from crum import get_current_request, get_current_user
 from crum.signals import current_user_getter

+import six
+
 # AWX
 from awx.main.models import * # noqa
 from awx.api.serializers import * # noqa
@@ -88,7 +90,7 @@ def emit_update_inventory_computed_fields(sender, **kwargs):
     elif sender == Group.inventory_sources.through:
         sender_name = 'group.inventory_sources'
     else:
-        sender_name = unicode(sender._meta.verbose_name)
+        sender_name = six.text_type(sender._meta.verbose_name)
     if kwargs['signal'] == post_save:
         if sender == Job:
             return
@@ -118,7 +120,7 @@ def emit_update_inventory_on_created_or_deleted(sender, **kwargs):
             pass
         else:
             return
-    sender_name = unicode(sender._meta.verbose_name)
+    sender_name = six.text_type(sender._meta.verbose_name)
     logger.debug("%s created or deleted, updating inventory computed fields: %r %r",
                  sender_name, sender, kwargs)
     try:
@@ -3,6 +3,7 @@ from south.utils import datetime_utils as datetime
 from south.db import db
 from south.v2 import DataMigration
 from django.db import models
+import six


 class Migration(DataMigration):
@@ -12,7 +13,7 @@ class Migration(DataMigration):
             if jhs.host is not None and jhs.host.active:
                 jhs.host_name = jhs.host.name
             else:
-                jhs.host_name = "tower_deleted_host-%s" % unicode(idx)
+                jhs.host_name = "tower_deleted_host-%s" % six.text_type(idx)
             jhs.save()

     def backwards(self, orm):
@@ -44,6 +44,8 @@ from django.core.exceptions import ObjectDoesNotExist
 # Django-CRUM
 from crum import impersonate

+import six
+
 # AWX
 from awx import __version__ as awx_application_version
 from awx import celery_app
@@ -1168,7 +1170,7 @@ class RunJob(BaseTask):
                 env['ANSIBLE_NET_SSH_KEYFILE'] = ssh_keyfile

             authorize = network_cred.authorize
-            env['ANSIBLE_NET_AUTHORIZE'] = unicode(int(authorize))
+            env['ANSIBLE_NET_AUTHORIZE'] = six.text_type(int(authorize))
             if authorize:
                 env['ANSIBLE_NET_AUTH_PASS'] = decrypt_field(network_cred, 'authorize_password')

@@ -1767,7 +1769,7 @@ class RunInventoryUpdate(BaseTask):
             ec2_opts['cache_path'] = cache_path
             ec2_opts.setdefault('cache_max_age', '300')
             for k,v in ec2_opts.items():
-                cp.set(section, k, unicode(v))
+                cp.set(section, k, six.text_type(v))
         # Allow custom options to vmware inventory script.
         elif inventory_update.source == 'vmware':
             credential = inventory_update.credential
@@ -1787,7 +1789,7 @@ class RunInventoryUpdate(BaseTask):
             vmware_opts.setdefault('groupby_patterns', inventory_update.group_by)

             for k,v in vmware_opts.items():
-                cp.set(section, k, unicode(v))
+                cp.set(section, k, six.text_type(v))

         elif inventory_update.source == 'satellite6':
             section = 'foreman'
@@ -1803,7 +1805,7 @@ class RunInventoryUpdate(BaseTask):
                 elif k == 'satellite6_group_prefix' and isinstance(v, basestring):
                     group_prefix = v
                 else:
-                    cp.set(section, k, unicode(v))
+                    cp.set(section, k, six.text_type(v))

             credential = inventory_update.credential
             if credential:
@@ -1939,7 +1941,7 @@ class RunInventoryUpdate(BaseTask):
         elif inventory_update.source in ['scm', 'custom']:
             for env_k in inventory_update.source_vars_dict:
                 if str(env_k) not in env and str(env_k) not in settings.INV_ENV_VARIABLE_BLACKLIST:
-                    env[str(env_k)] = unicode(inventory_update.source_vars_dict[env_k])
+                    env[str(env_k)] = six.text_type(inventory_update.source_vars_dict[env_k])
         elif inventory_update.source == 'tower':
             env['TOWER_INVENTORY'] = inventory_update.instance_filters
             env['TOWER_LICENSE_TYPE'] = get_licenser().validate()['license_type']
@@ -2,6 +2,8 @@ import mock
 import pytest
 import json

+import six
+
 from awx.api.versioning import reverse
 from awx.main.utils import timestamp_apiformat
 from django.utils import timezone
@@ -105,7 +107,7 @@ def test_content(hosts, fact_scans, get, user, fact_ansible_json, monkeypatch_js

     assert fact_known.host_id == response.data['host']
     # TODO: Just make response.data['facts'] when we're only dealing with postgres, or if jsonfields ever fixes this bug
-    assert fact_ansible_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], unicode) else response.data['facts'])
+    assert fact_ansible_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], six.text_type) else response.data['facts'])
     assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
     assert fact_known.module == response.data['module']

@@ -117,7 +119,7 @@ def _test_search_by_module(hosts, fact_scans, get, user, fact_json, module_name)
     (fact_known, response) = setup_common(hosts, fact_scans, get, user, module_name=module_name, get_params=params)

     # TODO: Just make response.data['facts'] when we're only dealing with postgres, or if jsonfields ever fixes this bug
-    assert fact_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], unicode) else response.data['facts'])
+    assert fact_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], six.text_type) else response.data['facts'])
     assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
     assert module_name == response.data['module']

@@ -10,6 +10,8 @@ from awx.main.utils.filters import SmartFilter
 # Django
 from django.db.models import Q

+import six
+

 Field = namedtuple('Field', 'name')
 Meta = namedtuple('Meta', 'fields')
@@ -43,7 +45,7 @@ class TestSmartFilterQueryFromString():
     ])
     def test_query_generated(self, mock_get_host_model, filter_string, q_expected):
         q = SmartFilter.query_from_string(filter_string)
-        assert unicode(q) == unicode(q_expected)
+        assert six.text_type(q) == six.text_type(q_expected)

     @pytest.mark.parametrize("filter_string", [
         'ansible_facts__facts__facts__blank='
@@ -60,7 +62,7 @@ class TestSmartFilterQueryFromString():
     ])
     def test_unicode(self, mock_get_host_model, filter_string, q_expected):
         q = SmartFilter.query_from_string(filter_string)
-        assert unicode(q) == unicode(q_expected)
+        assert six.text_type(q) == six.text_type(q_expected)

     @pytest.mark.parametrize("filter_string,q_expected", [
         ('(a=b)', Q(**{u"a": u"b"})),
@@ -76,7 +78,7 @@ class TestSmartFilterQueryFromString():
     ])
     def test_boolean_parenthesis(self, mock_get_host_model, filter_string, q_expected):
         q = SmartFilter.query_from_string(filter_string)
-        assert unicode(q) == unicode(q_expected)
+        assert six.text_type(q) == six.text_type(q_expected)

     @pytest.mark.parametrize("filter_string,q_expected", [
         ('ansible_facts__a__b__c[]=3', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [3]}}}})),
@@ -99,7 +101,7 @@ class TestSmartFilterQueryFromString():
     ])
     def test_contains_query_generated(self, mock_get_host_model, filter_string, q_expected):
         q = SmartFilter.query_from_string(filter_string)
-        assert unicode(q) == unicode(q_expected)
+        assert six.text_type(q) == six.text_type(q_expected)

     @pytest.mark.parametrize("filter_string,q_expected", [
         #('a__b__c[]="true"', Q(**{u"a__b__c__contains": u"\"true\""})),
@@ -109,7 +111,7 @@ class TestSmartFilterQueryFromString():
     ])
     def test_contains_query_generated_unicode(self, mock_get_host_model, filter_string, q_expected):
         q = SmartFilter.query_from_string(filter_string)
-        assert unicode(q) == unicode(q_expected)
+        assert six.text_type(q) == six.text_type(q_expected)

     @pytest.mark.parametrize("filter_string,q_expected", [
         ('ansible_facts__a=null', Q(**{u"ansible_facts__contains": {u"a": u"null"}})),
@@ -117,7 +119,7 @@ class TestSmartFilterQueryFromString():
     ])
     def test_contains_query_generated_null(self, mock_get_host_model, filter_string, q_expected):
         q = SmartFilter.query_from_string(filter_string)
-        assert unicode(q) == unicode(q_expected)
+        assert six.text_type(q) == six.text_type(q_expected)


     @pytest.mark.parametrize("filter_string,q_expected", [
@@ -132,7 +134,7 @@ class TestSmartFilterQueryFromString():
     ])
     def test_search_related_fields(self, mock_get_host_model, filter_string, q_expected):
         q = SmartFilter.query_from_string(filter_string)
-        assert unicode(q) == unicode(q_expected)
+        assert six.text_type(q) == six.text_type(q_expected)


     '''
@@ -43,7 +43,7 @@ logger = logging.getLogger('awx.main.utils')
 __all__ = ['get_object_or_400', 'get_object_or_403', 'camelcase_to_underscore', 'memoize', 'memoize_delete',
            'get_ansible_version', 'get_ssh_version', 'get_licenser', 'get_awx_version', 'update_scm_url',
            'get_type_for_model', 'get_model_for_type', 'copy_model_by_class',
-           'copy_m2m_relationships' ,'cache_list_capabilities', 'to_python_boolean',
+           'copy_m2m_relationships', 'cache_list_capabilities', 'to_python_boolean',
            'ignore_inventory_computed_fields', 'ignore_inventory_group_removal',
            '_inventory_updates', 'get_pk_from_dict', 'getattrd', 'NoDefaultProvided',
            'get_current_apps', 'set_current_apps', 'OutputEventFilter',
@@ -84,7 +84,7 @@ def get_object_or_403(klass, *args, **kwargs):


 def to_python_boolean(value, allow_none=False):
-    value = unicode(value)
+    value = six.text_type(value)
     if value.lower() in ('true', '1', 't'):
         return True
     elif value.lower() in ('false', '0', 'f'):
@@ -92,7 +92,7 @@ def to_python_boolean(value, allow_none=False):
     elif allow_none and value.lower() in ('none', 'null'):
         return None
     else:
-        raise ValueError(_(u'Unable to convert "%s" to boolean') % unicode(value))
+        raise ValueError(_(u'Unable to convert "%s" to boolean') % six.text_type(value))


 def camelcase_to_underscore(s):
@@ -325,7 +325,7 @@ def update_scm_url(scm_type, url, username=True, password=True,
         netloc = u''
     netloc = u'@'.join(filter(None, [netloc, parts.hostname]))
     if parts.port:
-        netloc = u':'.join([netloc, unicode(parts.port)])
+        netloc = u':'.join([netloc, six.text_type(parts.port)])
     new_url = urlparse.urlunsplit([parts.scheme, netloc, parts.path,
                                    parts.query, parts.fragment])
     if scp_format and parts.scheme == 'git+ssh':
@@ -8,6 +8,8 @@ from pyparsing import (
     ParseException,
 )

+import six
+
 import django

 from awx.main.utils.common import get_search_fields
@@ -54,12 +56,12 @@ class SmartFilter(object):
             self.result = Host.objects.filter(**kwargs)

         def strip_quotes_traditional_logic(self, v):
-            if type(v) is unicode and v.startswith('"') and v.endswith('"'):
+            if type(v) is six.text_type and v.startswith('"') and v.endswith('"'):
                 return v[1:-1]
             return v

         def strip_quotes_json_logic(self, v):
-            if type(v) is unicode and v.startswith('"') and v.endswith('"') and v != u'"null"':
+            if type(v) is six.text_type and v.startswith('"') and v.endswith('"') and v != u'"null"':
                 return v[1:-1]
             return v

@@ -138,7 +140,7 @@ class SmartFilter(object):
             # value
             # ="something"
             if t_len > (v_offset + 2) and t[v_offset] == "\"" and t[v_offset + 2] == "\"":
-                v = u'"' + unicode(t[v_offset + 1]) + u'"'
+                v = u'"' + six.text_type(t[v_offset + 1]) + u'"'
                 #v = t[v_offset + 1]
             # empty ""
             elif t_len > (v_offset + 1):
@@ -207,9 +209,9 @@ class SmartFilter(object):
         * handle key with __ in it
         '''
         filter_string_raw = filter_string
-        filter_string = unicode(filter_string)
+        filter_string = six.text_type(filter_string)

-        unicode_spaces = list(set(unicode(c) for c in filter_string if c.isspace()))
+        unicode_spaces = list(set(six.text_type(c) for c in filter_string if c.isspace()))
         unicode_spaces_other = unicode_spaces + [u'(', u')', u'=', u'"']
         atom = CharsNotIn(unicode_spaces_other)
         atom_inside_quotes = CharsNotIn(u'"')
@@ -269,7 +269,7 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
             logger.warn("Could not map user detail '%s' from SAML attribute '%s'; "
                         "update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
                         conf_key[5:], key, self.name, conf_key)
-        return unicode(value) if value is not None else value
+        return six.text_type(value) if value is not None else value


 class SAMLAuth(BaseSAMLAuth):