
Merge pull request #3001 from detiber/python3

Python 3 support; add tox for better local testing against multiple Python versions
Scott Dodson 8 years ago
commit 4c20c6b76f

+ 4 - 0
.gitignore

@@ -21,3 +21,7 @@ multi_inventory.yaml
 ansible.cfg
 *.retry
 .vscode/*
+.cache
+.tox
+.coverage
+*.egg-info

+ 4 - 0
.travis.yml

@@ -1,9 +1,13 @@
 ---
 sudo: false
 
+cache:
+  - pip
+
 language: python
 python:
   - "2.7"
+  - "3.5"
 
 install:
   - pip install -r requirements.txt

+ 2 - 19
callback_plugins/openshift_quick_installer.py

@@ -36,30 +36,13 @@ What's different:
 """
 
 from __future__ import (absolute_import, print_function)
-import imp
-import os
 import sys
 from ansible import constants as C
+from ansible.plugins.callback import CallbackBase
 from ansible.utils.color import colorize, hostcolor
-ANSIBLE_PATH = imp.find_module('ansible')[1]
-DEFAULT_PATH = os.path.join(ANSIBLE_PATH, 'plugins/callback/default.py')
-DEFAULT_MODULE = imp.load_source(
-    'ansible.plugins.callback.default',
-    DEFAULT_PATH
-)
 
-try:
-    from ansible.plugins.callback import CallbackBase
-    BASECLASS = CallbackBase
-except ImportError:  # < ansible 2.1
-    BASECLASS = DEFAULT_MODULE.CallbackModule
 
-
-reload(sys)
-sys.setdefaultencoding('utf-8')
-
-
-class CallbackModule(DEFAULT_MODULE.CallbackModule):
+class CallbackModule(CallbackBase):
 
     """
     Ansible callback plugin

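The change to callback_plugins/openshift_quick_installer.py above drops the imp-based loading of Ansible's bundled default callback and the Python 2-only reload(sys)/setdefaultencoding hack, and subclasses CallbackBase directly (the import the old code already preferred on Ansible 2.1+). A minimal sketch of that pattern; the plugin name, attributes, and message below are illustrative, not taken from this repo:

    from __future__ import (absolute_import, print_function)

    from ansible.plugins.callback import CallbackBase


    class CallbackModule(CallbackBase):
        """Example stdout callback built directly on CallbackBase."""
        CALLBACK_VERSION = 2.0
        CALLBACK_TYPE = 'stdout'
        CALLBACK_NAME = 'example_quiet'

        def v2_playbook_on_play_start(self, play):
            # self._display is provided by CallbackBase.
            self._display.display("Starting play: %s" % play.get_name())
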
+ 15 - 14
filter_plugins/oo_filters.py

@@ -19,6 +19,7 @@ from distutils.version import LooseVersion
 from operator import itemgetter
 from ansible.parsing.yaml.dumper import AnsibleDumper
 from urlparse import urlparse
+from six import string_types
 
 HAS_OPENSSL = False
 try:
@@ -120,7 +121,7 @@ class FilterModule(object):
             raise errors.AnsibleFilterError("|failed expects hostvars is dictionary or object")
         if not isinstance(variables, dict):
             raise errors.AnsibleFilterError("|failed expects variables is a dictionary")
-        if not isinstance(inventory_hostname, basestring):
+        if not isinstance(inventory_hostname, string_types):
             raise errors.AnsibleFilterError("|failed expects inventory_hostname is a string")
         # pylint: disable=no-member
         ansible_version = pkg_resources.get_distribution("ansible").version
@@ -215,7 +216,7 @@ class FilterModule(object):
         """
         if not isinstance(data, list):
             raise errors.AnsibleFilterError("|failed expects first param is a list")
-        if not all(isinstance(x, basestring) for x in data):
+        if not all(isinstance(x, string_types) for x in data):
             raise errors.AnsibleFilterError("|failed expects first param is a list"
                                             " of strings")
         retval = [prepend + s for s in data]
@@ -362,7 +363,7 @@ class FilterModule(object):
         if not isinstance(data, list):
             raise errors.AnsibleFilterError("|failed expects to filter on a list")
 
-        if not isinstance(filter_attr, basestring):
+        if not isinstance(filter_attr, string_types):
             raise errors.AnsibleFilterError("|failed expects filter_attr is a str or unicode")
 
         # Gather up the values for the list of keys passed in
@@ -401,9 +402,9 @@ class FilterModule(object):
         """
         if not isinstance(nodes, list):
             raise errors.AnsibleFilterError("failed expects to filter on a list")
-        if not isinstance(label, basestring):
+        if not isinstance(label, string_types):
             raise errors.AnsibleFilterError("failed expects label to be a string")
-        if value is not None and not isinstance(value, basestring):
+        if value is not None and not isinstance(value, string_types):
             raise errors.AnsibleFilterError("failed expects value to be a string")
 
         def label_filter(node):
@@ -419,7 +420,7 @@ class FilterModule(object):
             else:
                 return False
 
-            if isinstance(labels, basestring):
+            if isinstance(labels, string_types):
                 labels = yaml.safe_load(labels)
             if not isinstance(labels, dict):
                 raise errors.AnsibleFilterError(
@@ -518,7 +519,7 @@ class FilterModule(object):
                            "cafile": "/etc/origin/master/named_certificates/custom-ca-2.crt",
                            "names": [ "some-hostname.com" ] }]
         """
-        if not isinstance(named_certs_dir, basestring):
+        if not isinstance(named_certs_dir, string_types):
             raise errors.AnsibleFilterError("|failed expects named_certs_dir is str or unicode")
 
         if not isinstance(internal_hostnames, list):
@@ -545,7 +546,7 @@ class FilterModule(object):
                     if cert.get_extension(i).get_short_name() == 'subjectAltName':
                         for name in str(cert.get_extension(i)).replace('DNS:', '').split(', '):
                             certificate['names'].append(name)
-            except:
+            except Exception:
                 raise errors.AnsibleFilterError(("|failed to parse certificate '%s', " % certificate['certfile'] +
                                                  "please specify certificate names in host inventory"))
 
@@ -670,7 +671,7 @@ class FilterModule(object):
 
         migrations = {'openshift_router_selector': 'openshift_hosted_router_selector',
                       'openshift_registry_selector': 'openshift_hosted_registry_selector'}
-        for old_fact, new_fact in migrations.iteritems():
+        for old_fact, new_fact in migrations.items():
             if old_fact in facts and new_fact not in facts:
                 facts[new_fact] = facts[old_fact]
         return facts
@@ -781,7 +782,7 @@ class FilterModule(object):
         """
         if not isinstance(rpms, list):
             raise errors.AnsibleFilterError("failed expects to filter on a list")
-        if openshift_version is not None and not isinstance(openshift_version, basestring):
+        if openshift_version is not None and not isinstance(openshift_version, string_types):
             raise errors.AnsibleFilterError("failed expects openshift_version to be a string")
 
         rpms_31 = []
@@ -800,9 +801,9 @@ class FilterModule(object):
         """
         if not isinstance(pods, list):
             raise errors.AnsibleFilterError("failed expects to filter on a list")
-        if not isinstance(deployment_type, basestring):
+        if not isinstance(deployment_type, string_types):
             raise errors.AnsibleFilterError("failed expects deployment_type to be a string")
-        if not isinstance(component, basestring):
+        if not isinstance(component, string_types):
             raise errors.AnsibleFilterError("failed expects component to be a string")
 
         image_prefix = 'openshift/origin-'
@@ -843,7 +844,7 @@ class FilterModule(object):
             Ex. v3.2.0.10 -> -3.2.0.10
                 v1.2.0-rc1 -> -1.2.0
         """
-        if not isinstance(version, basestring):
+        if not isinstance(version, string_types):
             raise errors.AnsibleFilterError("|failed expects a string or unicode")
         if version.startswith("v"):
             version = version[1:]
@@ -861,7 +862,7 @@ class FilterModule(object):
 
             Ex: https://ose3-master.example.com/v1/api -> ose3-master.example.com
         """
-        if not isinstance(url, basestring):
+        if not isinstance(url, string_types):
             raise errors.AnsibleFilterError("|failed expects a string or unicode")
         parse_result = urlparse(url)
         if parse_result.netloc != '':

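Most of the changes to filter_plugins/oo_filters.py swap the Python 2-only basestring for six.string_types, which matches str and unicode on Python 2 and plain str on Python 3, and replace dict.iteritems() with items(). A minimal sketch with made-up values:

    from __future__ import print_function

    from six import string_types

    for value in ['master.example.com', u'node.example.com', 42]:
        # True for str/unicode on Python 2 and str on Python 3; False for the int.
        print(isinstance(value, string_types))
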
+ 7 - 15
filter_plugins/openshift_master.py

@@ -6,22 +6,14 @@ Custom filters for use in openshift-master
 '''
 import copy
 import sys
-import yaml
+
+from distutils.version import LooseVersion  # pylint: disable=no-name-in-module,import-error
 
 from ansible import errors
+from ansible.plugins.filter.core import to_bool as ansible_bool
+from six import string_types
 
-# pylint: disable=no-name-in-module,import-error,wrong-import-order
-from distutils.version import LooseVersion
-try:
-    # ansible-2.1
-    from ansible.plugins.filter.core import to_bool as ansible_bool
-except ImportError:
-    try:
-        # ansible-2.0.x
-        from ansible.runner.filter_plugins.core import bool as ansible_bool
-    except ImportError:
-        # ansible-1.9.x
-        from ansible.plugins.filter.core import bool as ansible_bool
+import yaml
 
 
 class IdentityProviderBase(object):
@@ -513,7 +505,7 @@ class FilterModule(object):
                            'master3.example.com']
                returns True
         '''
-        if not issubclass(type(data), basestring):
+        if not issubclass(type(data), string_types):
             raise errors.AnsibleFilterError("|failed expects data is a string or unicode")
         if not issubclass(type(masters), list):
             raise errors.AnsibleFilterError("|failed expects masters is a list")
@@ -558,7 +550,7 @@ class FilterModule(object):
     def oo_htpasswd_users_from_file(file_contents):
         ''' return a dictionary of htpasswd users from htpasswd file contents '''
         htpasswd_entries = {}
-        if not isinstance(file_contents, basestring):
+        if not isinstance(file_contents, string_types):
             raise errors.AnsibleFilterError("failed, expects to filter on a string")
         for line in file_contents.splitlines():
             user = None

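In filter_plugins/openshift_master.py the nested try/except import fallbacks for older Ansible releases collapse into a single import, so Ansible 2.1+ is now assumed. A hedged sketch of the resulting usage; the output comments reflect Ansible's usual boolean coercion:

    from __future__ import print_function

    # Assumes Ansible >= 2.1, where the boolean filter lives at this path.
    from ansible.plugins.filter.core import to_bool as ansible_bool

    print(ansible_bool('true'))   # expected: True
    print(ansible_bool('no'))     # expected: False
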
+ 3 - 3
git/.pylintrc

@@ -8,7 +8,7 @@
 #init-hook=
 
 # Profiled execution.
-profile=no
+#profile=no
 
 # Add files or directories to the blacklist. They should be base names, not
 # paths.
@@ -98,7 +98,7 @@ evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / stateme
 
 # Add a comment according to your evaluation note. This is used by the global
 # evaluation report (RP0004).
-comment=no
+#comment=no
 
 # Template used to display messages. This is a python new-style format string
 # used to format the message information. See doc for all details
@@ -249,7 +249,7 @@ ignored-classes=SQLObject
 
 # When zope mode is activated, add a predefined set of Zope acquired attributes
 # to generated-members.
-zope=no
+#zope=no
 
 # List of members which are set dynamically and missed by pylint inference
 # system, and so shouldn't trigger E0201 when accessed. Python regular

+ 1 - 1
library/modify_yaml.py

@@ -95,7 +95,7 @@ def main():
 
     # ignore broad-except error to avoid stack trace to ansible user
     # pylint: disable=broad-except
-    except Exception, e:
+    except Exception as e:
         return module.fail_json(msg=str(e))
 
 

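except Exception, e is Python 2-only syntax; except Exception as e parses on both interpreters, which is what this hunk (and the identical one in the next file) switches to. A minimal sketch:

    from __future__ import print_function

    def safe_int(value):
        # The 'as' binding works on Python 2.6+ and on Python 3.
        try:
            return int(value)
        except ValueError as e:
            return 'failed: %s' % e

    print(safe_int('12'))
    print(safe_int('not a number'))
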
+ 1 - 1
playbooks/common/openshift-cluster/upgrades/library/openshift_upgrade_config.py

@@ -146,7 +146,7 @@ def main():
 
     # ignore broad-except error to avoid stack trace to ansible user
     # pylint: disable=broad-except
-    except Exception, e:
+    except Exception as e:
         return module.fail_json(msg=str(e))
 
 

+ 5 - 3
roles/kube_nfs_volumes/library/partitionpool.py

@@ -3,6 +3,8 @@
 Ansible module for partitioning.
 """
 
+from __future__ import print_function
+
 # There is no pyparted on our Jenkins worker
 # pylint: disable=import-error
 import parted
@@ -131,7 +133,7 @@ def partition(diskname, specs, force=False, check_mode=False):
         disk = None
 
     if disk and len(disk.partitions) > 0 and not force:
-        print "skipping", diskname
+        print("skipping", diskname)
         return 0
 
     # create new partition table, wiping all existing data
@@ -220,7 +222,7 @@ def main():
 
     try:
         specs = parse_spec(sizes)
-    except ValueError, ex:
+    except ValueError as ex:
         err = "Error parsing sizes=" + sizes + ": " + str(ex)
         module.fail_json(msg=err)
 
@@ -229,7 +231,7 @@ def main():
     for disk in disks.split(","):
         try:
             changed_count += partition(disk, specs, force, module.check_mode)
-        except Exception, ex:
+        except Exception as ex:
             err = "Error creating partitions on " + disk + ": " + str(ex)
             raise
             # module.fail_json(msg=err)

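Besides the except ... as fixes, partitionpool.py gains from __future__ import print_function so that print("skipping", diskname) is a real function call with two arguments on Python 2 rather than a statement printing a tuple. A small illustration with a made-up disk name:

    from __future__ import print_function

    diskname = '/dev/sdb'
    # With the future import, Python 2 prints "skipping /dev/sdb"
    # instead of "('skipping', '/dev/sdb')".
    print('skipping', diskname)
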
+ 8 - 4
roles/openshift_certificate_expiry/library/openshift_cert_expiry.py

@@ -371,7 +371,7 @@ an OpenShift Container Platform cluster
         ######################################################################
         # Load the certificate and the CA, parse their expiration dates into
         # datetime objects so we can manipulate them later
-        for _, v in cert_meta.iteritems():
+        for _, v in cert_meta.items():
             with open(v, 'r') as fp:
                 cert = fp.read()
                 cert_subject, cert_expiry_date, time_remaining = load_and_handle_cert(cert, now)
@@ -654,9 +654,13 @@ an OpenShift Container Platform cluster
     # will be at the front of the list and certificates which will
     # expire later are at the end. Router and registry certs should be
     # limited to just 1 result, so don't bother sorting those.
-    check_results['ocp_certs'] = sorted(check_results['ocp_certs'], cmp=lambda x, y: cmp(x['days_remaining'], y['days_remaining']))
-    check_results['kubeconfigs'] = sorted(check_results['kubeconfigs'], cmp=lambda x, y: cmp(x['days_remaining'], y['days_remaining']))
-    check_results['etcd'] = sorted(check_results['etcd'], cmp=lambda x, y: cmp(x['days_remaining'], y['days_remaining']))
+    def cert_key(item):
+        ''' return the days_remaining key '''
+        return item['days_remaining']
+
+    check_results['ocp_certs'] = sorted(check_results['ocp_certs'], key=cert_key)
+    check_results['kubeconfigs'] = sorted(check_results['kubeconfigs'], key=cert_key)
+    check_results['etcd'] = sorted(check_results['etcd'], key=cert_key)
 
     # This module will never change anything, but we might want to
     # change the return code parameter if there is some catastrophic

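Python 3 removed both the cmp= argument to sorted() and the cmp() builtin, so the comparison lambdas in openshift_cert_expiry.py are replaced with a key= function. The same ordering can be expressed with operator.itemgetter; a sketch with made-up data:

    from __future__ import print_function

    from operator import itemgetter

    certs = [
        {'path': 'b.crt', 'days_remaining': 250},
        {'path': 'a.crt', 'days_remaining': 12},
    ]
    # key= works on Python 2 and 3; soonest-to-expire certificates sort first.
    print(sorted(certs, key=itemgetter('days_remaining')))
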
+ 14 - 13
roles/openshift_facts/library/openshift_facts.py

@@ -26,6 +26,8 @@ import struct
 import socket
 from distutils.util import strtobool
 from distutils.version import LooseVersion
+from six import string_types
+from six import text_type
 
 # ignore pylint errors related to the module_utils import
 # pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import
@@ -87,7 +89,7 @@ def migrate_docker_facts(facts):
     # log_options was originally meant to be a comma separated string, but
     # we now prefer an actual list, with backward compatibility:
     if 'log_options' in facts['docker'] and \
-            isinstance(facts['docker']['log_options'], basestring):
+            isinstance(facts['docker']['log_options'], string_types):
         facts['docker']['log_options'] = facts['docker']['log_options'].split(",")
 
     return facts
@@ -226,7 +228,7 @@ def choose_hostname(hostnames=None, fallback=''):
         return hostname
 
     ip_regex = r'\A\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\Z'
-    ips = [i for i in hostnames if i is not None and isinstance(i, basestring) and re.match(ip_regex, i)]
+    ips = [i for i in hostnames if i is not None and isinstance(i, string_types) and re.match(ip_regex, i)]
     hosts = [i for i in hostnames if i is not None and i != '' and i not in ips]
 
     for host_list in (hosts, ips):
@@ -363,7 +365,7 @@ def normalize_aws_facts(metadata, facts):
         var_map = {'ips': 'local-ipv4s', 'public_ips': 'public-ipv4s'}
         for ips_var, int_var in iteritems(var_map):
             ips = interface.get(int_var)
-            if isinstance(ips, basestring):
+            if isinstance(ips, string_types):
                 int_info[ips_var] = [ips]
             else:
                 int_info[ips_var] = ips
@@ -772,7 +774,7 @@ def set_etcd_facts_if_unset(facts):
         # Read ETCD_DATA_DIR from /etc/etcd/etcd.conf:
         try:
             # Add a fake section for parsing:
-            ini_str = unicode('[root]\n' + open('/etc/etcd/etcd.conf', 'r').read(), 'utf-8')
+            ini_str = text_type('[root]\n' + open('/etc/etcd/etcd.conf', 'r').read(), 'utf-8')
             ini_fp = io.StringIO(ini_str)
             config = ConfigParser.RawConfigParser()
             config.readfp(ini_fp)
@@ -1280,15 +1282,14 @@ def get_hosted_registry_insecure():
     hosted_registry_insecure = None
     if os.path.exists('/etc/sysconfig/docker'):
         try:
-            ini_str = unicode('[root]\n' + open('/etc/sysconfig/docker', 'r').read(), 'utf-8')
+            ini_str = text_type('[root]\n' + open('/etc/sysconfig/docker', 'r').read(), 'utf-8')
             ini_fp = io.StringIO(ini_str)
             config = ConfigParser.RawConfigParser()
             config.readfp(ini_fp)
             options = config.get('root', 'OPTIONS')
             if 'insecure-registry' in options:
                 hosted_registry_insecure = True
-        # pylint: disable=bare-except
-        except:
+        except Exception:  # pylint: disable=broad-except
             pass
     return hosted_registry_insecure
 
@@ -1449,7 +1450,7 @@ def merge_facts(orig, new, additive_facts_to_overwrite, protected_facts_to_overw
             if key in inventory_json_facts:
                 # Watchout for JSON facts that sometimes load as strings.
                 # (can happen if the JSON contains a boolean)
-                if isinstance(new[key], basestring):
+                if isinstance(new[key], string_types):
                     facts[key] = yaml.safe_load(new[key])
                 else:
                     facts[key] = copy.deepcopy(new[key])
@@ -1511,7 +1512,7 @@ def merge_facts(orig, new, additive_facts_to_overwrite, protected_facts_to_overw
     for key in new_keys:
         # Watchout for JSON facts that sometimes load as strings.
         # (can happen if the JSON contains a boolean)
-        if key in inventory_json_facts and isinstance(new[key], basestring):
+        if key in inventory_json_facts and isinstance(new[key], string_types):
             facts[key] = yaml.safe_load(new[key])
         else:
             facts[key] = copy.deepcopy(new[key])
@@ -1614,7 +1615,7 @@ def set_proxy_facts(facts):
     if 'common' in facts:
         common = facts['common']
         if 'http_proxy' in common or 'https_proxy' in common:
-            if 'no_proxy' in common and isinstance(common['no_proxy'], basestring):
+            if 'no_proxy' in common and isinstance(common['no_proxy'], string_types):
                 common['no_proxy'] = common['no_proxy'].split(",")
             elif 'no_proxy' not in common:
                 common['no_proxy'] = []
@@ -1636,7 +1637,7 @@ def set_proxy_facts(facts):
         if 'https_proxy' not in builddefaults and 'https_proxy' in common:
             builddefaults['https_proxy'] = common['https_proxy']
         # make no_proxy into a list if it's not
-        if 'no_proxy' in builddefaults and isinstance(builddefaults['no_proxy'], basestring):
+        if 'no_proxy' in builddefaults and isinstance(builddefaults['no_proxy'], string_types):
             builddefaults['no_proxy'] = builddefaults['no_proxy'].split(",")
         if 'no_proxy' not in builddefaults and 'no_proxy' in common:
             builddefaults['no_proxy'] = common['no_proxy']
@@ -2220,12 +2221,12 @@ class OpenShiftFacts(object):
                 key = '{0}_registries'.format(cat)
                 if key in new_local_facts['docker']:
                     val = new_local_facts['docker'][key]
-                    if isinstance(val, basestring):
+                    if isinstance(val, string_types):
                         val = [x.strip() for x in val.split(',')]
                     new_local_facts['docker'][key] = list(set(val) - set(['']))
             # Convert legacy log_options comma sep string to a list if present:
             if 'log_options' in new_local_facts['docker'] and \
-                    isinstance(new_local_facts['docker']['log_options'], basestring):
+                    isinstance(new_local_facts['docker']['log_options'], string_types):
                 new_local_facts['docker']['log_options'] = new_local_facts['docker']['log_options'].split(',')
 
         new_local_facts = self.remove_empty_facts(new_local_facts)

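The recurring substitutions in openshift_facts.py come from six: string_types stands in for basestring, text_type for unicode, and iteritems() calls become items(). A short sketch of what those names resolve to; the literals are illustrative:

    from __future__ import print_function

    from six import string_types, text_type

    # text_type is unicode on Python 2 and str on Python 3.
    ini_str = text_type('[root]\nOPTIONS=--insecure-registry\n')
    print(isinstance(ini_str, string_types))   # True on both interpreters

    facts = {'use_openshift_sdn': 'true', 'no_proxy': ''}
    # items() exists on both interpreters; iteritems() is Python 2 only.
    for key, value in facts.items():
        print(key, value)
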
+ 1 - 0
utils/.coveragerc

@@ -1,4 +1,5 @@
 [run]
 omit=
     */lib/python*/site-packages/*
+    */lib/python*/*
     /usr/*

+ 11 - 7
utils/Makefile

@@ -33,8 +33,8 @@ MANPAGES := docs/man/man1/atomic-openshift-installer.1
 VERSION := 1.3
 
 # YAMLFILES: Skipping all '/files/' folders due to conflicting yaml file definitions
-YAMLFILES = $(shell find ../ -name $(VENV) -prune -o \( -name '*.yml' -o -name '*.yaml' \) ! -path "*/files/*" 2>&1)
-PYFILES = $(shell find ../ -name $(VENV) -prune -o -name ooinstall.egg-info -prune -o -name test -prune -o -name "*.py" -print)
+YAMLFILES = $(shell find ../ -name $(VENV) -prune -o -name .tox -prune -o \( -name '*.yml' -o -name '*.yaml' \) ! -path "*/files/*" -print 2>&1)
+PYFILES = $(shell find ../ -name $(VENV) -prune -o -name ooinstall.egg-info -prune -o -name test -prune -o -name .tox -prune -o -name "*.py" -print)
 
 sdist: clean
 	python setup.py sdist
@@ -83,7 +83,8 @@ ci-unittests: $(VENV)
 	@echo "#############################################"
 	@echo "# Running Unit Tests in virtualenv"
 	@echo "#############################################"
-	. $(VENV)/bin/activate && python setup.py nosetests
+	. $(VENV)/bin/activate && tox -e py27-unit
+	. $(VENV)/bin/activate && tox -e py35-unit
 	@echo "VIEW CODE COVERAGE REPORT WITH 'xdg-open cover/index.html' or run 'make viewcover'"
 
 ci-pylint: $(VENV)
@@ -108,12 +109,15 @@ ci-flake8: $(VENV)
 	@echo "#############################################"
 	@echo "# Running Flake8 Compliance Tests in virtualenv"
 	@echo "#############################################"
-	. $(VENV)/bin/activate && flake8 --config=setup.cfg ../ --exclude="utils,../inventory"
-	. $(VENV)/bin/activate && python setup.py flake8
+	. $(VENV)/bin/activate && tox -e py27-flake8
+	. $(VENV)/bin/activate && tox -e py35-flake8
 
-ci: ci-list-deps ci-unittests ci-flake8 ci-pylint ci-yamllint
+ci-tox:
+	. $(VENV)/bin/activate && tox
+
+ci: ci-list-deps ci-tox ci-pylint ci-yamllint
 	@echo
 	@echo "##################################################################################"
 	@echo "VIEW CODE COVERAGE REPORT WITH 'xdg-open cover/index.html' or run 'make viewcover'"
 	@echo "To clean your test environment run 'make clean'"
-	@echo "Other targets you may run with 'make': 'ci-pylint', 'ci-unittests', 'ci-flake8', 'ci-yamllint'"
+	@echo "Other targets you may run with 'make': 'ci-pylint', 'ci-tox', 'ci-unittests', 'ci-flake8', 'ci-yamllint'"

+ 2 - 2
utils/setup.cfg

@@ -17,5 +17,5 @@ cover-branches=1
 
 [flake8]
 max-line-length=120
-exclude=tests/*,setup.py
-ignore=E501,E121,E124
+exclude=test/*,setup.py,oo-installenv
+ignore=E501

+ 1 - 1
utils/src/ooinstall/cli_installer.py

@@ -501,7 +501,7 @@ def get_variant_and_version(multi_master=False):
 
     i = 1
     combos = get_variant_version_combos()
-    for (variant, version) in combos:
+    for (variant, _) in combos:
         message = "%s\n(%s) %s" % (message, i, variant.description)
         i = i + 1
     message = "%s\n" % message

+ 14 - 13
utils/src/ooinstall/oo_config.py

@@ -1,5 +1,7 @@
 # pylint: disable=bad-continuation,missing-docstring,no-self-use,invalid-name,too-many-instance-attributes,too-few-public-methods
 
+from __future__ import (absolute_import, print_function)
+
 import os
 import sys
 import logging
@@ -50,7 +52,7 @@ Error loading config. {}.
 See https://docs.openshift.com/enterprise/latest/install_config/install/quick_install.html#defining-an-installation-configuration-file
 for information on creating a configuration file or delete {} and re-run the installer.
 """
-    print message.format(error, path)
+    print(message.format(error, path))
 
 
 class OOConfigFileError(Exception):
@@ -103,7 +105,7 @@ class Host(object):
             # If the property is defined (not None or False), export it:
             if getattr(self, prop):
                 d[prop] = getattr(self, prop)
-        for variable, value in self.other_variables.iteritems():
+        for variable, value in self.other_variables.items():
             d[variable] = value
 
         return d
@@ -256,16 +258,16 @@ class OOConfig(object):
                 # Parse the hosts into DTO objects:
                 for host in host_list:
                     host['other_variables'] = {}
-                    for variable, value in host.iteritems():
+                    for variable, value in host.items():
                         if variable not in HOST_VARIABLES_BLACKLIST:
                             host['other_variables'][variable] = value
                     self.deployment.hosts.append(Host(**host))
 
                 # Parse the roles into Objects
-                for name, variables in role_list.iteritems():
+                for name, variables in role_list.items():
                     self.deployment.roles.update({name: Role(name, variables)})
 
-        except IOError, ferr:
+        except IOError as ferr:
             raise OOConfigFileError('Cannot open config file "{}": {}'.format(ferr.filename,
                                                                               ferr.strerror))
         except yaml.scanner.ScannerError:
@@ -354,14 +356,13 @@ class OOConfig(object):
         self.settings['ansible_inventory_path'] = \
             '{}/hosts'.format(os.path.dirname(self.config_path))
 
-        # pylint: disable=consider-iterating-dictionary
-        # Disabled because we shouldn't alter the container we're
-        # iterating over
-        #
         # clean up any empty sets
-        for setting in self.settings.keys():
+        empty_keys = []
+        for setting in self.settings:
             if not self.settings[setting]:
-                self.settings.pop(setting)
+                empty_keys.append(setting)
+        for key in empty_keys:
+            self.settings.pop(key)
 
         installer_log.debug("Updated OOConfig settings: %s", self.settings)
 
@@ -410,7 +411,7 @@ class OOConfig(object):
         for host in self.deployment.hosts:
             p_settings['deployment']['hosts'].append(host.to_dict())
 
-        for name, role in self.deployment.roles.iteritems():
+        for name, role in self.deployment.roles.items():
             p_settings['deployment']['roles'][name] = role.variables
 
         for setting in self.deployment.variables:
@@ -424,7 +425,7 @@ class OOConfig(object):
             if self.settings['ansible_inventory_directory'] != self._default_ansible_inv_dir():
                 p_settings['ansible_inventory_directory'] = self.settings['ansible_inventory_directory']
         except KeyError as e:
-            print "Error persisting settings: {}".format(e)
+            print("Error persisting settings: {}".format(e))
             sys.exit(0)
 
         return p_settings

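The settings cleanup in oo_config.py no longer pops keys out of the dictionary while iterating over it, which on Python 3 raises "RuntimeError: dictionary changed size during iteration"; empty keys are collected first and removed afterwards. An equivalent, slightly more compact sketch with made-up settings:

    from __future__ import print_function

    settings = {'ansible_ssh_user': 'root', 'ansible_log_path': '', 'variant': None}

    # Iterate over a snapshot of the keys so the dict can be mutated safely
    # on both Python 2 and Python 3.
    for key in list(settings):
        if not settings[key]:
            settings.pop(key)

    print(settings)   # {'ansible_ssh_user': 'root'}
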
+ 11 - 9
utils/src/ooinstall/openshift_ansible.py

@@ -1,5 +1,7 @@
 # pylint: disable=bad-continuation,missing-docstring,no-self-use,invalid-name,global-statement,global-variable-not-assigned
 
+from __future__ import (absolute_import, print_function)
+
 import socket
 import subprocess
 import sys
@@ -107,12 +109,12 @@ def write_inventory_vars(base_inventory, lb):
     global CFG
     base_inventory.write('\n[OSEv3:vars]\n')
 
-    for variable, value in CFG.settings.iteritems():
+    for variable, value in CFG.settings.items():
         inventory_var = VARIABLES_MAP.get(variable, None)
         if inventory_var and value:
             base_inventory.write('{}={}\n'.format(inventory_var, value))
 
-    for variable, value in CFG.deployment.variables.iteritems():
+    for variable, value in CFG.deployment.variables.items():
         inventory_var = VARIABLES_MAP.get(variable, variable)
         if value:
             base_inventory.write('{}={}\n'.format(inventory_var, value))
@@ -152,11 +154,11 @@ def write_inventory_vars(base_inventory, lb):
                              "'baseurl': '{}', "
                              "'enabled': 1, 'gpgcheck': 0}}]\n".format(os.environ['OO_INSTALL_PUDDLE_REPO']))
 
-    for name, role_obj in CFG.deployment.roles.iteritems():
+    for name, role_obj in CFG.deployment.roles.items():
         if role_obj.variables:
             group_name = ROLES_TO_GROUPS_MAP.get(name, name)
             base_inventory.write("\n[{}:vars]\n".format(group_name))
-            for variable, value in role_obj.variables.iteritems():
+            for variable, value in role_obj.variables.items():
                 inventory_var = VARIABLES_MAP.get(variable, variable)
                 if value:
                     base_inventory.write('{}={}\n'.format(inventory_var, value))
@@ -193,7 +195,7 @@ def write_host(host, role, inventory, schedulable=None):
             facts += ' {}={}'.format(HOST_VARIABLES_MAP.get(prop), getattr(host, prop))
 
     if host.other_variables:
-        for variable, value in host.other_variables.iteritems():
+        for variable, value in host.other_variables.items():
             facts += " {}={}".format(variable, value)
 
     if host.node_labels and role == 'node':
@@ -212,7 +214,7 @@ def write_host(host, role, inventory, schedulable=None):
         if os.geteuid() != 0:
             no_pwd_sudo = subprocess.call(['sudo', '-n', 'echo', 'openshift'])
             if no_pwd_sudo == 1:
-                print 'The atomic-openshift-installer requires sudo access without a password.'
+                print('The atomic-openshift-installer requires sudo access without a password.')
                 sys.exit(1)
             facts += ' ansible_become=yes'
 
@@ -245,9 +247,9 @@ def load_system_facts(inventory_file, os_facts_path, env_vars, verbose=False):
         installer_log.debug("Going to try to read this file: %s", CFG.settings['ansible_callback_facts_yaml'])
         try:
             callback_facts = yaml.safe_load(callback_facts_file)
-        except yaml.YAMLError, exc:
-            print "Error in {}".format(CFG.settings['ansible_callback_facts_yaml']), exc
-            print "Try deleting and rerunning the atomic-openshift-installer"
+        except yaml.YAMLError as exc:
+            print("Error in {}".format(CFG.settings['ansible_callback_facts_yaml']), exc)
+            print("Try deleting and rerunning the atomic-openshift-installer")
             sys.exit(1)
 
     return callback_facts, 0

+ 18 - 26
utils/src/ooinstall/variants.py

@@ -38,32 +38,24 @@ class Variant(object):
 
 
 # WARNING: Keep the versions ordered, most recent first:
-OSE = Variant('openshift-enterprise', 'OpenShift Container Platform',
-              [
-                  Version('3.4', 'openshift-enterprise'),
-              ]
-)
-
-REG = Variant('openshift-enterprise', 'Registry',
-              [
-                  Version('3.4', 'openshift-enterprise', 'registry'),
-              ]
-)
-
-origin = Variant('origin', 'OpenShift Origin',
-                 [
-                     Version('1.4', 'origin'),
-                 ]
-)
-
-LEGACY = Variant('openshift-enterprise', 'OpenShift Container Platform',
-                 [
-                     Version('3.3', 'openshift-enterprise'),
-                     Version('3.2', 'openshift-enterprise'),
-                     Version('3.1', 'openshift-enterprise'),
-                     Version('3.0', 'openshift-enterprise'),
-                 ]
-)
+OSE = Variant('openshift-enterprise', 'OpenShift Container Platform', [
+    Version('3.4', 'openshift-enterprise'),
+])
+
+REG = Variant('openshift-enterprise', 'Registry', [
+    Version('3.4', 'openshift-enterprise', 'registry'),
+])
+
+origin = Variant('origin', 'OpenShift Origin', [
+    Version('1.4', 'origin'),
+])
+
+LEGACY = Variant('openshift-enterprise', 'OpenShift Container Platform', [
+    Version('3.3', 'openshift-enterprise'),
+    Version('3.2', 'openshift-enterprise'),
+    Version('3.1', 'openshift-enterprise'),
+    Version('3.0', 'openshift-enterprise'),
+])
 
 # Ordered list of variants we can install, first is the default.
 SUPPORTED_VARIANTS = (OSE, REG, origin, LEGACY)

+ 1 - 1
utils/test-requirements.txt

@@ -1,5 +1,4 @@
 ansible
-enum
 configparser
 pylint
 nose
@@ -11,3 +10,4 @@ click
 backports.functools_lru_cache
 pyOpenSSL
 yamllint
+tox

+ 11 - 10
utils/test/cli_installer_tests.py

@@ -4,7 +4,8 @@
 
 import copy
 import os
-import ConfigParser
+
+from six.moves import configparser
 
 import ooinstall.cli_installer as cli
 
@@ -408,7 +409,7 @@ class UnattendedCliTests(OOCliFixture):
         result = self.runner.invoke(cli.cli, self.cli_args)
 
         if result.exception is None or result.exit_code != 1:
-            print "Exit code: %s" % result.exit_code
+            print("Exit code: %s" % result.exit_code)
             self.fail("Unexpected CLI return")
 
     # unattended with config file and all installed hosts (with --force)
@@ -523,7 +524,7 @@ class UnattendedCliTests(OOCliFixture):
         self.assert_result(result, 0)
 
         # Check the inventory file looks as we would expect:
-        inventory = ConfigParser.ConfigParser(allow_no_value=True)
+        inventory = configparser.ConfigParser(allow_no_value=True)
         inventory.read(os.path.join(self.work_dir, 'hosts'))
         self.assertEquals('root',
             inventory.get('OSEv3:vars', 'ansible_ssh_user'))
@@ -566,7 +567,7 @@ class UnattendedCliTests(OOCliFixture):
         self.assertEquals('3.3', written_config['variant_version'])
 
         # Make sure the correct value was passed to ansible:
-        inventory = ConfigParser.ConfigParser(allow_no_value=True)
+        inventory = configparser.ConfigParser(allow_no_value=True)
         inventory.read(os.path.join(self.work_dir, 'hosts'))
         self.assertEquals('openshift-enterprise',
             inventory.get('OSEv3:vars', 'deployment_type'))
@@ -594,7 +595,7 @@ class UnattendedCliTests(OOCliFixture):
         # and written to disk:
         self.assertEquals('3.3', written_config['variant_version'])
 
-        inventory = ConfigParser.ConfigParser(allow_no_value=True)
+        inventory = configparser.ConfigParser(allow_no_value=True)
         inventory.read(os.path.join(self.work_dir, 'hosts'))
         self.assertEquals('openshift-enterprise',
             inventory.get('OSEv3:vars', 'deployment_type'))
@@ -830,7 +831,7 @@ class AttendedCliTests(OOCliFixture):
         written_config = read_yaml(self.config_file)
         self._verify_config_hosts(written_config, 4)
 
-        inventory = ConfigParser.ConfigParser(allow_no_value=True)
+        inventory = configparser.ConfigParser(allow_no_value=True)
         inventory.read(os.path.join(self.work_dir, 'hosts'))
         self.assert_inventory_host_var(inventory, 'nodes', '10.0.0.1',
                                  'openshift_schedulable=False')
@@ -949,7 +950,7 @@ class AttendedCliTests(OOCliFixture):
         written_config = read_yaml(self.config_file)
         self._verify_config_hosts(written_config, 6)
 
-        inventory = ConfigParser.ConfigParser(allow_no_value=True)
+        inventory = configparser.ConfigParser(allow_no_value=True)
         inventory.read(os.path.join(self.work_dir, 'hosts'))
         self.assert_inventory_host_var(inventory, 'nodes', '10.0.0.1',
                                        'openshift_schedulable=False')
@@ -990,7 +991,7 @@ class AttendedCliTests(OOCliFixture):
         written_config = read_yaml(self.config_file)
         self._verify_config_hosts(written_config, 5)
 
-        inventory = ConfigParser.ConfigParser(allow_no_value=True)
+        inventory = configparser.ConfigParser(allow_no_value=True)
         inventory.read(os.path.join(self.work_dir, 'hosts'))
         self.assert_inventory_host_var(inventory, 'nodes', '10.0.0.1',
                                        'openshift_schedulable=True')
@@ -1082,7 +1083,7 @@ class AttendedCliTests(OOCliFixture):
         written_config = read_yaml(self.config_file)
         self._verify_config_hosts(written_config, 1)
 
-        inventory = ConfigParser.ConfigParser(allow_no_value=True)
+        inventory = configparser.ConfigParser(allow_no_value=True)
         inventory.read(os.path.join(self.work_dir, 'hosts'))
         self.assert_inventory_host_var(inventory, 'nodes', '10.0.0.1',
                                        'openshift_schedulable=True')
@@ -1116,7 +1117,7 @@ class AttendedCliTests(OOCliFixture):
         written_config = read_yaml(self.config_file)
         self._verify_config_hosts(written_config, 4)
 
-        inventory = ConfigParser.ConfigParser(allow_no_value=True)
+        inventory = configparser.ConfigParser(allow_no_value=True)
         inventory.read(os.path.join(self.work_dir, 'hosts'))
         self.assert_inventory_host_var(inventory, 'nodes', '10.0.0.1',
                                  'openshift_schedulable=False')

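ConfigParser was renamed to configparser in Python 3; six.moves papers over the rename, so the tests construct the parser the same way on both interpreters. A minimal sketch with a made-up inventory snippet:

    from __future__ import print_function

    import io

    from six.moves import configparser

    inventory = configparser.ConfigParser(allow_no_value=True)
    inventory.readfp(io.StringIO(u'[OSEv3:vars]\nansible_ssh_user=root\n'))
    print(inventory.get('OSEv3:vars', 'ansible_ssh_user'))   # root
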
+ 5 - 5
utils/test/fixture.py

@@ -65,13 +65,13 @@ class OOCliFixture(OOInstallFixture):
 
     def assert_result(self, result, exit_code):
         if result.exit_code != exit_code:
-            print "Unexpected result from CLI execution"
-            print "Exit code: %s" % result.exit_code
-            print "Exception: %s" % result.exception
-            print result.exc_info
+            print("Unexpected result from CLI execution")
+            print("Exit code: %s" % result.exit_code)
+            print("Exception: %s" % result.exception)
+            print(result.exc_info)
             import traceback
             traceback.print_exception(*result.exc_info)
-            print "Output:\n%s" % result.output
+            print("Output:\n%s" % result.output)
             self.fail("Exception during CLI execution")
 
     def _verify_load_facts(self, load_facts_mock):

+ 4 - 3
utils/test/oo_config_tests.py

@@ -2,13 +2,14 @@
 # repo. We will work on these over time.
 # pylint: disable=bad-continuation,missing-docstring,no-self-use,invalid-name
 
-import cStringIO
 import os
 import unittest
 import tempfile
 import shutil
 import yaml
 
+from six.moves import cStringIO
+
 from ooinstall.oo_config import OOConfig, Host, OOConfigInvalidHostError
 import ooinstall.openshift_ansible
 
@@ -244,7 +245,7 @@ class HostTests(OOInstallFixture):
         }
 
         new_node = Host(**yaml_props)
-        inventory = cStringIO.StringIO()
+        inventory = cStringIO()
         # This is what the 'write_host' function generates. write_host
         # has no return value, it just writes directly to the file
         # 'inventory' which in this test-case is a StringIO object
@@ -285,7 +286,7 @@ class HostTests(OOInstallFixture):
     #     }
 
     #     new_node = Host(**yaml_props)
-    #     inventory = cStringIO.StringIO()
+    #     inventory = cStringIO()
 
     #     # This is what the original 'write_host' function will
     #     # generate. write_host has no return value, it just writes

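cStringIO has no direct Python 3 counterpart, so the tests use six.moves.cStringIO, which resolves to cStringIO.StringIO on Python 2 and io.StringIO on Python 3. A short sketch with made-up inventory content:

    from __future__ import print_function

    from six.moves import cStringIO

    buf = cStringIO()
    buf.write(u'[nodes]\n10.0.0.1 openshift_schedulable=False\n')
    print(buf.getvalue())
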
+ 6 - 6
utils/test/test_utils.py

@@ -2,6 +2,7 @@
 Unittests for ooinstall utils.
 """
 
+import six
 import unittest
 import logging
 import sys
@@ -28,9 +29,6 @@ class TestUtils(unittest.TestCase):
             mock.call('OO_FOO: bar'),
         ]
 
-        # python 2.x has assertItemsEqual, python 3.x has assertCountEqual
-        if sys.version_info.major > 3:
-            self.assertItemsEqual = self.assertCountEqual
 
     ######################################################################
     # Validate ooinstall.utils.debug_env functionality
@@ -40,7 +38,7 @@ class TestUtils(unittest.TestCase):
 
         with mock.patch('ooinstall.utils.installer_log') as _il:
             debug_env(self.debug_all_params)
-            print _il.debug.call_args_list
+            print(_il.debug.call_args_list)
 
             # Debug was called for each item we expect
             self.assertEqual(
@@ -48,7 +46,8 @@ class TestUtils(unittest.TestCase):
                 _il.debug.call_count)
 
             # Each item we expect was logged
-            self.assertItemsEqual(
+            six.assertCountEqual(
+                self,
                 self.expected,
                 _il.debug.call_args_list)
 
@@ -67,7 +66,8 @@ class TestUtils(unittest.TestCase):
                 _il.debug.call_count,
                 len(debug_some_params))
 
-            self.assertItemsEqual(
+            six.assertCountEqual(
+                self,
                 self.expected,
                 _il.debug.call_args_list)
 

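assertItemsEqual exists only on Python 2 and assertCountEqual only on Python 3, so the per-version shim is dropped in favour of six.assertCountEqual(testcase, first, second), which dispatches to whichever method the running interpreter provides. A minimal sketch:

    import unittest

    import six


    class ExampleTest(unittest.TestCase):
        def test_same_items_any_order(self):
            # Passes via assertItemsEqual on Python 2 and assertCountEqual on Python 3.
            six.assertCountEqual(self, [1, 2, 3], [3, 2, 1])


    if __name__ == '__main__':
        unittest.main()
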
+ 17 - 0
utils/tox.ini

@@ -0,0 +1,17 @@
+[tox]
+minversion=2.3.1
+envlist =
+    py{27,35}-{flake8,unit}
+skipsdist=True
+skip_missing_interpreters=True
+
+[testenv]
+usedevelop=True
+deps =
+    -rtest-requirements.txt
+    py35-flake8: flake8-bugbear
+
+commands =
+    flake8: flake8 --config=setup.cfg ../ --exclude="../utils,.tox,../inventory"
+    flake8: python setup.py flake8
+    unit: python setup.py nosetests
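
The envlist factor expression py{27,35}-{flake8,unit} expands into four environments (py27-flake8, py27-unit, py35-flake8, py35-unit). Running plain tox executes all of them, and a single environment can be selected locally with, for example, tox -e py35-unit. flake8-bugbear is only pulled in for the py35-flake8 factor because it requires Python 3.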