浏览代码

Merge pull request #3007 from detiber/toxification

More Toxification
Scott Dodson 8 年之前
父节点
当前提交
393eccd3a5

+ 5 - 0
.coveragerc

@@ -0,0 +1,5 @@
+[run]
+omit=
+    */lib/python*/site-packages/*
+    */lib/python*/*
+    /usr/*

+ 2 - 0
.gitignore

@@ -25,3 +25,5 @@ ansible.cfg
 .tox
 .tox
 .coverage
 .coverage
 *.egg-info
 *.egg-info
+.eggs
+cover

文件差异内容过多而无法显示
+ 86 - 87
git/.pylintrc


+ 3 - 1
.travis.yml

@@ -11,8 +11,10 @@ python:
 
 
 install:
 install:
   - pip install -r requirements.txt
   - pip install -r requirements.txt
+  - pip install tox-travis
 
 
 script:
 script:
   # TODO(rhcarvalho): check syntax of other important entrypoint playbooks
   # TODO(rhcarvalho): check syntax of other important entrypoint playbooks
   - ansible-playbook --syntax-check playbooks/byo/config.yml
   - ansible-playbook --syntax-check playbooks/byo/config.yml
-  - cd utils && make ci
+  - tox
+  - cd utils && tox

git/.yamllint → .yamllint


+ 37 - 12
CONTRIBUTING.md

@@ -66,30 +66,55 @@ These are plugins used in playbooks and roles:
 └── test                Contains tests.
 └── test                Contains tests.
 ```
 ```
 
 
-### Others
-
-```
-.
-└── git                 Contains some helper scripts for repository maintenance.
-```
-
 ## Building RPMs
 ## Building RPMs
 
 
 See the [RPM build instructions](BUILD.md).
 See the [RPM build instructions](BUILD.md).
 
 
 ## Running tests
 ## Running tests
 
 
-We use [Nose](http://readthedocs.org/docs/nose/) as a test runner. Make sure it
-is installed along with other test dependencies:
+This section covers how to run tests for the root of this repo; running tests
+for the oo-install wrapper is described in [utils/README.md](utils/README.md).
+
+We use [tox](http://readthedocs.org/docs/tox/) to manage virtualenvs and run
+tests. Alternatively, tests can be run using
+[detox](https://pypi.python.org/pypi/detox/) which allows for running tests in
+parallel.
+
 
 
 ```
 ```
-pip install -r utils/test-requirements.txt
+pip install tox detox
 ```
 ```
 
 
-Run the tests with:
+List the test environments available:
+```
+tox -l
+```
+
+Run all of the tests with:
+```
+tox
+```
+
+Run all of the tests in parallel with detox:
+```
+detox
+```
+
+Running a particular test environment (python 2.7 flake8 tests in this case):
+```
+tox -e py27-ansible22-flake8
+```
+
+Running a particular test environment in a clean virtualenv (python 3.5 pylint
+tests in this case):
+```
+tox -r -e py35-ansible22-pylint
+```
 
 
+If you want to enter the virtualenv created by tox to do additional
+testing/debugging (py27-flake8 env in this case):
 ```
 ```
-nosetests
+source .tox/py27-ansible22-flake8/bin/activate
 ```
 ```
 
 
 ## Submitting contributions
 ## Submitting contributions

+ 0 - 97
git/parent.py

@@ -1,97 +0,0 @@
-#!/usr/bin/env python
-# flake8: noqa
-# pylint: skip-file
-'''
-  Script to determine if this commit has also
-  been merged through the stage branch
-'''
-#
-#  Usage:
-#    parent_check.py <branch> <commit_id>
-#
-#
-import sys
-import subprocess
-
-def run_cli_cmd(cmd, in_stdout=None, in_stderr=None):
-    '''Run a command and return its output'''
-    if not in_stderr:
-        proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
-    else:
-        proc = subprocess.check_output(cmd, bufsize=-1, stdout=in_stdout, stderr=in_stderr, shell=False)
-    stdout, stderr = proc.communicate()
-    if proc.returncode != 0:
-        return {"rc": proc.returncode, "error": stderr}
-    else:
-        return {"rc": proc.returncode, "result": stdout}
-
-def main():
-    '''Check to ensure that the commit that is currently
-       being submitted is also in the stage branch.
-
-       if it is, succeed
-       else, fail
-    '''
-    branch = 'prod'
-
-    if sys.argv[1] != branch:
-        sys.exit(0)
-
-    # git co stg
-    results = run_cli_cmd(['/usr/bin/git', 'checkout', 'stg'])
-
-    # git pull latest
-    results = run_cli_cmd(['/usr/bin/git', 'pull'])
-
-    # setup on the <prod> branch in git
-    results = run_cli_cmd(['/usr/bin/git', 'checkout', 'prod'])
-
-    results = run_cli_cmd(['/usr/bin/git', 'pull'])
-    # merge the passed in commit into my current <branch>
-
-    commit_id = sys.argv[2]
-    results = run_cli_cmd(['/usr/bin/git', 'merge', commit_id])
-
-    # get the differences from stg and <branch>
-    results = run_cli_cmd(['/usr/bin/git', 'rev-list', '--left-right', 'stg...prod'])
-
-    # exit here with error code if the result coming back is an error
-    if results['rc'] != 0:
-        print results['error']
-        sys.exit(results['rc'])
-
-    count = 0
-    # Each 'result' is a commit
-    # Walk through each commit and see if it is in stg
-    for commit in results['result'].split('\n'):
-
-        # continue if it is already in stg
-        if not commit or commit.startswith('<'):
-            continue
-
-        # remove the first char '>'
-        commit = commit[1:]
-
-        # check if any remote branches contain $commit
-        results = run_cli_cmd(['/usr/bin/git', 'branch', '-q', '-r', '--contains', commit], in_stderr=None)
-
-        # if this comes back empty, nothing contains it, we can skip it as
-        # we have probably created the merge commit here locally
-        if results['rc'] == 0 and len(results['result']) == 0:
-            continue
-
-        # The results generally contain origin/pr/246/merge and origin/pr/246/head
-        # this is the pull request which would contain the commit in question.
-        #
-        # If the results do not contain origin/stg then stage does not contain
-        # the commit in question.  Therefore we need to alert!
-        if 'origin/stg' not in results['result']:
-            print "\nFAILED: (These commits are not in stage.)\n"
-            print "\t%s" % commit
-            count += 1
-
-    # Exit with count of commits in #{branch} but not stg
-    sys.exit(count)
-
-if __name__ == '__main__':
-    main()

+ 0 - 51
git/pylint.sh

@@ -1,51 +0,0 @@
-#!/usr/bin/env bash
-set -eu
-
-ANSIBLE_UPSTREAM_FILES=(
-    'inventory/aws/hosts/ec2.py'
-    'inventory/gce/hosts/gce.py'
-    'inventory/libvirt/hosts/libvirt_generic.py'
-    'inventory/openstack/hosts/nova.py'
-    'lookup_plugins/sequence.py'
-    'playbooks/gce/openshift-cluster/library/gce.py'
-  )
-
-OLDREV=$1
-NEWREV=$2
-#TRG_BRANCH=$3
-
-PYTHON=$(which python)
-
-set +e
-PY_DIFF=$(/usr/bin/git diff --name-only $OLDREV $NEWREV --diff-filter=ACM | grep ".py$")
-set -e
-
-FILES_TO_TEST=""
-
-for PY_FILE in $PY_DIFF; do
-  IGNORE_FILE=false
-  for UPSTREAM_FILE in "${ANSIBLE_UPSTREAM_FILES[@]}"; do
-    if [ "${PY_FILE}" == "${UPSTREAM_FILE}" ]; then
-      IGNORE_FILE=true
-      break
-    fi
-  done
-
-  if [ "${IGNORE_FILE}" == true ]; then
-    echo "Skipping file ${PY_FILE} as an upstream Ansible file..."
-    continue
-  fi
-
-  if [ -e "${PY_FILE}" ]; then
-    FILES_TO_TEST="${FILES_TO_TEST} ${PY_FILE}"
-  fi
-done
-
-export PYTHONPATH=${WORKSPACE}/utils/src/:${WORKSPACE}/utils/test/
-
-if [ "${FILES_TO_TEST}" != "" ]; then
-  echo "Testing files: ${FILES_TO_TEST}"
-  exec ${PYTHON} -m pylint --rcfile ${WORKSPACE}/git/.pylintrc ${FILES_TO_TEST}
-else
-  exit 0
-fi

+ 0 - 73
git/yaml_validation.py

@@ -1,73 +0,0 @@
-#!/usr/bin/env python
-# flake8: noqa
-#
-#  python yaml validator for a git commit
-#
-'''
-python yaml validator for a git commit
-'''
-import shutil
-import sys
-import os
-import tempfile
-import subprocess
-import yaml
-
-def get_changes(oldrev, newrev, tempdir):
-    '''Get a list of git changes from oldrev to newrev'''
-    proc = subprocess.Popen(['/usr/bin/git', 'diff', '--name-only', oldrev,
-                             newrev, '--diff-filter=ACM'], stdout=subprocess.PIPE)
-    stdout, _ = proc.communicate()
-    files = stdout.split('\n')
-
-    # No file changes
-    if not files:
-        return []
-
-    cmd = '/usr/bin/git archive %s %s | /bin/tar x -C %s' % (newrev, " ".join(files), tempdir)
-    proc = subprocess.Popen(cmd, shell=True)
-    _, _ = proc.communicate()
-
-    rfiles = []
-    for dirpath, _, fnames in os.walk(tempdir):
-        for fname in fnames:
-            rfiles.append(os.path.join(dirpath, fname))
-
-    return rfiles
-
-def main():
-    '''
-    Perform yaml validation
-    '''
-    results = []
-    try:
-        tmpdir = tempfile.mkdtemp(prefix='jenkins-git-')
-        old, new, _ = sys.argv[1:]
-
-        for file_mod in get_changes(old, new, tmpdir):
-
-            print "+++++++ Received: %s" % file_mod
-
-            # if the file extensions is not yml or yaml, move along.
-            if not file_mod.endswith('.yml') and not file_mod.endswith('.yaml'):
-                continue
-
-            # We use symlinks in our repositories, ignore them.
-            if os.path.islink(file_mod):
-                continue
-
-            try:
-                yaml.load(open(file_mod))
-                results.append(True)
-
-            except yaml.scanner.ScannerError as yerr:
-                print yerr
-                results.append(False)
-    finally:
-        shutil.rmtree(tmpdir)
-
-    if not all(results):
-        sys.exit(1)
-
-if __name__ == "__main__":
-    main()

+ 5 - 5
inventory/libvirt/hosts/libvirt_generic.py

@@ -61,11 +61,11 @@ class LibvirtInventory(object):
         self.parse_cli_args()
         self.parse_cli_args()
 
 
         if self.args.host:
         if self.args.host:
-            print _json_format_dict(self.get_host_info(), self.args.pretty)
+            print(_json_format_dict(self.get_host_info(), self.args.pretty))
         elif self.args.list:
         elif self.args.list:
-            print _json_format_dict(self.get_inventory(), self.args.pretty)
+            print(_json_format_dict(self.get_inventory(), self.args.pretty))
         else:  # default action with no options
         else:  # default action with no options
-            print _json_format_dict(self.get_inventory(), self.args.pretty)
+            print(_json_format_dict(self.get_inventory(), self.args.pretty))
 
 
     def read_settings(self):
     def read_settings(self):
         ''' Reads the settings from the libvirt.ini file '''
         ''' Reads the settings from the libvirt.ini file '''
@@ -115,12 +115,12 @@ class LibvirtInventory(object):
 
 
         conn = libvirt.openReadOnly(self.libvirt_uri)
         conn = libvirt.openReadOnly(self.libvirt_uri)
         if conn is None:
         if conn is None:
-            print "Failed to open connection to %s" % self.libvirt_uri
+            print("Failed to open connection to %s" % self.libvirt_uri)
             sys.exit(1)
             sys.exit(1)
 
 
         domains = conn.listAllDomains()
         domains = conn.listAllDomains()
         if domains is None:
         if domains is None:
-            print "Failed to list domains for connection %s" % self.libvirt_uri
+            print("Failed to list domains for connection %s" % self.libvirt_uri)
             sys.exit(1)
             sys.exit(1)
 
 
         for domain in domains:
         for domain in domains:

+ 1 - 0
openshift-ansible.spec

@@ -15,6 +15,7 @@ BuildArch:      noarch
 
 
 Requires:      ansible >= 2.2.0.0-1
 Requires:      ansible >= 2.2.0.0-1
 Requires:      python2
 Requires:      python2
+Requires:      python-six
 Requires:      openshift-ansible-docs = %{version}-%{release}
 Requires:      openshift-ansible-docs = %{version}-%{release}
 
 
 %description
 %description

+ 3 - 1
requirements.txt

@@ -1,2 +1,4 @@
-ansible>=2.1
+ansible>=2.2
+six
 pyOpenSSL
 pyOpenSSL
+PyYAML

+ 14 - 11
roles/openshift_certificate_expiry/library/openshift_cert_expiry.py

@@ -4,17 +4,13 @@
 
 
 """For details on this module see DOCUMENTATION (below)"""
 """For details on this module see DOCUMENTATION (below)"""
 
 
-# router/registry cert grabbing
-import subprocess
-# etcd config file
-import ConfigParser
-# Expiration parsing
 import datetime
 import datetime
-# File path stuff
 import os
 import os
-# Config file parsing
+import subprocess
+
+from six.moves import configparser
+
 import yaml
 import yaml
-# Certificate loading
 import OpenSSL.crypto
 import OpenSSL.crypto
 
 
 DOCUMENTATION = '''
 DOCUMENTATION = '''
@@ -260,7 +256,10 @@ Return:
 # This is our module MAIN function after all, so there's bound to be a
 # This is our module MAIN function after all, so there's bound to be a
 # lot of code bundled up into one block
 # lot of code bundled up into one block
 #
 #
-# pylint: disable=too-many-locals,too-many-locals,too-many-statements,too-many-branches
+# Reason: These checks are disabled because the issue was introduced
+# during a period where the pylint checks weren't enabled for this file
+# Status: temporarily disabled pending future refactoring
+# pylint: disable=too-many-locals,too-many-statements,too-many-branches
 def main():
 def main():
     """This module examines certificates (in various forms) which compose
     """This module examines certificates (in various forms) which compose
 an OpenShift Container Platform cluster
 an OpenShift Container Platform cluster
@@ -479,13 +478,17 @@ an OpenShift Container Platform cluster
     etcd_cert_params.append('dne')
     etcd_cert_params.append('dne')
     try:
     try:
         with open('/etc/etcd/etcd.conf', 'r') as fp:
         with open('/etc/etcd/etcd.conf', 'r') as fp:
-            etcd_config = ConfigParser.ConfigParser()
+            etcd_config = configparser.ConfigParser()
+            # Reason: This check is disabled because the issue was introduced
+            # during a period where the pylint checks weren't enabled for this file
+            # Status: temporarily disabled pending future refactoring
+            # pylint: disable=deprecated-method
             etcd_config.readfp(FakeSecHead(fp))
             etcd_config.readfp(FakeSecHead(fp))
 
 
         for param in etcd_cert_params:
         for param in etcd_cert_params:
             try:
             try:
                 etcd_certs_to_check.add(etcd_config.get('ETCD', param))
                 etcd_certs_to_check.add(etcd_config.get('ETCD', param))
-            except ConfigParser.NoOptionError:
+            except configparser.NoOptionError:
                 # That parameter does not exist, oh well...
                 # That parameter does not exist, oh well...
                 pass
                 pass
     except IOError:
     except IOError:

+ 7 - 14
roles/openshift_facts/library/openshift_facts.py

@@ -7,13 +7,6 @@
 
 
 """Ansible module for retrieving and setting openshift related facts"""
 """Ansible module for retrieving and setting openshift related facts"""
 
 
-try:
-    # python2
-    import ConfigParser
-except ImportError:
-    # python3
-    import configparser as ConfigParser
-
 # pylint: disable=no-name-in-module, import-error, wrong-import-order
 # pylint: disable=no-name-in-module, import-error, wrong-import-order
 import copy
 import copy
 import errno
 import errno
@@ -26,8 +19,8 @@ import struct
 import socket
 import socket
 from distutils.util import strtobool
 from distutils.util import strtobool
 from distutils.version import LooseVersion
 from distutils.version import LooseVersion
-from six import string_types
-from six import text_type
+from six import string_types, text_type
+from six.moves import configparser
 
 
 # ignore pylint errors related to the module_utils import
 # ignore pylint errors related to the module_utils import
 # pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import
 # pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import
@@ -776,7 +769,7 @@ def set_etcd_facts_if_unset(facts):
             # Add a fake section for parsing:
             # Add a fake section for parsing:
             ini_str = text_type('[root]\n' + open('/etc/etcd/etcd.conf', 'r').read(), 'utf-8')
             ini_str = text_type('[root]\n' + open('/etc/etcd/etcd.conf', 'r').read(), 'utf-8')
             ini_fp = io.StringIO(ini_str)
             ini_fp = io.StringIO(ini_str)
-            config = ConfigParser.RawConfigParser()
+            config = configparser.RawConfigParser()
             config.readfp(ini_fp)
             config.readfp(ini_fp)
             etcd_data_dir = config.get('root', 'ETCD_DATA_DIR')
             etcd_data_dir = config.get('root', 'ETCD_DATA_DIR')
             if etcd_data_dir.startswith('"') and etcd_data_dir.endswith('"'):
             if etcd_data_dir.startswith('"') and etcd_data_dir.endswith('"'):
@@ -1299,7 +1292,7 @@ def get_hosted_registry_insecure():
         try:
         try:
             ini_str = text_type('[root]\n' + open('/etc/sysconfig/docker', 'r').read(), 'utf-8')
             ini_str = text_type('[root]\n' + open('/etc/sysconfig/docker', 'r').read(), 'utf-8')
             ini_fp = io.StringIO(ini_str)
             ini_fp = io.StringIO(ini_str)
-            config = ConfigParser.RawConfigParser()
+            config = configparser.RawConfigParser()
             config.readfp(ini_fp)
             config.readfp(ini_fp)
             options = config.get('root', 'OPTIONS')
             options = config.get('root', 'OPTIONS')
             if 'insecure-registry' in options:
             if 'insecure-registry' in options:
@@ -1568,15 +1561,15 @@ def get_local_facts_from_file(filename):
     local_facts = dict()
     local_facts = dict()
     try:
     try:
         # Handle conversion of INI style facts file to json style
         # Handle conversion of INI style facts file to json style
-        ini_facts = ConfigParser.SafeConfigParser()
+        ini_facts = configparser.SafeConfigParser()
         ini_facts.read(filename)
         ini_facts.read(filename)
         for section in ini_facts.sections():
         for section in ini_facts.sections():
             local_facts[section] = dict()
             local_facts[section] = dict()
             for key, value in ini_facts.items(section):
             for key, value in ini_facts.items(section):
                 local_facts[section][key] = value
                 local_facts[section][key] = value
 
 
-    except (ConfigParser.MissingSectionHeaderError,
-            ConfigParser.ParsingError):
+    except (configparser.MissingSectionHeaderError,
+            configparser.ParsingError):
         try:
         try:
             with open(filename, 'r') as facts_file:
             with open(filename, 'r') as facts_file:
                 local_facts = json.load(facts_file)
                 local_facts = json.load(facts_file)

+ 27 - 0
setup.cfg

@@ -0,0 +1,27 @@
+[bdist_wheel]
+# This flag says that the code is written to work on both Python 2 and Python
+# 3. If at all possible, it is good practice to do this. If you cannot, you
+# will need to generate wheels for each Python version that you support.
+universal=1
+
+[nosetests]
+tests=roles/openshift_master_facts/test/, test/
+verbosity=2
+with-coverage=1
+cover-html=1
+cover-inclusive=1
+cover-min-percentage=70
+cover-erase=1
+detailed-errors=1
+cover-branches=1
+
+[yamllint]
+excludes=.tox,utils,files
+
+[lint]
+lint_disable=fixme,locally-disabled,file-ignored,duplicate-code
+
+[flake8]
+exclude=.tox/*,setup.py,utils/*,inventory/*
+max_line_length = 120
+ignore = E501,T003

+ 191 - 0
setup.py

@@ -0,0 +1,191 @@
+"""A setuptools based setup module.
+
+"""
+from __future__ import print_function
+
+import os
+import fnmatch
+import re
+
+import yaml
+
+# Always prefer setuptools over distutils
+from setuptools import setup, Command
+from setuptools_lint.setuptools_command import PylintCommand
+from six import string_types
+from yamllint.config import YamlLintConfig
+from yamllint.cli import Format
+from yamllint import linter
+
+def find_files(base_dir, exclude_dirs, include_dirs, file_regex):
+    ''' find files matching file_regex '''
+    found = []
+    exclude_regex = ''
+    include_regex = ''
+
+    if exclude_dirs is not None:
+        exclude_regex = r'|'.join([fnmatch.translate(x) for x in exclude_dirs]) or r'$.'
+
+    if include_dirs is not None:
+        include_regex = r'|'.join([fnmatch.translate(x) for x in include_dirs]) or r'$.'
+
+    for root, dirs, files in os.walk(base_dir):
+        if exclude_dirs is not None:
+            # filter out excludes for dirs
+            dirs[:] = [d for d in dirs if not re.match(exclude_regex, d)]
+
+        if include_dirs is not None:
+            # filter for includes for dirs
+            dirs[:] = [d for d in dirs if re.match(include_regex, d)]
+
+        matches = [os.path.join(root, f) for f in files if re.search(file_regex, f) is not None]
+        found.extend(matches)
+
+    return found
+
+
+class OpenShiftAnsibleYamlLint(Command):
+    ''' Command to run yamllint '''
+    description = "Run yamllint tests"
+    user_options = [
+        ('excludes=', 'e', 'directories to exclude'),
+        ('config-file=', 'c', 'config file to use'),
+        ('format=', 'f', 'format to use (standard, parsable)'),
+    ]
+
+    def initialize_options(self):
+        ''' initialize_options '''
+        # Reason: Defining these attributes as a part of initialize_options is
+        # consistent with upstream usage
+        # Status: permanently disabled
+        # pylint: disable=attribute-defined-outside-init
+        self.excludes = None
+        self.config_file = None
+        self.format = None
+
+    def finalize_options(self):
+        ''' finalize_options '''
+        # Reason: These attributes are defined in initialize_options and this
+        # usage is consistent with upstream usage
+        # Status: permanently disabled
+        # pylint: disable=attribute-defined-outside-init
+        if isinstance(self.excludes, string_types):
+            self.excludes = self.excludes.split(',')
+        if self.format is None:
+            self.format = 'standard'
+        assert (self.format in ['standard', 'parsable']), (
+            'unknown format {0}.'.format(self.format))
+        if self.config_file is None:
+            self.config_file = '.yamllint'
+        assert os.path.isfile(self.config_file), (
+            'yamllint config file {0} does not exist.'.format(self.config_file))
+
+    def run(self):
+        ''' run command '''
+        if self.excludes is not None:
+            print("Excludes:\n{0}".format(yaml.dump(self.excludes, default_flow_style=False)))
+
+        config = YamlLintConfig(file=self.config_file)
+
+        has_errors = False
+        has_warnings = False
+
+        if self.format == 'parsable':
+            format_method = Format.parsable
+        else:
+            format_method = Format.standard_color
+
+        for yaml_file in find_files(os.getcwd(), self.excludes, None, r'\.ya?ml$'):
+            first = True
+            with open(yaml_file, 'r') as contents:
+                for problem in linter.run(contents, config):
+                    if first and self.format != 'parsable':
+                        print('\n{0}:'.format(os.path.relpath(yaml_file)))
+                        first = False
+
+                    print(format_method(problem, yaml_file))
+                    if problem.level == linter.PROBLEM_LEVELS['error']:
+                        has_errors = True
+                    elif problem.level == linter.PROBLEM_LEVELS['warning']:
+                        has_warnings = True
+
+        assert not has_errors, 'yamllint errors found'
+        assert not has_warnings, 'yamllint warnings found'
+
+
+class OpenShiftAnsiblePylint(PylintCommand):
+    ''' Class to override the default behavior of PylintCommand '''
+
+    # Reason: This method needs to be an instance method to conform to the
+    # overridden method's signature
+    # Status: permanently disabled
+    # pylint: disable=no-self-use
+    def find_all_modules(self):
+        ''' find all python files to test '''
+        exclude_dirs = ['.tox', 'utils', 'test', 'tests', 'git']
+        modules = []
+        for match in find_files(os.getcwd(), exclude_dirs, None, r'\.py$'):
+            package = os.path.basename(match).replace('.py', '')
+            modules.append(('openshift_ansible', package, match))
+        return modules
+
+    def get_finalized_command(self, cmd):
+        ''' override get_finalized_command to ensure we use our
+        find_all_modules method '''
+        if cmd == 'build_py':
+            return self
+
+    # Reason: This method needs to be an instance method to conform to the
+    # overridden method's signature
+    # Status: permanently disabled
+    # pylint: disable=no-self-use
+    def with_project_on_sys_path(self, func, func_args, func_kwargs):
+        ''' override behavior, since we don't need to build '''
+        return func(*func_args, **func_kwargs)
+
+
+class UnsupportedCommand(Command):
+    ''' Basic Command to override unsupported commands '''
+    user_options = []
+
+    # Reason: This method needs to be an instance method to conform to the
+    # overridden method's signature
+    # Status: permanently disabled
+    # pylint: disable=no-self-use
+    def initialize_options(self):
+        ''' initialize_options '''
+        pass
+
+    # Reason: This method needs to be an instance method to conform to the
+    # overridden method's signature
+    # Status: permanently disabled
+    # pylint: disable=no-self-use
+    def finalize_options(self):
+        ''' finalize_options '''
+        pass
+
+    # Reason: This method needs to be an instance method to conform to the
+    # overridden method's signature
+    # Status: permanently disabled
+    # pylint: disable=no-self-use
+    def run(self):
+        ''' run command '''
+        print("Unsupported command for openshift-ansible")
+
+
+setup(
+    name='openshift-ansible',
+    license="Apache 2.0",
+    cmdclass={
+        'install': UnsupportedCommand,
+        'develop': UnsupportedCommand,
+        'build': UnsupportedCommand,
+        'build_py': UnsupportedCommand,
+        'build_ext': UnsupportedCommand,
+        'egg_info': UnsupportedCommand,
+        'sdist': UnsupportedCommand,
+        'lint': OpenShiftAnsiblePylint,
+        'yamllint': OpenShiftAnsibleYamlLint,
+    },
+    packages=[],
+)

+ 11 - 0
test-requirements.txt

@@ -0,0 +1,11 @@
+six
+pyOpenSSL
+flake8
+flake8-mutable
+flake8-print
+pylint
+setuptools-lint
+PyYAML
+yamllint
+nose
+coverage

+ 19 - 0
tox.ini

@@ -0,0 +1,19 @@
+[tox]
+minversion=2.3.1
+envlist =
+    py{27,35}-ansible22-{pylint,unit,flake8}
+    yamllint
+skipsdist=True
+skip_missing_interpreters=True
+
+[testenv]
+deps =
+    -rtest-requirements.txt
+    py35-flake8: flake8-bugbear
+    ansible22: ansible~=2.2
+
+commands =
+    flake8: flake8
+    pylint: python setup.py lint
+    yamllint: python setup.py yamllint
+    unit: nosetests

+ 1 - 0
utils/.pylintrc

@@ -0,0 +1 @@
+../.pylintrc

+ 8 - 22
utils/Makefile

@@ -46,7 +46,7 @@ clean:
 	@find . -type f \( -name "*~" -or -name "#*" \) -delete
 	@find . -type f \( -name "*~" -or -name "#*" \) -delete
 	@rm -fR build dist rpm-build MANIFEST htmlcov .coverage cover ooinstall.egg-info oo-install
 	@rm -fR build dist rpm-build MANIFEST htmlcov .coverage cover ooinstall.egg-info oo-install
 	@rm -fR $(VENV)
 	@rm -fR $(VENV)
-
+	@rm -fR .tox
 
 
 # To force a rebuild of the docs run 'touch' on any *.in file under
 # To force a rebuild of the docs run 'touch' on any *.in file under
 # docs/man/man1/
 # docs/man/man1/
@@ -84,41 +84,27 @@ ci-unittests: $(VENV)
 	@echo "#############################################"
 	@echo "#############################################"
 	@echo "# Running Unit Tests in virtualenv"
 	@echo "# Running Unit Tests in virtualenv"
 	@echo "#############################################"
 	@echo "#############################################"
-	. $(VENV)/bin/activate && tox -e py27-unit
-	. $(VENV)/bin/activate && tox -e py35-unit
+	. $(VENV)/bin/activate && detox -e py27-unit,py35-unit
 	@echo "VIEW CODE COVERAGE REPORT WITH 'xdg-open cover/index.html' or run 'make viewcover'"
 	@echo "VIEW CODE COVERAGE REPORT WITH 'xdg-open cover/index.html' or run 'make viewcover'"
 
 
 ci-pylint: $(VENV)
 ci-pylint: $(VENV)
 	@echo "#############################################"
 	@echo "#############################################"
 	@echo "# Running PyLint Tests in virtualenv"
 	@echo "# Running PyLint Tests in virtualenv"
 	@echo "#############################################"
 	@echo "#############################################"
-	. $(VENV)/bin/activate && python -m pylint --rcfile ../git/.pylintrc $(PYFILES)
-
-ci-yamllint: $(VENV)
-	@echo "#############################################"
-	@echo "# Running yamllint Tests in virtualenv"
-	@echo "#############################################"
-	@. $(VENV)/bin/activate && yamllint -c ../git/.yamllint $(YAMLFILES)
-
-ci-list-deps: $(VENV)
-	@echo "#############################################"
-	@echo "# Listing all pip deps"
-	@echo "#############################################"
-	. $(VENV)/bin/activate && pip freeze
+	. $(VENV)/bin/activate && detox -e py27-pylint,py35-pylint
 
 
 ci-flake8: $(VENV)
 ci-flake8: $(VENV)
 	@echo "#############################################"
 	@echo "#############################################"
 	@echo "# Running Flake8 Compliance Tests in virtualenv"
 	@echo "# Running Flake8 Compliance Tests in virtualenv"
 	@echo "#############################################"
 	@echo "#############################################"
-	. $(VENV)/bin/activate && tox -e py27-flake8
-	. $(VENV)/bin/activate && tox -e py35-flake8
+	. $(VENV)/bin/activate && detox -e py27-flake8,py35-flake8
 
 
-ci-tox:
-	. $(VENV)/bin/activate && tox
+ci-tox: $(VENV)
+	. $(VENV)/bin/activate && detox
 
 
-ci: ci-list-deps ci-tox ci-pylint ci-yamllint
+ci: ci-tox
 	@echo
 	@echo
 	@echo "##################################################################################"
 	@echo "##################################################################################"
 	@echo "VIEW CODE COVERAGE REPORT WITH 'xdg-open cover/index.html' or run 'make viewcover'"
 	@echo "VIEW CODE COVERAGE REPORT WITH 'xdg-open cover/index.html' or run 'make viewcover'"
 	@echo "To clean your test environment run 'make clean'"
 	@echo "To clean your test environment run 'make clean'"
-	@echo "Other targets you may run with 'make': 'ci-pylint', 'ci-tox', 'ci-unittests', 'ci-flake8', 'ci-yamllint'"
+	@echo "Other targets you may run with 'make': 'ci-pylint', 'ci-tox', 'ci-unittests', 'ci-flake8'"

+ 41 - 0
utils/README.md

@@ -6,6 +6,47 @@ Run the command:
 
 
 to run an array of unittests locally.
 to run an array of unittests locally.
 
 
+Underneath the covers, we use [tox](http://readthedocs.org/docs/tox/) to manage virtualenvs and run
+tests. Alternatively, tests can be run using [detox](https://pypi.python.org/pypi/detox/) which allows
+for running tests in parallel.
+
+
+```
+pip install tox detox
+```
+
+List the test environments available:
+```
+tox -l
+```
+
+Run all of the tests with:
+```
+tox
+```
+
+Run all of the tests in parallel with detox:
+```
+detox
+```
+
+Running a particular test environment (python 2.7 flake8 tests in this case):
+```
+tox -e py27-ansible22-flake8
+```
+
+Running a particular test environment in a clean virtualenv (python 3.5 pylint
+tests in this case):
+```
+tox -r -e py35-ansible22-pylint
+```
+
+If you want to enter the virtualenv created by tox to do additional
+testing/debugging (py27-flake8 env in this case):
+```
+source .tox/py27-ansible22-flake8/bin/activate
+```
+
 You will get errors if the log files already exist and can not be
 You will get errors if the log files already exist and can not be
 written to by the current user (`/tmp/ansible.log` and
 written to by the current user (`/tmp/ansible.log` and
 `/tmp/installer.txt`). *We're working on it.*
 `/tmp/installer.txt`). *We're working on it.*

+ 3 - 1
utils/setup.cfg

@@ -5,7 +5,6 @@
 universal=1
 universal=1
 
 
 [nosetests]
 [nosetests]
-tests=../,../roles/openshift_master_facts/test/,test/
 verbosity=2
 verbosity=2
 with-coverage=1
 with-coverage=1
 cover-html=1
 cover-html=1
@@ -19,3 +18,6 @@ cover-branches=1
 max-line-length=120
 max-line-length=120
 exclude=test/*,setup.py,oo-installenv
 exclude=test/*,setup.py,oo-installenv
 ignore=E501
 ignore=E501
+
+[lint]
+lint_disable=fixme,locally-disabled,file-ignored,duplicate-code

+ 2 - 0
utils/test-requirements.txt

@@ -1,6 +1,7 @@
 ansible
 ansible
 configparser
 configparser
 pylint
 pylint
+setuptools-lint
 nose
 nose
 coverage
 coverage
 mock
 mock
@@ -11,3 +12,4 @@ backports.functools_lru_cache
 pyOpenSSL
 pyOpenSSL
 yamllint
 yamllint
 tox
 tox
+detox

+ 73 - 0
utils/test/openshift_ansible_tests.py

@@ -0,0 +1,73 @@
+import os
+import unittest
+import tempfile
+import shutil
+import yaml
+
+from six.moves import configparser
+
+from ooinstall import openshift_ansible
+from ooinstall.oo_config import Host, OOConfig
+
+
+BASE_CONFIG = """
+---
+variant: openshift-enterprise
+variant_version: 3.3
+version: v2
+deployment:
+    ansible_ssh_user: cloud-user
+    hosts: []
+    roles:
+        master:
+        node:
+"""
+
+
+class TestOpenShiftAnsible(unittest.TestCase):
+
+    def setUp(self):
+        self.tempfiles = []
+        self.work_dir = tempfile.mkdtemp(prefix='openshift_ansible_tests')
+        self.configfile = os.path.join(self.work_dir, 'ooinstall.config')
+        with open(self.configfile, 'w') as config_file:
+            config_file.write(BASE_CONFIG)
+        self.inventory = os.path.join(self.work_dir, 'hosts')
+        config = OOConfig(self.configfile)
+        config.settings['ansible_inventory_path'] = self.inventory
+        openshift_ansible.set_config(config)
+
+    def tearDown(self):
+        shutil.rmtree(self.work_dir)
+
+    def generate_hosts(self, num_hosts, name_prefix, roles=None, new_host=False):
+        hosts = []
+        for num in range(1, num_hosts + 1):
+            hosts.append(Host(connect_to=name_prefix + str(num),
+                              roles=roles, new_host=new_host))
+        return hosts
+
+    def test_generate_inventory_new_nodes(self):
+        hosts = self.generate_hosts(1, 'master', roles=(['master', 'etcd']))
+        hosts.extend(self.generate_hosts(1, 'node', roles=['node']))
+        hosts.extend(self.generate_hosts(1, 'new_node', roles=['node'], new_host=True))
+        openshift_ansible.generate_inventory(hosts)
+        inventory = configparser.ConfigParser(allow_no_value=True)
+        inventory.read(self.inventory)
+        self.assertTrue(inventory.has_section('new_nodes'))
+        self.assertTrue(inventory.has_option('new_nodes', 'new_node1'))
+
+    def test_write_inventory_vars_role_vars(self):
+        print(yaml.dump(openshift_ansible.CFG.deployment.roles))
+        with open(self.inventory, 'w') as inv:
+            openshift_ansible.CFG.deployment.roles['master'].variables={'color': 'blue'}
+            openshift_ansible.CFG.deployment.roles['node'].variables={'color': 'green'}
+            openshift_ansible.write_inventory_vars(inv, None)
+
+        inventory = configparser.ConfigParser(allow_no_value=True)
+        inventory.read(self.inventory)
+        print(inventory.sections())
+        self.assertTrue(inventory.has_section('masters:vars'))
+        self.assertEquals('blue', inventory.get('masters:vars', 'color'))
+        self.assertTrue(inventory.has_section('nodes:vars'))
+        self.assertEquals('green', inventory.get('nodes:vars', 'color'))

+ 2 - 3
utils/tox.ini

@@ -1,7 +1,7 @@
 [tox]
 [tox]
 minversion=2.3.1
 minversion=2.3.1
 envlist =
 envlist =
-    py{27,35}-{flake8,unit}
+    py{27,35}-{flake8,unit,pylint}
 skipsdist=True
 skipsdist=True
 skip_missing_interpreters=True
 skip_missing_interpreters=True
 
 
@@ -10,8 +10,7 @@ usedevelop=True
 deps =
 deps =
     -rtest-requirements.txt
     -rtest-requirements.txt
     py35-flake8: flake8-bugbear
     py35-flake8: flake8-bugbear
-
 commands =
 commands =
-    flake8: flake8 --config=setup.cfg ../ --exclude="../utils,.tox,../inventory"
     flake8: python setup.py flake8
     flake8: python setup.py flake8
     unit: python setup.py nosetests
     unit: python setup.py nosetests
+    pylint: python setup.py lint