Merge pull request #3816 from sosiouxme/20170328-integration-tests

Merged by openshift-bot
OpenShift Bot 8 years ago
parent commit c12b009449
30 changed files with 948 additions and 1 deletion
  1. test/integration/README.md (+39 -0)
  2. test/integration/build-images.sh (+101 -0)
  3. test/integration/openshift_health_checker/builds/aos-package-checks/Dockerfile (+30 -0)
  4. test/integration/openshift_health_checker/builds/aos-package-checks/root/etc/yum.repos.d/break-yum.repo (+5 -0)
  5. test/integration/openshift_health_checker/builds/aos-package-checks/root/etc/yum.repos.d/ose-3.2.repo (+5 -0)
  6. test/integration/openshift_health_checker/builds/aos-package-checks/root/etc/yum.repos.d/ose-3.3.repo (+5 -0)
  7. test/integration/openshift_health_checker/builds/aos-package-checks/root/root/break-yum-update-2.spec (+33 -0)
  8. test/integration/openshift_health_checker/builds/aos-package-checks/root/root/break-yum-update.spec (+32 -0)
  9. test/integration/openshift_health_checker/builds/aos-package-checks/root/root/ose-3.2.spec (+44 -0)
  10. test/integration/openshift_health_checker/builds/aos-package-checks/root/root/ose-3.3.spec (+44 -0)
  11. test/integration/openshift_health_checker/builds/test-target-base/Dockerfile (+2 -0)
  12. test/integration/openshift_health_checker/common.go (+99 -0)
  13. test/integration/openshift_health_checker/preflight/playbooks/package_availability_missing_required.yml (+20 -0)
  14. test/integration/openshift_health_checker/preflight/playbooks/package_availability_succeeds.yml (+20 -0)
  15. test/integration/openshift_health_checker/preflight/playbooks/package_update_dep_missing.yml (+24 -0)
  16. test/integration/openshift_health_checker/preflight/playbooks/package_update_repo_broken.yml (+31 -0)
  17. test/integration/openshift_health_checker/preflight/playbooks/package_update_repo_disabled.yml (+21 -0)
  18. test/integration/openshift_health_checker/preflight/playbooks/package_update_repo_unreachable.yml (+27 -0)
  19. test/integration/openshift_health_checker/preflight/playbooks/package_version_matches.yml (+24 -0)
  20. test/integration/openshift_health_checker/preflight/playbooks/package_version_mismatches.yml (+24 -0)
  21. test/integration/openshift_health_checker/preflight/playbooks/package_version_multiple.yml (+26 -0)
  22. test/integration/openshift_health_checker/preflight/playbooks/package_version_origin.yml (+20 -0)
  23. test/integration/openshift_health_checker/preflight/playbooks/roles (+1 -0)
  24. test/integration/openshift_health_checker/preflight/playbooks/tasks/enable_repo.yml (+9 -0)
  25. test/integration/openshift_health_checker/preflight/preflight_test.go (+105 -0)
  26. test/integration/openshift_health_checker/setup_container.yml (+45 -0)
  27. test/integration/openshift_health_checker/teardown_container.yml (+23 -0)
  28. test/integration/run-tests.sh (+80 -0)
  29. test/modify_yaml_tests.py (+1 -1)
  30. tox.ini (+8 -0)

+ 39 - 0
test/integration/README.md

@@ -0,0 +1,39 @@
+# Integration tests
+
+Integration tests exercise the OpenShift Ansible playbooks by running them
+against an inventory with Docker containers as hosts.
+
+## Requirements
+
+The tests assume that:
+
+* docker is running on localhost and the current user has access to use it.
+* golang is installed and the go binary is in PATH.
+* python and tox are installed.
+
+## Building images
+
+The tests rely on images available in the local docker image store. You can build them
+from the repository root with:
+
+```
+./test/integration/build-images.sh
+```
+
+Use the `--help` option to view available options.
+
+## Running the tests
+
+From the repository root, run the integration tests with:
+
+```
+./test/integration/run-tests.sh
+```
+
+Use the `--help` option to view available options.
+
+You can also run tests more directly, for example to run a specific check:
+
+```
+go test ./test/integration/... -run TestPackageUpdateDepMissing
+```
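
Since `run-tests.sh` passes the image prefix to the Go tests via the `IMAGE_PREFIX`
environment variable (consumed by `setup_container.yml`), a single check can also be
run against images built with a non-default prefix. A minimal sketch, assuming the
images were already built with the prefix shown:

```
# Run one check against images tagged with a custom prefix.
# Assumes build-images.sh was run with the same --prefix value.
IMAGE_PREFIX=docker.io/openshift/ansible-integration- \
  go test ./test/integration/... -run TestPackageVersionMatches -v
```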

+ 101 - 0
test/integration/build-images.sh

@@ -0,0 +1,101 @@
+#!/bin/bash
+
+# This is intended to run either locally (in which case a push is not
+# necessary) or in a CI job (where the results should be pushed to a
+# registry for use in later CI test jobs). Images are tagged locally with
+# both the base name (e.g. "test-target-base") and with the prefix given;
+# then only the prefixed name is pushed if --push is specified, assuming
+# any necessary credentials are available for the push. The same prefix
+# can then be used for the testing script. By default a local (non-registry)
+# prefix is used and no push can occur. To push to e.g. dockerhub:
+#
+# ./build-images.sh --push --prefix=docker.io/openshift/ansible-integration-
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+STARTTIME=$(date +%s)
+source_root=$(dirname "${0}")
+
+prefix="${PREFIX:-openshift-ansible-integration-}"
+push=false
+verbose=false
+build_options="${DOCKER_BUILD_OPTIONS:-}"
+help=false
+
+for args in "$@"
+do
+  case $args in
+      --prefix=*)
+        prefix="${args#*=}"
+        ;;
+      --push)
+        push=true
+        ;;
+      --no-cache)
+        build_options="${build_options} --no-cache"
+        ;;
+      --verbose)
+        verbose=true
+        ;;
+      --help)
+        help=true
+        ;;
+  esac
+done
+
+if [ "$help" = true ]; then
+  echo "Builds the docker images for openshift-ansible integration tests"
+  echo "and pushes them to a central registry."
+  echo
+  echo "Options: "
+  echo "  --prefix=PREFIX"
+  echo "  The prefix to use for the image names."
+  echo "  default: openshift-ansible-integration-"
+  echo
+  echo "  --push"
+  echo "  If set will push the tagged image"
+  echo 
+  echo "  --no-cache"
+  echo "  If set will perform the build without a cache."
+  echo
+  echo "  --verbose"
+  echo "  Enables printing of the commands as they run."
+  echo
+  echo "  --help"
+  echo "  Prints this help message"
+  echo
+  exit 0
+fi
+
+if [ "$verbose" = true ]; then
+  set -x
+fi
+
+
+declare -a build_order                       ; declare -A images
+build_order+=( test-target-base )            ; images[test-target-base]=openshift_health_checker/builds/test-target-base
+build_order+=( preflight-aos-package-checks ); images[preflight-aos-package-checks]=openshift_health_checker/builds/aos-package-checks
+for image in "${build_order[@]}"; do
+  BUILD_STARTTIME=$(date +%s)
+  docker_tag=${prefix}${image}
+  echo
+  echo "--- Building component '$image' with docker tag '$docker_tag' ---"
+  docker build ${build_options} -t $image -t $docker_tag "$source_root/${images[$image]}"
+  echo
+  BUILD_ENDTIME=$(date +%s); echo "--- build $docker_tag took $(($BUILD_ENDTIME - $BUILD_STARTTIME)) seconds ---"
+  if [ "$push" = true ]; then
+    docker push $docker_tag
+    PUSH_ENDTIME=$(date +%s); echo "--- push $docker_tag took $(($PUSH_ENDTIME - $BUILD_ENDTIME)) seconds ---"
+  fi
+done
+
+echo
+echo
+echo "++ Active images"
+docker images | grep ${prefix} | sort
+echo
+
+
+ret=$?; ENDTIME=$(date +%s); echo "$0 took $(($ENDTIME - $STARTTIME)) seconds"; exit "$ret"
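
For a purely local run the defaults suffice. A quick sketch of building and then
verifying the result; the `docker images | grep` mirrors the script's own summary step:

```
# Build with the default local prefix; nothing is pushed.
./test/integration/build-images.sh

# Confirm both images are tagged under the default prefix.
docker images | grep openshift-ansible-integration- | sort
```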

+ 30 - 0
test/integration/openshift_health_checker/builds/aos-package-checks/Dockerfile

@@ -0,0 +1,30 @@
+FROM test-target-base
+
+RUN yum install -y rpm-build rpmdevtools createrepo && \
+    rpmdev-setuptree && \
+    mkdir -p /mnt/localrepo
+ADD root /
+
+# we will build some RPMs that can be used to break yum update in tests.
+RUN cd /root/rpmbuild/SOURCES && \
+    mkdir break-yum-update-1.0 && \
+    tar zfc foo.tgz break-yum-update-1.0 && \
+    rpmbuild -bb /root/break-yum-update.spec  && \
+    yum install -y /root/rpmbuild/RPMS/noarch/break-yum-update-1.0-1.noarch.rpm && \
+    rpmbuild -bb /root/break-yum-update-2.spec  && \
+    mkdir /mnt/localrepo/break-yum && \
+    cp /root/rpmbuild/RPMS/noarch/break-yum-update-1.0-2.noarch.rpm /mnt/localrepo/break-yum && \
+    createrepo /mnt/localrepo/break-yum
+
+# we'll also build some RPMs that can be used to exercise OCP package version tests.
+RUN cd /root/rpmbuild/SOURCES && \
+    mkdir atomic-openshift-3.2 && \
+    mkdir atomic-openshift-3.3 && \
+    tar zfc ose.tgz atomic-openshift-3.{2,3} && \
+    rpmbuild -bb /root/ose-3.2.spec  && \
+    rpmbuild -bb /root/ose-3.3.spec  && \
+    mkdir /mnt/localrepo/ose-3.{2,3} && \
+    cp /root/rpmbuild/RPMS/noarch/atomic-openshift*-3.2-1.noarch.rpm /mnt/localrepo/ose-3.2 && \
+    createrepo /mnt/localrepo/ose-3.2 && \
+    cp /root/rpmbuild/RPMS/noarch/atomic-openshift*-3.3-1.noarch.rpm /mnt/localrepo/ose-3.3 && \
+    createrepo /mnt/localrepo/ose-3.3
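
The break-yum repo's effect can be checked by hand. A hypothetical smoke test, assuming
the image was built with the default prefix (the container name is illustrative only):

```
# Start a throwaway container from the package-checks image.
docker run -d --name break-yum-demo \
  openshift-ansible-integration-preflight-aos-package-checks sleep 600

# Enable the repo and attempt an update; it should fail because
# break-yum-update-1.0-2 requires package-that-does-not-exist.
docker exec break-yum-demo yum-config-manager --enable break-yum
docker exec break-yum-demo yum update -y || echo "yum update failed as expected"

# Clean up.
docker rm -f break-yum-demo
```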

+ 5 - 0
test/integration/openshift_health_checker/builds/aos-package-checks/root/etc/yum.repos.d/break-yum.repo

@@ -0,0 +1,5 @@
+[break-yum]
+name=break-yum
+baseurl=file:///mnt/localrepo/break-yum
+enabled=0
+gpgcheck=0

+ 5 - 0
test/integration/openshift_health_checker/builds/aos-package-checks/root/etc/yum.repos.d/ose-3.2.repo

@@ -0,0 +1,5 @@
+[ose-3.2]
+name=ose-3.2
+baseurl=file:///mnt/localrepo/ose-3.2
+enabled=0
+gpgcheck=0

+ 5 - 0
test/integration/openshift_health_checker/builds/aos-package-checks/root/etc/yum.repos.d/ose-3.3.repo

@@ -0,0 +1,5 @@
+[ose-3.3]
+name=ose-3.3
+baseurl=file:///mnt/localrepo/ose-3.3
+enabled=0
+gpgcheck=0

+ 33 - 0
test/integration/openshift_health_checker/builds/aos-package-checks/root/root/break-yum-update-2.spec

@@ -0,0 +1,33 @@
+Name:           break-yum-update
+Version:        1.0
+Release:        2
+Summary:        Package for breaking updates by requiring things that don't exist
+
+License:        NA
+
+Requires:	package-that-does-not-exist
+Source0:	http://example.com/foo.tgz
+BuildArch:	noarch
+
+%description
+Package for breaking updates by requiring things that don't exist
+
+
+%prep
+%setup -q
+
+
+%build
+
+
+%install
+rm -rf $RPM_BUILD_ROOT
+mkdir -p $RPM_BUILD_ROOT
+
+
+%files
+%doc
+
+
+
+%changelog

+ 32 - 0
test/integration/openshift_health_checker/builds/aos-package-checks/root/root/break-yum-update.spec

@@ -0,0 +1,32 @@
+Name:           break-yum-update
+Version:        1.0
+Release:        1
+Summary:        Package for breaking updates by requiring things that don't exist
+
+License:        NA
+
+Source0:	http://example.com/foo.tgz
+BuildArch:	noarch
+
+%description
+Package for breaking updates by requiring things that don't exist
+
+
+%prep
+%setup -q
+
+
+%build
+
+
+%install
+rm -rf $RPM_BUILD_ROOT
+mkdir -p $RPM_BUILD_ROOT
+
+
+%files
+%doc
+
+
+
+%changelog

+ 44 - 0
test/integration/openshift_health_checker/builds/aos-package-checks/root/root/ose-3.2.spec

@@ -0,0 +1,44 @@
+Name:           atomic-openshift
+Version:        3.2
+Release:        1
+Summary:        package the critical aos packages
+
+License:        NA
+
+Source0:	http://example.com/ose.tgz
+BuildArch:	noarch
+
+%package master
+Summary:        package the critical aos packages
+%package node
+Summary:        package the critical aos packages
+
+%description
+Package for pretending to provide AOS
+
+%description master
+Package for pretending to provide AOS
+
+%description node
+Package for pretending to provide AOS
+
+%prep
+%setup -q
+
+
+%build
+
+
+%install
+rm -rf $RPM_BUILD_ROOT
+mkdir -p $RPM_BUILD_ROOT
+
+
+%files
+%files master
+%files node
+%doc
+
+
+
+%changelog

+ 44 - 0
test/integration/openshift_health_checker/builds/aos-package-checks/root/root/ose-3.3.spec

@@ -0,0 +1,44 @@
+Name:           atomic-openshift
+Version:        3.3
+Release:        1
+Summary:        package the critical aos packages
+
+License:        NA
+
+Source0:	http://example.com/ose.tgz
+BuildArch:	noarch
+
+%package master
+Summary:        package the critical aos packages
+%package node
+Summary:        package the critical aos packages
+
+%description
+Package for pretending to provide AOS
+
+%description master
+Package for pretending to provide AOS
+
+%description node
+Package for pretending to provide AOS
+
+%prep
+%setup -q
+
+
+%build
+
+
+%install
+rm -rf $RPM_BUILD_ROOT
+mkdir -p $RPM_BUILD_ROOT
+
+
+%files
+%files master
+%files node
+%doc
+
+
+
+%changelog

+ 2 - 0
test/integration/openshift_health_checker/builds/test-target-base/Dockerfile

@@ -0,0 +1,2 @@
+FROM centos/systemd
+RUN yum install -y iproute python-dbus PyYAML yum-utils

+ 99 - 0
test/integration/openshift_health_checker/common.go

@@ -0,0 +1,99 @@
+package test
+
+import (
+	"bytes"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"strings"
+	"syscall"
+	"testing"
+)
+
+// A PlaybookTest executes a given Ansible playbook and checks the exit code and
+// output contents.
+type PlaybookTest struct {
+	// inputs
+	Path string
+	// expected outputs
+	ExitCode int
+	Output   []string // zero or more strings that should be in the output
+}
+
+// Run runs the PlaybookTest.
+func (p PlaybookTest) Run(t *testing.T) {
+	// A PlaybookTest is intended to be run in parallel with other tests.
+	t.Parallel()
+
+	cmd := exec.Command("ansible-playbook", "-i", "/dev/null", p.Path)
+	cmd.Env = append(os.Environ(), "ANSIBLE_FORCE_COLOR=1")
+	b, err := cmd.CombinedOutput()
+
+	// Check exit code.
+	if (err == nil) && (p.ExitCode != 0) {
+		p.checkExitCode(t, 0, p.ExitCode, cmd, b)
+	}
+	if (err != nil) && (p.ExitCode == 0) {
+		got, ok := getExitCode(err)
+		if !ok {
+			t.Logf("unexpected error (%T): %[1]v", err)
+			p.logCmdAndOutput(t, cmd, b)
+			t.FailNow()
+		}
+		p.checkExitCode(t, got, p.ExitCode, cmd, b)
+	}
+
+	// Check output contents.
+	var missing []string
+	for _, s := range p.Output {
+		if !bytes.Contains(b, []byte(s)) {
+			missing = append(missing, s)
+		}
+	}
+	if len(missing) > 0 {
+		t.Logf("missing in output: %q", missing)
+		p.logCmdAndOutput(t, cmd, b)
+		t.FailNow()
+	}
+}
+
+// getExitCode returns an exit code and true if the exit code could be taken
+// from err, false otherwise.
+// The implementation is GOOS-specific, and currently only supports Linux.
+func getExitCode(err error) (int, bool) {
+	exitErr, ok := err.(*exec.ExitError)
+	if !ok {
+		return -1, false
+	}
+	waitStatus, ok := exitErr.Sys().(syscall.WaitStatus)
+	if !ok {
+		return -1, false
+	}
+	return waitStatus.ExitStatus(), true
+}
+
+// checkExitCode marks the test as failed when got is different than want.
+func (p PlaybookTest) checkExitCode(t *testing.T, got, want int, cmd *exec.Cmd, output []byte) {
+	if got == want {
+		return
+	}
+	t.Logf("got exit code %v, want %v", got, want)
+	p.logCmdAndOutput(t, cmd, output)
+	t.FailNow()
+}
+
+// logCmdAndOutput logs how to re-run a command and a summary of the output of
+// its last execution for debugging.
+func (p PlaybookTest) logCmdAndOutput(t *testing.T, cmd *exec.Cmd, output []byte) {
+	const maxLines = 10
+	lines := bytes.Split(bytes.TrimRight(output, "\n"), []byte("\n"))
+	if len(lines) > maxLines {
+		lines = append([][]byte{[]byte("...")}, lines[len(lines)-maxLines:len(lines)]...)
+	}
+	output = bytes.Join(lines, []byte("\n"))
+	dir, err := filepath.Abs(cmd.Dir)
+	if err != nil {
+		panic(err)
+	}
+	t.Logf("\n$ (cd %s && %s)\n%s", dir, strings.Join(cmd.Args, " "), output)
+}
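
When a test fails, `logCmdAndOutput` prints a copy-pastable command line. Re-running a
playbook outside the Go harness is the same invocation the harness makes; a sketch using
one of the real playbooks below (run inside the tox virtualenv so ansible is available):

```
# Reproduce a single playbook run exactly as PlaybookTest does.
cd test/integration/openshift_health_checker/preflight
ANSIBLE_FORCE_COLOR=1 ansible-playbook -i /dev/null \
  playbooks/package_update_dep_missing.yml
echo "exit code: $?"   # the corresponding Go test expects 2
```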

+ 20 - 0
test/integration/openshift_health_checker/preflight/playbooks/package_availability_missing_required.yml

@@ -0,0 +1,20 @@
+---
+- include: ../../setup_container.yml
+  vars:
+    image: preflight-aos-package-checks
+    l_host_vars:
+      deployment_type: openshift-enterprise
+
+- name: Fail as required packages cannot be installed
+  hosts: all
+  roles:
+    - openshift_health_checker
+  tasks:
+    - block:
+
+        - action: openshift_health_check
+          args:
+            checks: [ 'package_availability' ]
+
+      always:  # destroy the container whether check passed or not
+        - include: ../../teardown_container.yml

+ 20 - 0
test/integration/openshift_health_checker/preflight/playbooks/package_availability_succeeds.yml

@@ -0,0 +1,20 @@
+---
+- include: ../../setup_container.yml
+  vars:
+    image: preflight-aos-package-checks
+    l_host_vars:
+      deployment_type: origin
+
+- name: Succeeds as Origin packages are public
+  hosts: all
+  roles:
+    - openshift_health_checker
+  tasks:
+    - block:
+
+        - action: openshift_health_check
+          args:
+            checks: [ 'package_availability' ]
+
+      always:  # destroy the container whether check passed or not
+        - include: ../../teardown_container.yml

+ 24 - 0
test/integration/openshift_health_checker/preflight/playbooks/package_update_dep_missing.yml

@@ -0,0 +1,24 @@
+---
+- include: ../../setup_container.yml
+  vars:
+    image: preflight-aos-package-checks
+    l_host_vars:
+      openshift_deployment_type: openshift-enterprise
+      openshift_release: 3.2
+
+- name: Fails when a dependency required for update is missing
+  hosts: all
+  roles:
+    - openshift_health_checker
+  tasks:
+    - block:
+
+        - include: tasks/enable_repo.yml
+          vars: { repo_name: "break-yum" }
+
+        - action: openshift_health_check
+          args:
+            checks: [ 'package_update' ]
+
+      always:  # destroy the container whether check passed or not
+        - include: ../../teardown_container.yml

+ 31 - 0
test/integration/openshift_health_checker/preflight/playbooks/package_update_repo_broken.yml

@@ -0,0 +1,31 @@
+---
+- include: ../../setup_container.yml
+  vars:
+    image: preflight-aos-package-checks
+    l_host_vars:
+      openshift_deployment_type: openshift-enterprise
+      openshift_release: 3.2
+
+- name: Fails when a repo definition is completely broken
+  hosts: all
+  roles:
+    - openshift_health_checker
+  tasks:
+    - block:
+
+        - include: tasks/enable_repo.yml
+          vars: { repo_name: "break-yum" }
+
+        - name: Break the break-yum repo
+          replace:
+            dest: /etc/yum.repos.d/break-yum.repo
+            backup: no
+            regexp: "^baseurl"
+            replace: "#baseurl"
+
+        - action: openshift_health_check
+          args:
+            checks: [ 'package_update' ]
+
+      always:  # destroy the container whether check passed or not
+        - include: ../../teardown_container.yml

+ 21 - 0
test/integration/openshift_health_checker/preflight/playbooks/package_update_repo_disabled.yml

@@ -0,0 +1,21 @@
+---
+- include: ../../setup_container.yml
+  vars:
+    image: preflight-aos-package-checks
+    l_host_vars:
+      openshift_deployment_type: openshift-enterprise
+      openshift_release: 3.2
+
+- name: Succeeds when nothing blocks a yum update
+  hosts: all
+  roles:
+    - openshift_health_checker
+  tasks:
+    - block:
+
+        - action: openshift_health_check
+          args:
+            checks: [ 'package_update' ]
+
+      always:  # destroy the container whether check passed or not
+        - include: ../../teardown_container.yml

+ 27 - 0
test/integration/openshift_health_checker/preflight/playbooks/package_update_repo_unreachable.yml

@@ -0,0 +1,27 @@
+---
+- include: ../../setup_container.yml
+  vars:
+    image: preflight-aos-package-checks
+    l_host_vars:
+      openshift_deployment_type: openshift-enterprise
+      openshift_release: 3.2
+
+- name: Fails when repo content is not available
+  hosts: all
+  roles:
+    - openshift_health_checker
+  tasks:
+    - block:
+
+        - include: tasks/enable_repo.yml
+          vars: { repo_name: "break-yum" }
+
+        - name: Remove the local repo entirely
+          file: path=/mnt/localrepo state=absent
+
+        - action: openshift_health_check
+          args:
+            checks: [ 'package_update' ]
+
+      always:  # destroy the container whether check passed or not
+        - include: ../../teardown_container.yml

+ 24 - 0
test/integration/openshift_health_checker/preflight/playbooks/package_version_matches.yml

@@ -0,0 +1,24 @@
+---
+- include: ../../setup_container.yml
+  vars:
+    image: preflight-aos-package-checks
+    l_host_vars:
+      deployment_type: openshift-enterprise
+      openshift_release: 3.2
+
+- name: Success when AOS version matches openshift_release
+  hosts: all
+  roles:
+    - openshift_health_checker
+  tasks:
+    - block:
+
+        - include: tasks/enable_repo.yml
+          vars: { repo_name: "ose-3.2" }
+
+        - action: openshift_health_check
+          args:
+            checks: [ 'package_version' ]
+
+      always:  # destroy the container whether check passed or not
+        - include: ../../teardown_container.yml

+ 24 - 0
test/integration/openshift_health_checker/preflight/playbooks/package_version_mismatches.yml

@@ -0,0 +1,24 @@
+---
+- include: ../../setup_container.yml
+  vars:
+    image: preflight-aos-package-checks
+    l_host_vars:
+      deployment_type: openshift-enterprise
+      openshift_release: 3.3
+
+- name: Failure when AOS version doesn't match openshift_release
+  hosts: all
+  roles:
+    - openshift_health_checker
+  tasks:
+    - block:
+
+        - include: tasks/enable_repo.yml
+          vars: { repo_name: "ose-3.2" }
+
+        - action: openshift_health_check
+          args:
+            checks: [ 'package_version' ]
+
+      always:  # destroy the container whether check passed or not
+        - include: ../../teardown_container.yml

+ 26 - 0
test/integration/openshift_health_checker/preflight/playbooks/package_version_multiple.yml

@@ -0,0 +1,26 @@
+---
+- include: ../../setup_container.yml
+  vars:
+    image: preflight-aos-package-checks
+    l_host_vars:
+      openshift_deployment_type: openshift-enterprise
+
+- name: Fails when multiple AOS versions are available
+  hosts: all
+  roles:
+    - openshift_health_checker
+  tasks:
+    - block:
+
+        - include: tasks/enable_repo.yml
+          vars: { repo_name: "ose-3.2" }
+
+        - include: tasks/enable_repo.yml
+          vars: { repo_name: "ose-3.3" }
+
+        - action: openshift_health_check
+          args:
+            checks: [ 'package_version' ]
+
+      always:  # destroy the container whether check passed or not
+        - include: ../../teardown_container.yml

+ 20 - 0
test/integration/openshift_health_checker/preflight/playbooks/package_version_origin.yml

@@ -0,0 +1,20 @@
+---
+- include: ../../setup_container.yml
+  vars:
+    image: preflight-aos-package-checks
+    l_host_vars:
+      openshift_deployment_type: origin
+
+- name: Succeeds with Origin although multiple versions are available
+  hosts: all
+  roles:
+    - openshift_health_checker
+  tasks:
+    - block:
+
+        - action: openshift_health_check
+          args:
+            checks: [ 'package_version' ]
+
+      always:  # destroy the container whether check passed or not
+        - include: ../../teardown_container.yml

+ 1 - 0
test/integration/openshift_health_checker/preflight/playbooks/roles

@@ -0,0 +1 @@
+../../../../../roles

+ 9 - 0
test/integration/openshift_health_checker/preflight/playbooks/tasks/enable_repo.yml

@@ -0,0 +1,9 @@
+---
+- name: Enable {{ repo_name }} repo
+  # believe it or not we can't use the yum_repository module for this.
+  # https://github.com/ansible/ansible-modules-extras/issues/2384
+  ini_file:
+    dest: /etc/yum.repos.d/{{ repo_name }}.repo
+    section: "{{ repo_name }}"
+    option: enabled
+    value: 1
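
The `ini_file` task simply flips `enabled` to 1 in the repo file. On the test images,
which install yum-utils, a hypothetical manual equivalent for the ose-3.2 repo would be:

```
# Manual equivalent of enable_repo.yml, run inside a test container
# (yum-config-manager is provided by yum-utils in the base image).
yum-config-manager --enable ose-3.2
yum repolist enabled | grep ose-3.2
```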

+ 105 - 0
test/integration/openshift_health_checker/preflight/preflight_test.go

@@ -0,0 +1,105 @@
+package preflight
+
+import (
+	"testing"
+
+	. ".."
+)
+
+func TestPackageUpdateDepMissing(t *testing.T) {
+	PlaybookTest{
+		Path:     "playbooks/package_update_dep_missing.yml",
+		ExitCode: 2,
+		Output: []string{
+			"check \"package_update\":",
+			"Could not perform a yum update.",
+			"break-yum-update-1.0-2.noarch requires package-that-does-not-exist",
+		},
+	}.Run(t)
+}
+
+func TestPackageUpdateRepoBroken(t *testing.T) {
+	PlaybookTest{
+		Path:     "playbooks/package_update_repo_broken.yml",
+		ExitCode: 2,
+		Output: []string{
+			"check \"package_update\":",
+			"Error with yum repository configuration: Cannot find a valid baseurl for repo",
+		},
+	}.Run(t)
+}
+
+func TestPackageUpdateRepoDisabled(t *testing.T) {
+	PlaybookTest{
+		Path:     "playbooks/package_update_repo_disabled.yml",
+		ExitCode: 0,
+		Output: []string{
+			"CHECK [package_update",
+		},
+	}.Run(t)
+}
+
+func TestPackageUpdateRepoUnreachable(t *testing.T) {
+	PlaybookTest{
+		Path:     "playbooks/package_update_repo_unreachable.yml",
+		ExitCode: 2,
+		Output: []string{
+			"check \"package_update\":",
+			"Error getting data from at least one yum repository",
+		},
+	}.Run(t)
+}
+
+func TestPackageVersionMatches(t *testing.T) {
+	PlaybookTest{
+		Path:     "playbooks/package_version_matches.yml",
+		ExitCode: 0,
+		Output: []string{
+			"CHECK [package_version",
+		},
+	}.Run(t)
+}
+
+func TestPackageVersionMismatches(t *testing.T) {
+	PlaybookTest{
+		Path:     "playbooks/package_version_mismatches.yml",
+		ExitCode: 2,
+		Output: []string{
+			"check \"package_version\":",
+			"Not all of the required packages are available at requested version",
+		},
+	}.Run(t)
+}
+
+func TestPackageVersionMultiple(t *testing.T) {
+	PlaybookTest{
+		Path:     "playbooks/package_version_multiple.yml",
+		ExitCode: 2,
+		Output: []string{
+			"check \"package_version\":",
+			"Multiple minor versions of these packages are available",
+		},
+	}.Run(t)
+}
+
+func TestPackageAvailabilityMissingRequired(t *testing.T) {
+	PlaybookTest{
+		Path:     "playbooks/package_availability_missing_required.yml",
+		ExitCode: 2,
+		Output: []string{
+			"check \"package_availability\":",
+			"Cannot install all of the necessary packages.",
+			"atomic-openshift",
+		},
+	}.Run(t)
+}
+
+func TestPackageAvailabilitySucceeds(t *testing.T) {
+	PlaybookTest{
+		Path:     "playbooks/package_availability_succeeds.yml",
+		ExitCode: 0,
+		Output: []string{
+			"CHECK [package_availability",
+		},
+	}.Run(t)
+}
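
Each test maps one-to-one onto a playbook, so a subset can be selected with go test's
`-run` regex. For example, from the repository root:

```
# Run only the package_update test family.
go test ./test/integration/... -run 'TestPackageUpdate' -v
```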

+ 45 - 0
test/integration/openshift_health_checker/setup_container.yml

@@ -0,0 +1,45 @@
+---
+# Include this play once for each container you want to create and use as a test host.
+#
+# Optional parameters on the include are as follows:
+# * scenario = unique name for the container to be started
+# * image = name of the image to start in the container
+# * command = command to run in the container
+# * l_groups = host groups that the container should be added to
+# * l_host_vars = any variables that should be added to the host
+
+- name: Start container for specified test host
+  gather_facts: no
+  hosts: localhost
+  connection: local
+  tasks:
+
+    - set_fact:
+        # This is a little weird but if we use a var instead of a fact,
+        # a different random value is generated for each task. See:
+        # https://opensolitude.com/2015/05/27/ansible-lookups-variables-vs-facts.html
+        container_name: openshift_ansible_test_{{ scenario | default(100000000000000 | random) }}
+
+    - name: start container
+      docker_container:
+        name: "{{ container_name }}"
+        image: "{{ lookup('env', 'IMAGE_PREFIX') | default('openshift-ansible-integration-', true) }}{{ image | default('test-target-base') }}"
+        command: "{{ command | default('sleep 1800') }}"
+        recreate: yes
+        # NOTE: When/if we need to run containers that are docker hosts as well:
+        # volumes: [ "/var/run/docker.sock:/var/run/docker.sock:z" ]
+
+    - name: add container as host in inventory
+      add_host:
+        ansible_connection: docker
+        name: "{{ container_name }}"
+        groups: '{{ l_groups | default("masters,nodes,etcd") }}'
+
+    # There ought to be a better way to transfer the host vars, but see:
+    # https://groups.google.com/forum/#!topic/Ansible-project/Jwx8RYhqxPA
+    - name: set host facts per test parameters
+      set_fact:
+        "{{ item.key }}": "{{ item.value }}"
+      delegate_facts: True
+      delegate_to: "{{ container_name }}"
+      with_dict: "{{ l_host_vars | default({}) }}"

+ 23 - 0
test/integration/openshift_health_checker/teardown_container.yml

@@ -0,0 +1,23 @@
+---
+
+# Include this to delete the current test host container.
+#
+# In order to recover from test exceptions, this cleanup is expected to
+# be done in an "always:" task on the same block as the test task(s). So
+# it happens in a task "on" the host being tested. In order to delete the
+# host's container, the task uses its own hostname (which is same as the
+# container name) but delegates the docker action to localhost.
+
+- block:
+
+    # so handlers don't break the test by trying to run after teardown:
+    - meta: flush_handlers
+
+  always:
+
+    - name: delete test container
+      delegate_to: localhost
+      connection: local
+      docker_container:
+        name: "{{ inventory_hostname }}"
+        state: absent
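
If a run is interrupted before this teardown executes, containers named
`openshift_ansible_test_*` are left behind. A sketch for finding and removing them:

```
# List any test containers left behind by interrupted runs...
docker ps -a --filter name=openshift_ansible_test_

# ...and remove them (a no-op when the list is empty).
docker ps -aq --filter name=openshift_ansible_test_ | xargs -r docker rm -f
```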

+ 80 - 0
test/integration/run-tests.sh

@@ -0,0 +1,80 @@
+#!/bin/bash
+
+# This script runs the golang integration tests in the directories underneath.
+# It should be run from the same directory it is in, or in a directory above.
+# Specify the same image prefix used (if any) with build-images.sh
+#
+# Example:
+# ./run-tests.sh --prefix=docker.io/openshift/ansible-integration- --parallel=16
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+source_root=$(dirname "${0}")
+
+prefix="${PREFIX:-openshift-ansible-integration-}"
+gotest_options="${GOTEST_OPTIONS:--v}"
+push=false
+verbose=false
+help=false
+
+for args in "$@"
+do
+  case $args in
+      --prefix=*)
+        prefix="${args#*=}"
+        ;;
+      --parallel=*)
+        gotest_options="${gotest_options} -parallel ${args#*=}"
+        ;;
+      --verbose)
+        verbose=true
+        ;;
+      --help)
+        help=true
+        ;;
+  esac
+done
+
+if [ "$help" = true ]; then
+  echo "Runs the openshift-ansible integration tests."
+  echo
+  echo "Options: "
+  echo "  --prefix=PREFIX"
+  echo "  The prefix to use for the image names."
+  echo "  default: openshift-ansible-integration-"
+  echo
+  echo "  --parallel=NUMBER"
+  echo "  Number of tests to run in parallel."
+  echo "  default: GOMAXPROCS (typically, number of processors)"
+  echo
+  echo "  --verbose"
+  echo "  Enables printing of the commands as they run."
+  echo
+  echo "  --help"
+  echo "  Prints this help message"
+  echo
+  exit 0
+fi
+
+
+
+if ! [ -d $source_root/../../.tox/integration ]; then
+  # have tox create a consistent virtualenv
+  pushd $source_root/../..; tox -e integration; popd
+fi
+# use the virtualenv from tox
+set +o nounset; source $source_root/../../.tox/integration/bin/activate; set -o nounset
+
+if [ "$verbose" = true ]; then
+  set -x
+fi
+
+# Run the tests. NOTE: "go test" requires a relative path for this purpose.
+# The PWD trick below will only work if cwd is in/above where this script lives.
+retval=0
+IMAGE_PREFIX="${prefix}" env -u GOPATH \
+  go test ./${source_root#$PWD}/... ${gotest_options}
+
+
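
Because the script activates the tox virtualenv before calling `go test`, the pinned
ansible version is picked up automatically. The same environment can be prepared by
hand; note that `run-tests.sh` additionally unsets `GOPATH` before invoking `go test`:

```
tox -e integration                      # one-time: create the virtualenv
source .tox/integration/bin/activate    # picks up the pinned ansible
go test ./test/integration/... -v
```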

+ 1 - 1
test/modify_yaml_tests.py

@@ -5,7 +5,7 @@ import os
 import sys
 import unittest
 
-sys.path = [os.path.abspath(os.path.dirname(__file__) + "/../library/")] + sys.path
+sys.path = [os.path.abspath(os.path.dirname(__file__) + "/../../library/")] + sys.path
 
 # pylint: disable=import-error
 from modify_yaml import set_key  # noqa: E402

+ 8 - 0
tox.ini

@@ -3,6 +3,7 @@ minversion=2.3.1
 envlist =
     py{27,35}-{flake8,pylint,unit}
     py27-{yamllint,ansible_syntax,generate_validation}
+    integration
 skipsdist=True
 skip_missing_interpreters=True
 
@@ -12,6 +13,7 @@ deps =
     -rrequirements.txt
     -rtest-requirements.txt
     py35-flake8: flake8-bugbear==17.3.0
+    integration: docker-py==1.10.6
 
 commands =
     unit: pip install -e utils
@@ -22,3 +24,9 @@ commands =
     generate_validation: python setup.py generate_validation
     # TODO(rhcarvalho): check syntax of other important entrypoint playbooks
     ansible_syntax: python setup.py ansible_syntax
+    # ansible 2.2.2+ unfortunately breaks the integration test runner
+    # because it can no longer set facts on the test docker hosts.
+    # So for now, install separate ansible version for integration.
+    # PR that fixes it: https://github.com/ansible/ansible/pull/23599
+    # Once that PR is available, drop this and use same ansible.
+    integration: pip install ansible==2.2.1.0
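
Until that upstream fix is released, the integration env is the only one running the
older ansible. A quick, hypothetical check that the pin took effect (path per tox defaults):

```
# Confirm the integration virtualenv got the pinned ansible.
.tox/integration/bin/ansible --version | head -1   # expect: ansible 2.2.1.0
```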