
Infrastructure - Add service action to bin/cluster

* Add necessary playbooks/roles
* Clean up bin/cluster to meet new design guidelines
Jhon Honce · 9 years ago · commit 67afaa13ee

+ 27 - 3
bin/cluster

@@ -9,8 +9,9 @@ import os
 
 class Cluster(object):
     """
-    Control and Configuration Interface for OpenShift Clusters
+    Provide Command, Control and Configuration (c3) Interface for OpenShift Clusters
     """
+
     def __init__(self):
         # setup ansible ssh environment
         if 'ANSIBLE_SSH_ARGS' not in os.environ:
@@ -104,6 +105,21 @@ class Cluster(object):
 
         return self.action(args, inventory, env, playbook)
 
+    def service(self, args):
+        """
+        Make the same service call across all nodes in the cluster
+        :param args: command line arguments provided by user
+        :return: exit status from run command
+        """
+        env = {'cluster_id': args.cluster_id,
+               'deployment_type': self.get_deployment_type(args),
+               'new_cluster_state': args.state}
+
+        playbook = "playbooks/{}/openshift-cluster/service.yml".format(args.provider)
+        inventory = self.setup_provider(args.provider)
+
+        return self.action(args, inventory, env, playbook)
+
     def setup_provider(self, provider):
         """
         Setup ansible playbook environment
@@ -167,7 +183,7 @@ class Cluster(object):
 
 if __name__ == '__main__':
     """
-    Implemented to support writing unit tests
+    User command to invoke ansible playbooks in a "known" environment
     """
 
     cluster = Cluster()
@@ -221,6 +237,13 @@ if __name__ == '__main__':
                                            parents=[meta_parser])
     list_parser.set_defaults(func=cluster.list)
 
+    service_parser = action_parser.add_parser('service', help='service for openshift across cluster',
+                                              parents=[meta_parser])
+    # choices are the only ones valid for the ansible service module: http://docs.ansible.com/service_module.html
+    service_parser.add_argument('state', choices=['started', 'stopped', 'restarted', 'reloaded'],
+                                help='make service call across cluster')
+    service_parser.set_defaults(func=cluster.service)
+
     args = parser.parse_args()
 
     if 'terminate' == args.action and not args.force:
@@ -230,7 +253,8 @@ if __name__ == '__main__':
             exit(1)
 
     if 'update' == args.action and not args.force:
-        answer = raw_input("This is destructive and could corrupt {} environment. Continue? [y/N] ".format(args.cluster_id))
+        answer = raw_input(
+            "This is destructive and could corrupt {} environment. Continue? [y/N] ".format(args.cluster_id))
         if answer not in ['y', 'Y']:
             sys.stderr.write('\nACTION [update] aborted by user!\n')
             exit(1)
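
Taken together, the new subparser should make the action callable like the existing ones; a hypothetical invocation (assuming meta_parser supplies the provider and cluster_id positionals, as the other actions suggest, and using a placeholder cluster id):

    bin/cluster service aws my-cluster restarted

which would hand playbooks/aws/openshift-cluster/service.yml to self.action() with cluster_id, deployment_type and new_cluster_state set as shown in Cluster.service() above.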

+ 28 - 0
playbooks/aws/openshift-cluster/service.yml

@@ -0,0 +1,28 @@
+---
+- name: Call same systemctl command for openshift on all instance(s)
+  hosts: localhost
+  gather_facts: no
+  vars_files:
+  - vars.yml
+  tasks:
+  - fail: msg="cluster_id is required to be injected in this playbook"
+    when: cluster_id is not defined
+
+  - name: Evaluate g_service_masters
+    add_host:
+      name: "{{ item }}"
+      groups: g_service_masters
+      ansible_ssh_user: "{{ deployment_vars[deployment_type].ssh_user }}"
+      ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
+    with_items: groups["tag_env-host-type_{{ cluster_id }}-openshift-master"] | default([])
+
+  - name: Evaluate g_service_nodes
+    add_host:
+      name: "{{ item }}"
+      groups: g_service_nodes
+      ansible_ssh_user: "{{ deployment_vars[deployment_type].ssh_user }}"
+      ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
+    with_items: groups["tag_env-host-type_{{ cluster_id }}-openshift-node"] | default([])
+
+- include: ../../common/openshift-node/service.yml
+- include: ../../common/openshift-master/service.yml
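
Because of the fail guard above, the playbook needs the same extra vars the service() method injects if it is run outside bin/cluster; an illustrative direct invocation (inventory path and values are placeholders):

    ansible-playbook -i <aws-ec2-inventory> \
        -e cluster_id=my-cluster -e deployment_type=origin -e new_cluster_state=restarted \
        playbooks/aws/openshift-cluster/service.yml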

+ 18 - 0
playbooks/common/openshift-master/service.yml

@@ -0,0 +1,18 @@
+---
+- name: Populate g_service_masters host group if needed
+  hosts: localhost
+  gather_facts: no
+  tasks:
+  - fail: msg="new_cluster_state is required to be injected in this playbook"
+    when: new_cluster_state is not defined
+
+  - name: Evaluate g_service_masters
+    add_host: name={{ item }} groups=g_service_masters
+    with_items: oo_host_group_exp | default([])
+
+- name: Change openshift-master state on master instance(s)
+  hosts: g_service_masters
+  connection: ssh
+  gather_facts: no
+  tasks:
+    - service: name=openshift-master state="{{ new_cluster_state }}"
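
On a systemd host the service task above amounts to running the matching systemctl verb against the unit, for example (illustrative, when new_cluster_state is 'restarted'):

    systemctl restart openshift-master

The openshift-node playbook below does the same for the openshift-node unit.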

+ 18 - 0
playbooks/common/openshift-node/service.yml

@@ -0,0 +1,18 @@
+---
+- name: Populate g_service_nodes host group if needed
+  hosts: localhost
+  gather_facts: no
+  tasks:
+  - fail: msg="new_cluster_state is required to be injected in this playbook"
+    when: new_cluster_state is not defined
+
+  - name: Evaluate g_service_nodes
+    add_host: name={{ item }} groups=g_service_nodes
+    with_items: oo_host_group_exp | default([])
+
+- name: Change openshift-node state on node instance(s)
+  hosts: g_service_nodes
+  connection: ssh
+  gather_facts: no
+  tasks:
+    - service: name=openshift-node state="{{ new_cluster_state }}"

+ 28 - 0
playbooks/gce/openshift-cluster/service.yml

@@ -0,0 +1,28 @@
+---
+- name: Call same systemctl command for openshift on all instance(s)
+  hosts: localhost
+  gather_facts: no
+  vars_files:
+  - vars.yml
+  tasks:
+  - fail: msg="cluster_id is required to be injected in this playbook"
+    when: cluster_id is not defined
+
+  - set_fact: scratch_group=tag_env-host-type-{{ cluster_id }}-openshift-node
+  - add_host:
+      name: "{{ item }}"
+      groups: g_service_nodes
+      ansible_ssh_user: "{{ deployment_vars[deployment_type].ssh_user | default(ansible_ssh_user, true) }}"
+      ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
+    with_items: groups[scratch_group] | default([]) | difference(['localhost']) | difference(groups.status_terminated)
+
+  - set_fact: scratch_group=tag_env-host-type-{{ cluster_id }}-openshift-master
+  - add_host:
+      name: "{{ item }}"
+      groups: g_service_masters
+      ansible_ssh_user: "{{ deployment_vars[deployment_type].ssh_user | default(ansible_ssh_user, true) }}"
+      ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
+    with_items: groups[scratch_group] | default([]) | difference(['localhost']) | difference(groups.status_terminated)
+
+- include: ../../common/openshift-node/service.yml
+- include: ../../common/openshift-master/service.yml

+ 26 - 0
playbooks/gce/openshift-cluster/wip.yml

@@ -0,0 +1,26 @@
+---
+- name: WIP
+  hosts: localhost
+  connection: local
+  gather_facts: no
+  vars_files:
+  - vars.yml
+  tasks:
+  - name: Evaluate oo_masters_for_deploy
+    add_host:
+      name: "{{ item }}"
+      groups: oo_masters_for_deploy
+      ansible_ssh_user: "{{ deployment_vars[deployment_type].ssh_user | default(ansible_ssh_user, true) }}"
+      ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
+    with_items: groups["tag_env-host-type-{{ cluster_id }}-openshift-master"] | default([])
+
+- name: Deploy OpenShift Services
+  hosts: oo_masters_for_deploy
+  connection: ssh
+  gather_facts: yes
+  user: root
+  vars_files:
+  - vars.yml
+  roles:
+  - openshift_registry
+  - openshift_router

+ 32 - 0
playbooks/libvirt/openshift-cluster/service.yml

@@ -0,0 +1,32 @@
+---
+# TODO: need to figure out a plan for setting hostname, currently the default
+# is localhost, so no hostname value (or public_hostname) value is getting
+# assigned
+
+- name: Call same systemctl command for openshift on all instance(s)
+  hosts: localhost
+  gather_facts: no
+  vars_files:
+  - vars.yml
+  tasks:
+  - fail: msg="cluster_id is required to be injected in this playbook"
+    when: cluster_id is not defined
+
+  - name: Evaluate g_service_masters
+    add_host:
+      name: "{{ item }}"
+      ansible_ssh_user: "{{ deployment_vars[deployment_type].ssh_user }}"
+      ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
+      groups: g_service_masters
+    with_items: groups["tag_env-host-type-{{ cluster_id }}-openshift-master"] | default([])
+
+  - name: Evaluate g_service_nodes
+    add_host:
+      name: "{{ item }}"
+      ansible_ssh_user: "{{ deployment_vars[deployment_type].ssh_user }}"
+      ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
+      groups: g_service_nodes
+    with_items: groups["tag_env-host-type-{{ cluster_id }}-openshift-node"] | default([])
+
+- include: ../../common/openshift-node/service.yml
+- include: ../../common/openshift-master/service.yml