Browse Source

Remove outdated playbooks

- Remove aws openshift-node and openshift-master playbooks
- Remove gce openshift-node and openshift-master playbooks
- Consolidate aws terminate playbooks
Jason DeTiberus 9 years ago
parent
commit
f08e64ac98

+ 50 - 1
playbooks/aws/openshift-cluster/terminate.yml

@@ -13,4 +13,53 @@
       ansible_sudo: "{{ deployment_vars[deployment_type].sudo }}"
     with_items: groups[scratch_group] | default([]) | difference(['localhost'])
 
-- include: ../terminate.yml
+- name: Terminate instances
+  hosts: localhost
+  connection: local
+  gather_facts: no
+  vars:
+    host_vars: "{{ hostvars
+                   | oo_select_keys(groups['oo_hosts_to_terminate']) }}"
+  tasks:
+    - name: Remove tags from instances
+      ec2_tag: resource={{ item.ec2_id }} region={{ item.ec2_region }} state=absent
+      args:
+        tags:
+          env: "{{ item['ec2_tag_env'] }}"
+          host-type: "{{ item['ec2_tag_host-type'] }}"
+          env-host-type: "{{ item['ec2_tag_env-host-type'] }}"
+      with_items: host_vars
+      when: "'oo_hosts_to_terminate' in groups"
+
+    - name: Terminate instances
+      ec2:
+        state: absent
+        instance_ids: ["{{ item.ec2_id }}"]
+        region: "{{ item.ec2_region }}"
+      ignore_errors: yes
+      register: ec2_term
+      with_items: host_vars
+      when: "'oo_hosts_to_terminate' in groups"
+
+    # Fail if any of the instances failed to terminate with an error other
+    # than 403 Forbidden
+    - fail: msg=Terminating instance {{ item.item.ec2_id }} failed with message {{ item.msg }}
+      when: "'oo_hosts_to_terminate' in groups and item.failed and not item.msg | search(\"error: EC2ResponseError: 403 Forbidden\")"
+      with_items: ec2_term.results
+
+    - name: Stop instance if termination failed
+      ec2:
+        state: stopped
+        instance_ids: ["{{ item.item.ec2_id }}"]
+        region: "{{ item.item.ec2_region }}"
+      register: ec2_stop
+      when: "'oo_hosts_to_terminate' in groups and item.failed"
+      with_items: ec2_term.results
+
+    - name: Rename stopped instances
+      ec2_tag: resource={{ item.item.item.ec2_id }} region={{ item.item.item.ec2_region }} state=present
+      args:
+        tags:
+          Name: "{{ item.item.item.ec2_tag_Name }}-terminate"
+      with_items: ec2_stop.results
+      when: "'oo_hosts_to_terminate' in groups"

+ 0 - 19
playbooks/aws/openshift-master/config.yml

@@ -1,19 +0,0 @@
----
-- name: Populate oo_masters_to_config host group
-  hosts: localhost
-  gather_facts: no
-  tasks:
-  - name: Evaluate oo_masters_to_config
-    add_host:
-      name: "{{ item }}"
-      groups: oo_masters_to_config
-      ansible_ssh_user: root
-    with_items: oo_host_group_exp | default([])
-
-- include: ../../common/openshift-master/config.yml
-  vars:
-    openshift_cluster_id: "{{ cluster_id }}"
-    openshift_debug_level: 4
-    openshift_deployment_type: "{{ deployment_type }}"
-    openshift_hostname: "{{ ec2_private_ip_address }}"
-    openshift_public_hostname: "{{ ec2_ip_address }}"

+ 0 - 1
playbooks/aws/openshift-master/filter_plugins

@@ -1 +0,0 @@
-../../../filter_plugins

+ 0 - 70
playbooks/aws/openshift-master/launch.yml

@@ -1,70 +0,0 @@
----
-- name: Launch instance(s)
-  hosts: localhost
-  connection: local
-  gather_facts: no
-
-# TODO: modify g_ami based on deployment_type
-  vars:
-    inst_region: us-east-1
-    g_ami: ami-86781fee
-    user_data_file: user_data.txt
-
-  tasks:
-    - name: Launch instances
-      ec2:
-        state: present
-        region: "{{ inst_region }}"
-        keypair: libra
-        group: ['public']
-        instance_type: m3.large
-        image: "{{ g_ami }}"
-        count: "{{ oo_new_inst_names | length }}"
-        user_data: "{{ lookup('file', user_data_file) }}"
-        wait: yes
-      register: ec2
-
-    - name: Add new instances public IPs to the host group
-      add_host: "hostname={{ item.public_ip }} groupname=new_ec2_instances"
-      with_items: ec2.instances
-
-    - name: Add Name and environment tags to instances
-      ec2_tag: "resource={{ item.1.id }} region={{ inst_region }} state=present"
-      with_together:
-        - oo_new_inst_names
-        - ec2.instances
-      args:
-        tags:
-          Name: "{{ item.0 }}"
-
-    - name: Add other tags to instances
-      ec2_tag: resource={{ item.id }} region={{ inst_region }} state=present
-      with_items: ec2.instances
-      args:
-        tags: "{{ oo_new_inst_tags }}"
-
-    - name: Add new instances public IPs to oo_masters_to_config
-      add_host:
-        hostname: "{{ item.0 }}"
-        ansible_ssh_host: "{{ item.1.dns_name }}"
-        groupname: oo_masters_to_config
-        ec2_private_ip_address: "{{ item.1.private_ip }}"
-        ec2_ip_address: "{{ item.1.public_ip }}"
-      with_together:
-        - oo_new_inst_names
-        - ec2.instances
-
-    - name: Wait for ssh
-      wait_for: port=22 host={{ item.dns_name }}
-      with_items: ec2.instances
-
-    - name: Wait for root user setup
-      command: "ssh -o StrictHostKeyChecking=no -o PasswordAuthentication=no -o ConnectTimeout=10 -o UserKnownHostsFile=/dev/null root@{{ item.dns_name }} echo root user is setup"
-      register: result
-      until: result.rc == 0
-      retries: 20
-      delay: 10
-      with_items: ec2.instances
-
-# Apply the configs, seprate so that just the configs can be run by themselves
-- include: config.yml

+ 0 - 1
playbooks/aws/openshift-master/roles

@@ -1 +0,0 @@
-../../../roles

+ 0 - 2
playbooks/aws/openshift-master/terminate.yml

@@ -1,2 +0,0 @@
----
-- include: ../terminate.yml

+ 0 - 26
playbooks/aws/openshift-node/config.yml

@@ -1,26 +0,0 @@
----
-- name: Populate oo_nodes_to_config and oo_first_master host groups
-  hosts: localhost
-  gather_facts: no
-  tasks:
-  - name: Evaluate oo_nodes_to_config
-    add_host:
-      name: "{{ item }}"
-      groups: oo_nodes_to_config
-      ansible_ssh_user: root
-    with_items: oo_host_group_exp | default([])
-  - name: Evaluate oo_first_master
-    add_host:
-      name: "{{ groups['tag_env-host-type_' ~ cluster_id ~ '-openshift-master'][0] }}"
-      groups: oo_first_master
-      ansible_ssh_user: root
-
-
-- include: ../../common/openshift-node/config.yml
-  vars:
-    openshift_cluster_id: "{{ cluster_id }}"
-    openshift_debug_level: 4
-    openshift_deployment_type: "{{ deployment_type }}"
-    openshift_first_master: "{{ groups.oo_first_master.0 }}"
-    openshift_hostname: "{{ ec2_private_ip_address }}"
-    openshift_public_hostname: "{{ ec2_ip_address }}"

+ 0 - 1
playbooks/aws/openshift-node/filter_plugins

@@ -1 +0,0 @@
-../../../filter_plugins

+ 0 - 72
playbooks/aws/openshift-node/launch.yml

@@ -1,72 +0,0 @@
----
-- name: Launch instance(s)
-  hosts: localhost
-  connection: local
-  gather_facts: no
-
-# TODO: modify g_ami based on deployment_type
-  vars:
-    inst_region: us-east-1
-    g_ami: ami-86781fee
-    user_data_file: user_data.txt
-
-  tasks:
-    - name: Launch instances
-      ec2:
-        state: present
-        region: "{{ inst_region }}"
-        keypair: libra
-        group: ['public']
-        instance_type: m3.large
-        image: "{{ g_ami }}"
-        count: "{{ oo_new_inst_names | length }}"
-        user_data: "{{ lookup('file', user_data_file) }}"
-        wait: yes
-      register: ec2
-
-    - name: Add new instances public IPs to the host group
-      add_host:
-        hostname: "{{ item.public_ip }}"
-        groupname: new_ec2_instances"
-      with_items: ec2.instances
-
-    - name: Add Name and environment tags to instances
-      ec2_tag: resource={{ item.1.id }} region={{ inst_region }} state=present
-      with_together:
-        - oo_new_inst_names
-        - ec2.instances
-      args:
-        tags:
-          Name: "{{ item.0 }}"
-
-    - name: Add other tags to instances
-      ec2_tag: resource={{ item.id }} region={{ inst_region }} state=present
-      with_items: ec2.instances
-      args:
-        tags: "{{ oo_new_inst_tags }}"
-
-    - name: Add new instances public IPs to oo_nodes_to_config
-      add_host:
-        hostname: "{{ item.0 }}"
-        ansible_ssh_host: "{{ item.1.dns_name }}"
-        groupname: oo_nodes_to_config
-        ec2_private_ip_address: "{{ item.1.private_ip }}"
-        ec2_ip_address: "{{ item.1.public_ip }}"
-      with_together:
-        - oo_new_inst_names
-        - ec2.instances
-
-    - name: Wait for ssh
-      wait_for: port=22 host={{ item.dns_name }}
-      with_items: ec2.instances
-
-    - name: Wait for root user setup
-      command: "ssh -o StrictHostKeyChecking=no -o PasswordAuthentication=no -o ConnectTimeout=10 -o UserKnownHostsFile=/dev/null root@{{ item.dns_name }} echo root user is setup"
-      register: result
-      until: result.rc == 0
-      retries: 20
-      delay: 10
-      with_items: ec2.instances
-
-# Apply the configs, seprate so that just the configs can be run by themselves
-- include: config.yml

+ 0 - 1
playbooks/aws/openshift-node/roles

@@ -1 +0,0 @@
-../../../roles

+ 0 - 2
playbooks/aws/openshift-node/terminate.yml

@@ -1,2 +0,0 @@
----
-- include: ../terminate.yml

+ 0 - 64
playbooks/aws/terminate.yml

@@ -1,64 +0,0 @@
----
-- name: Populate oo_hosts_to_terminate host group
-  hosts: localhost
-  gather_facts: no
-  tasks:
-    - name: Evaluate oo_hosts_to_terminate
-      add_host: name={{ item }} groups=oo_hosts_to_terminate
-      with_items: oo_host_group_exp | default([])
-
-- name: Gather dynamic inventory variables for hosts to terminate
-  hosts: oo_hosts_to_terminate
-  gather_facts: no
-
-- name: Terminate instances
-  hosts: localhost
-  connection: local
-  gather_facts: no
-  vars:
-    host_vars: "{{ hostvars
-        | oo_select_keys(groups['oo_hosts_to_terminate']) }}"
-  tasks:
-    - name: Remove tags from instances
-      ec2_tag: resource={{ item.ec2_id }} region={{ item.ec2_region }} state=absent
-      args:
-        tags:
-          env: "{{ item['ec2_tag_env'] }}"
-          host-type: "{{ item['ec2_tag_host-type'] }}"
-          env-host-type: "{{ item['ec2_tag_env-host-type'] }}"
-      with_items: host_vars
-      when: "'oo_hosts_to_terminate' in groups"
-
-    - name: Terminate instances
-      ec2:
-        state: absent
-        instance_ids: ["{{ item.ec2_id }}"]
-        region: "{{ item.ec2_region }}"
-      ignore_errors: yes
-      register: ec2_term
-      with_items: host_vars
-      when: "'oo_hosts_to_terminate' in groups"
-
-    # Fail if any of the instances failed to terminate with an error other
-    # than 403 Forbidden
-    - fail: msg=Terminating instance {{ item.item.ec2_id }} failed with message {{ item.msg }}
-      when: "'oo_hosts_to_terminate' in groups and item.failed and not item.msg | search(\"error: EC2ResponseError: 403 Forbidden\")"
-      with_items: ec2_term.results
-
-    - name: Stop instance if termination failed
-      ec2:
-        state: stopped
-        instance_ids: ["{{ item.item.ec2_id }}"]
-        region: "{{ item.item.ec2_region }}"
-      register: ec2_stop
-      when: item.failed
-      with_items: ec2_term.results
-      when: "'oo_hosts_to_terminate' in groups"
-
-    - name: Rename stopped instances
-      ec2_tag: resource={{ item.item.item.ec2_id }} region={{ item.item.item.ec2_region }} state=present
-      args:
-        tags:
-          Name: "{{ item.item.item.ec2_tag_Name }}-terminate"
-      with_items: ec2_stop.results
-      when: "'oo_hosts_to_terminate' in groups"

+ 0 - 18
playbooks/gce/openshift-master/config.yml

@@ -1,18 +0,0 @@
----
-- name: Populate oo_masters_to_config host group
-  hosts: localhost
-  gather_facts: no
-  tasks:
-  - name: Evaluate oo_masters_to_config
-    add_host:
-      name: "{{ item }}"
-      groups: oo_masters_to_config
-      ansible_ssh_user: root
-    with_items: oo_host_group_exp | default([])
-
-- include: ../../common/openshift-master/config.yml
-  vars:
-    openshift_cluster_id: "{{ cluster_id }}"
-    openshift_debug_level: 4
-    openshift_deployment_type: "{{ deployment_type }}"
-    openshift_hostname: "{{ gce_private_ip }}"

+ 0 - 1
playbooks/gce/openshift-master/filter_plugins

@@ -1 +0,0 @@
-../../../filter_plugins

+ 0 - 51
playbooks/gce/openshift-master/launch.yml

@@ -1,51 +0,0 @@
----
-# TODO: when we are ready to go to ansible 1.9+ support only, we can update to
-# the gce task to use the disk_auto_delete parameter to avoid having to delete
-# the disk as a separate step on termination
-
-- name: Launch instance(s)
-  hosts: localhost
-  connection: local
-  gather_facts: no
-
-# TODO: modify image based on deployment_type
-  vars:
-    inst_names: "{{ oo_new_inst_names }}"
-    machine_type: n1-standard-1
-    image: libra-rhel7
-
-  tasks:
-    - name: Launch instances
-      gce:
-        instance_names: "{{ inst_names }}"
-        machine_type: "{{ machine_type }}"
-        image: "{{ image }}"
-        service_account_email: "{{ gce_service_account_email }}"
-        pem_file: "{{ gce_pem_file }}"
-        project_id: "{{ gce_project_id }}"
-        tags: "{{ oo_new_inst_tags }}"
-      register: gce
-
-    - name: Add new instances public IPs to oo_masters_to_config
-      add_host:
-        hostname: "{{ item.name }}"
-        ansible_ssh_host: "{{ item.public_ip }}"
-        groupname: oo_masters_to_config
-        gce_private_ip: "{{ item.private_ip }}"
-      with_items: gce.instance_data
-
-    - name: Wait for ssh
-      wait_for: port=22 host={{ item.public_ip }}
-      with_items: gce.instance_data
-
-    - name: Wait for root user setup
-      command: "ssh -o StrictHostKeyChecking=no -o PasswordAuthentication=no -o ConnectTimeout=10 -o UserKnownHostsFile=/dev/null root@{{ item.public_ip }} echo root user is setup"
-      register: result
-      until: result.rc == 0
-      retries: 20
-      delay: 10
-      with_items: gce.instance_data
-
-
-# Apply the configs, separate so that just the configs can be run by themselves
-- include: config.yml

+ 0 - 1
playbooks/gce/openshift-master/roles

@@ -1 +0,0 @@
-../../../roles

+ 0 - 35
playbooks/gce/openshift-master/terminate.yml

@@ -1,35 +0,0 @@
----
-- name: Populate oo_masters_to_terminate host group if needed
-  hosts: localhost
-  gather_facts: no
-  tasks:
-    - name: Evaluate oo_masters_to_terminate
-      add_host: name={{ item }} groups=oo_masters_to_terminate
-      with_items: oo_host_group_exp | default([])
-
-- name: Terminate master instances
-  hosts: localhost
-  connection: local
-  gather_facts: no
-  tasks:
-    - name: Terminate master instances
-      gce:
-        service_account_email: "{{ gce_service_account_email }}"
-        pem_file: "{{ gce_pem_file }}"
-        project_id: "{{ gce_project_id }}"
-        state: 'absent'
-        instance_names: "{{ groups['oo_masters_to_terminate'] }}"
-        disks: "{{ groups['oo_masters_to_terminate'] }}"
-      register: gce
-      when: "'oo_masters_to_terminate' in groups"
-
-    - name: Remove disks of instances
-      gce_pd:
-        service_account_email: "{{ gce_service_account_email }}"
-        pem_file: "{{ gce_pem_file }}"
-        project_id: "{{ gce_project_id }}"
-        name: "{{ item }}"
-        zone: "{{ gce.zone }}"
-        state: absent
-      with_items: gce.instance_names
-      when: "'oo_masters_to_terminate' in groups"

+ 0 - 25
playbooks/gce/openshift-node/config.yml

@@ -1,25 +0,0 @@
----
-- name: Populate oo_nodes_to_config and oo_first_master host groups
-  hosts: localhost
-  gather_facts: no
-  tasks:
-  - name: Evaluate oo_nodes_to_config
-    add_host:
-      name: "{{ item }}"
-      groups: oo_nodes_to_config
-      ansible_ssh_user: root
-    with_items: oo_host_group_exp | default([])
-  - name: Evaluate oo_first_master
-    add_host:
-      name: "{{ groups['tag_env-host-type-' ~ cluster_id ~ '-openshift-master'][0] }}"
-      groups: oo_first_master
-      ansible_ssh_user: root
-
-
-- include: ../../common/openshift-node/config.yml
-  vars:
-    openshift_cluster_id: "{{ cluster_id }}"
-    openshift_debug_level: 4
-    openshift_deployment_type: "{{ deployment_type }}"
-    openshift_first_master: "{{ groups.oo_first_master.0 }}"
-    openshift_hostname: "{{ gce_private_ip }}"

+ 0 - 1
playbooks/gce/openshift-node/filter_plugins

@@ -1 +0,0 @@
-../../../filter_plugins

+ 0 - 51
playbooks/gce/openshift-node/launch.yml

@@ -1,51 +0,0 @@
----
-# TODO: when we are ready to go to ansible 1.9+ support only, we can update to
-# the gce task to use the disk_auto_delete parameter to avoid having to delete
-# the disk as a separate step on termination
-
-- name: Launch instance(s)
-  hosts: localhost
-  connection: local
-  gather_facts: no
-
-# TODO: modify image based on deployment_type
-  vars:
-    inst_names: "{{ oo_new_inst_names }}"
-    machine_type: n1-standard-1
-    image: libra-rhel7
-
-  tasks:
-    - name: Launch instances
-      gce:
-        instance_names: "{{ inst_names }}"
-        machine_type: "{{ machine_type }}"
-        image: "{{ image }}"
-        service_account_email: "{{ gce_service_account_email }}"
-        pem_file: "{{ gce_pem_file }}"
-        project_id: "{{ gce_project_id }}"
-        tags: "{{ oo_new_inst_tags }}"
-      register: gce
-
-    - name: Add new instances public IPs to oo_nodes_to_config
-      add_host:
-        hostname: "{{ item.name }}"
-        ansible_ssh_host: "{{ item.public_ip }}"
-        groupname: oo_nodes_to_config
-        gce_private_ip: "{{ item.private_ip }}"
-      with_items: gce.instance_data
-
-    - name: Wait for ssh
-      wait_for: port=22 host={{ item.public_ip }}
-      with_items: gce.instance_data
-
-    - name: Wait for root user setup
-      command: "ssh -o StrictHostKeyChecking=no -o PasswordAuthentication=no -o ConnectTimeout=10 -o UserKnownHostsFile=/dev/null root@{{ item.public_ip }} echo root user is setup"
-      register: result
-      until: result.rc == 0
-      retries: 20
-      delay: 10
-      with_items: gce.instance_data
-
-
-# Apply the configs, separate so that just the configs can be run by themselves
-- include: config.yml

+ 0 - 1
playbooks/gce/openshift-node/roles

@@ -1 +0,0 @@
-../../../roles

+ 0 - 35
playbooks/gce/openshift-node/terminate.yml

@@ -1,35 +0,0 @@
----
-- name: Populate oo_nodes_to_terminate host group if needed
-  hosts: localhost
-  gather_facts: no
-  tasks:
-    - name: Evaluate oo_nodes_to_terminate
-      add_host: name={{ item }} groups=oo_nodes_to_terminate
-      with_items: oo_host_group_exp | default([])
-
-- name: Terminate node instances
-  hosts: localhost
-  connection: local
-  gather_facts: no
-  tasks:
-    - name: Terminate node instances
-      gce:
-        service_account_email: "{{ gce_service_account_email }}"
-        pem_file: "{{ gce_pem_file }}"
-        project_id: "{{ gce_project_id }}"
-        state: 'absent'
-        instance_names: "{{ groups['oo_nodes_to_terminate'] }}"
-        disks: "{{ groups['oo_nodes_to_terminate'] }}"
-      register: gce
-      when: "'oo_nodes_to_terminate' in groups"
-
-    - name: Remove disks of instances
-      gce_pd:
-        service_account_email: "{{ gce_service_account_email }}"
-        pem_file: "{{ gce_pem_file }}"
-        project_id: "{{ gce_project_id }}"
-        name: "{{ item }}"
-        zone: "{{ gce.zone }}"
-        state: absent
-      with_items: gce.instance_names
-      when: "'oo_nodes_to_terminate' in groups"