Merge pull request #9388 from chancez/metering_cloud_support

Add support to automate setup and config of RDS & S3 for openshift-metering
OpenShift Merge Robot 6 years ago
parent commit fdeb3fc488
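
For orientation, a minimal sketch of the inventory variables this change introduces (names taken from the new roles/openshift_metering/defaults/main.yml further down in this diff); every value shown is an illustrative placeholder, not a shipped default:

```yaml
# Hypothetical inventory/group_vars snippet enabling the new automation.
# Variable names come from roles/openshift_metering/defaults/main.yml below;
# all values here are placeholders.
openshift_metering_hive_metastore_db_use_rds: true
# No default is shipped for the RDS master password; supply your own.
openshift_metering_hive_metastore_db_rds_instance_password: "changeme"
openshift_metering_default_storage_use_s3: true
openshift_metering_s3_storage_bucket_name: my-metering-bucket
openshift_metering_aws_access_key_id: "AKIA..."
openshift_metering_aws_secret_access_key: "..."
```

With these set, `ansible-playbook playbooks/openshift-metering/config.yml` provisions the RDS instance and S3 bucket before installing the operator.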

+ 0 - 8
playbooks/openshift-metering/README.md

@@ -30,11 +30,3 @@ Then run:
 ```bash
 ansible-playbook playbooks/openshift-metering/config.yml
 ```
-
-## GCP Development
-
-The `gcp-config.yml` playbook is useful for ad-hoc installation in an existing GCE cluster:
-
-```bash
-ansible-playbook playbooks/openshift-metering/gcp-config.yml
-```

+ 1 - 6
playbooks/openshift-metering/config.yml

@@ -1,8 +1,3 @@
 ---
-- import_playbook: ../init/main.yml
-  vars:
-    l_init_fact_hosts: "oo_masters_to_config"
-    l_openshift_version_set_hosts: "oo_masters_to_config:!oo_first_master"
-    l_sanity_check_hosts: "{{ groups['oo_masters_to_config'] }}"
-
+- import_playbook: private/setup.yml
 - import_playbook: private/config.yml

+ 0 - 16
playbooks/openshift-metering/gcp-config.yml

@@ -1,16 +0,0 @@
----
-- hosts: localhost
-  connection: local
-  tasks:
-  - name: place all scale groups into Ansible groups
-    include_role:
-      name: openshift_gcp
-      tasks_from: setup_scale_group_facts.yml
-
-- import_playbook: ../init/main.yml
-  vars:
-    l_init_fact_hosts: "oo_masters_to_config"
-    l_openshift_version_set_hosts: "oo_masters_to_config:!oo_first_master"
-    l_sanity_check_hosts: "{{ groups['oo_masters_to_config'] }}"
-
-- import_playbook: private/config.yml

+ 9 - 7
playbooks/openshift-metering/private/config.yml

@@ -1,5 +1,5 @@
 ---
-- name: Openshift Metering Checkpoint Start
+- name: Openshift Metering Install Checkpoint Start
   hosts: all
   gather_facts: false
   tasks:
@@ -7,19 +7,21 @@
     run_once: true
     set_stats:
       data:
-        installer_phase_openshift_metering:
+        install_phase_openshift_metering:
           title: "Openshift Metering"
           playbook: "playbooks/openshift-metering/config.yml"
           status: "In Progress"
           start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
 
 
-- name: Configure Openshift Metering
+- name: Install Openshift Metering
   hosts: oo_first_master
-  roles:
-  - role: openshift_metering
+  tasks:
+  - include_role:
+      name: openshift_metering
+      tasks_from: install
 
-- name: Openshift Metering Checkpoint End
+- name: Openshift Metering Install Checkpoint End
   hosts: all
   gather_facts: false
   tasks:
@@ -27,6 +29,6 @@
     run_once: true
     set_stats:
       data:
-        installer_phase_openshift_metering:
+        install_phase_openshift_metering:
           status: "Complete"
           end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

+ 6 - 0
playbooks/openshift-metering/private/setup.yml

@@ -0,0 +1,6 @@
+---
+- import_playbook: ../../init/main.yml
+  vars:
+    l_init_fact_hosts: "oo_masters_to_config"
+    l_openshift_version_set_hosts: "oo_masters_to_config:!oo_first_master"
+    l_sanity_check_hosts: "{{ groups['oo_masters_to_config'] }}"

+ 34 - 0
playbooks/openshift-metering/private/uninstall.yml

@@ -0,0 +1,34 @@
+---
+- name: Openshift Metering Uninstall Checkpoint Start
+  hosts: all
+  gather_facts: false
+  tasks:
+  - name: Set Openshift Metering 'In Progress'
+    run_once: true
+    set_stats:
+      data:
+        uninstall_phase_openshift_metering:
+          title: "Openshift Metering"
+          playbook: "playbooks/openshift-metering/uninstall.yml"
+          status: "In Progress"
+          start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
+
+
+- name: Uninstall Openshift Metering
+  hosts: oo_first_master
+  tasks:
+  - include_role:
+      name: openshift_metering
+      tasks_from: uninstall
+
+- name: Openshift Metering Uninstall Checkpoint End
+  hosts: all
+  gather_facts: false
+  tasks:
+  - name: Set Openshift Metering 'Complete'
+    run_once: true
+    set_stats:
+      data:
+        uninstall_phase_openshift_metering:
+          status: "Complete"
+          end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

+ 3 - 0
playbooks/openshift-metering/uninstall.yml

@@ -0,0 +1,3 @@
+---
+- import_playbook: private/setup.yml
+- import_playbook: private/uninstall.yml
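
The matching teardown path is opt-in. A sketch of the extra variables a user might set before running `ansible-playbook playbooks/openshift-metering/uninstall.yml`; both flags default to false in defaults/main.yml, so cloud resources are kept unless deletion is explicitly requested:

```yaml
# Hypothetical teardown vars: remove the operator AND delete the cloud
# resources created for it (both flags default to false).
openshift_metering_hive_metastore_db_rds_delete: true
openshift_metering_s3_bucket_delete: true
```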

+ 3 - 1
roles/openshift_metering/README.md

@@ -37,11 +37,13 @@ Updating the operator itself to a custom image can be done by setting `openshift
 For example:
 
 ```
-openshift_metering_config: quay.io/coreos/chargeback-helm-operator:latest
+openshift_metering_operator_image: quay.io/coreos/chargeback-helm-operator:latest
 ```
 
 Using a custom project/namespace can be done by specifying `__openshift_metering_namespace`.
 
+For a full list of variables and descriptions of what they do, see the [defaults/main.yml](defaults/main.yml) file.
+
 ## License
 
 Apache License, Version 2.0
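
The custom-namespace note a few lines up has no accompanying example in the README; a hedged sketch, assuming the variable is set from the inventory as the README suggests and with a purely illustrative value:

```yaml
# Hypothetical inventory override for the project/namespace mentioned in the
# role README above; "custom-metering" is only an example value.
__openshift_metering_namespace: custom-metering
```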

+ 126 - 0
roles/openshift_metering/defaults/main.yml

@@ -1,4 +1,130 @@
 ---
+openshift_metering_install: true
 openshift_metering_operator_image: ''
 
 openshift_metering_config: null
+
+# Configures AWS access credentials on all pods that use them to communicate
+# with S3.
+openshift_metering_aws_access_key_id: ""
+openshift_metering_aws_secret_access_key: ""
+
+# RDS Database for custom Hive Metastore DB options
+openshift_metering_hive_metastore_db_use_rds: false
+# If openshift_metering_hive_metastore_db_rds_create is true, creates an RDS
+# database for Hive Metastore metadata.
+openshift_metering_hive_metastore_db_rds_create: "{{ openshift_metering_hive_metastore_db_use_rds }}"
+# If openshift_metering_hive_metastore_db_rds_delete is true, delete the RDS
+# database specified by openshift_metering_hive_metastore_db_rds_instance_name.
+openshift_metering_hive_metastore_db_rds_delete: false
+openshift_metering_hive_metastore_db_rds_instance_name: 'metering-hive-metastore'
+openshift_metering_hive_metastore_db_rds_instance_db_name: 'metering_hive_metastore'
+openshift_metering_hive_metastore_db_rds_instance_db_engine: 'MySQL'
+openshift_metering_hive_metastore_db_rds_instance_size: 10
+openshift_metering_hive_metastore_db_rds_instance_type: 'db.m1.small'
+openshift_metering_hive_metastore_db_rds_instance_username: 'hive'
+openshift_metering_hive_metastore_db_rds_instance_password: null
+openshift_metering_hive_metastore_db_rds_instance_publicly_accessible: false
+openshift_metering_hive_metastore_db_rds_instance_wait: true
+openshift_metering_hive_metastore_db_rds_instance_wait_timeout: 600
+openshift_metering_hive_metastore_db_rds_subnet_group: null
+openshift_metering_hive_metastore_db_rds_vpc_security_groups: null
+openshift_metering_hive_metastore_db_rds_apply_immediately: true
+
+
+# If openshift_metering_hive_metastore_db_use_rds is true, then this variable
+# will set the defaults of the other hive_metastore_db values to use the RDS
+# database specified. Otherwise Hive Metastore will use an embedded Derby
+# database in a volume.
+openshift_metering_hive_metastore_db_deployment_type: "{{ openshift_metering_hive_metastore_db_use_rds | ternary('rds', 'derby_local') }}"
+
+# Custom Hive Metastore DB options
+openshift_metering_hive_metastore_db_use_custom: "{{ l_openshift_metering_hive_metastore_db_dict[openshift_metering_hive_metastore_db_deployment_type].use_custom }}"
+openshift_metering_hive_metastore_db_engine: "{{ l_openshift_metering_hive_metastore_db_dict[openshift_metering_hive_metastore_db_deployment_type].engine }}"
+openshift_metering_hive_metastore_db_driver: "{{ l_openshift_metering_hive_metastore_db_dict[openshift_metering_hive_metastore_db_deployment_type].driver }}"
+openshift_metering_hive_metastore_db_url: "{{ l_openshift_metering_hive_metastore_db_url_dict[openshift_metering_hive_metastore_db_deployment_type].url }}"
+openshift_metering_hive_metastore_db_username: "{{ l_openshift_metering_hive_metastore_db_dict[openshift_metering_hive_metastore_db_deployment_type].username }}"
+openshift_metering_hive_metastore_db_password: "{{ l_openshift_metering_hive_metastore_db_dict[openshift_metering_hive_metastore_db_deployment_type].password }}"
+
+# S3 default storage options. If openshift_metering_default_storage_use_s3 is
+# true, then the openshift_metering_default_storage_* variables will be set to
+# use the specified S3 bucket.
+openshift_metering_default_storage_use_s3: false
+
+# If openshift_metering_s3_bucket_create is true, creates an S3 bucket to store
+# metering data in.
+openshift_metering_s3_bucket_create: "{{ openshift_metering_default_storage_use_s3 }}"
+# If openshift_metering_s3_bucket_delete is true, delete the bucket specified
+# by openshift_metering_s3_storage_bucket_name.
+openshift_metering_s3_bucket_delete: false
+openshift_metering_s3_storage_bucket_name: openshift-metering-storage
+openshift_metering_s3_storage_bucket_path: metering-data
+
+# If openshift_metering_default_storage_use_s3 is set, the default storage
+# variables will be set to use the S3 bucket defined by the
+# openshift_metering_s3_* variables. Otherwise use HDFS with no customizations
+# to the default_storage variables.
+openshift_metering_default_storage_deployment_type: "{{ openshift_metering_default_storage_use_s3 | ternary('s3', 'hdfs') }}"
+
+# If openshift_metering_hdfs_enabled is true, HDFS will be installed. Otherwise
+# HDFS will be disabled and no HDFS pods will be created.
+# When set to false, the openshift_metering_default_storage_* options or
+# openshift_metering_s3_storage_* options should be specified.
+openshift_metering_hdfs_enabled: "{{ l_openshift_metering_default_storage_dict[openshift_metering_default_storage_deployment_type].hdfs_enabled }}"
+
+# Custom Default Storage location options
+openshift_metering_default_storage_use_custom: "{{ l_openshift_metering_default_storage_dict[openshift_metering_default_storage_deployment_type].use_custom }}"
+openshift_metering_default_storage_name: "{{ l_openshift_metering_default_storage_dict[openshift_metering_default_storage_deployment_type].name }}"
+openshift_metering_default_storage_type: "{{ l_openshift_metering_default_storage_dict[openshift_metering_default_storage_deployment_type].type }}"
+openshift_metering_default_storage_config: "{{ l_openshift_metering_default_storage_dict[openshift_metering_default_storage_deployment_type].config }}"
+
+# Below are dicts mapping how variable values should be set by default
+# according to other variables' values. Users should not modify any of these
+# values.
+
+# a map of db engine to JDBC driver
+l_db_engine_to_driver:
+  mysql: 'com.mysql.jdbc.Driver'
+  postgres: 'org.postgresql.Driver'
+
+# Mapping of metastore_db values depending on whether the deployment uses
+# Derby or RDS.
+l_openshift_metering_hive_metastore_db_dict:
+  derby_local:
+    use_custom: false
+    driver: null
+    username: null
+    password: null
+  rds:
+    use_custom: true
+    engine: "{{ openshift_metering_hive_metastore_db_rds_instance_db_engine | lower }}"
+    driver: "{{ l_db_engine_to_driver[(openshift_metering_hive_metastore_db_rds_instance_db_engine | lower)] }}"
+    username: "{{ openshift_metering_hive_metastore_db_rds_instance_username }}"
+    password: "{{ openshift_metering_hive_metastore_db_rds_instance_password }}"
+
+# We use a separate dict here to avoid undefined variable access since this
+# depends on openshift_metering_hive_metastore_db_facts, which is a registered
+# var from the RDS install task.
+l_openshift_metering_hive_metastore_db_url_dict:
+  derby_local:
+    url: null
+  rds:
+    url: "jdbc:{{ openshift_metering_hive_metastore_db_facts.instance.engine }}://{{ openshift_metering_hive_metastore_db_facts.instance.endpoint }}:{{ openshift_metering_hive_metastore_db_facts.instance.port }}/{{ openshift_metering_hive_metastore_db_facts.instance.db_name }}"
+
+# Mapping of default_storage values depending on whether HDFS or S3 is used.
+l_openshift_metering_default_storage_dict:
+  hdfs:
+    hdfs_enabled: true
+    use_custom: false
+    name: ""
+    type: ""
+    config: null
+  s3:
+    hdfs_enabled: false
+    use_custom: true
+    name: "{{ openshift_metering_s3_storage_bucket_name }}"
+    type: "hive"
+    config:
+      hive:
+        tableProperties:
+          location: "s3a://{{ openshift_metering_s3_storage_bucket_name }}/{{ openshift_metering_s3_storage_bucket_path | default('') }}"
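
To make the layered defaults above concrete, a sketch of how they resolve when openshift_metering_hive_metastore_db_use_rds is true and the default MySQL engine is kept; the endpoint and port come from the registered openshift_metering_hive_metastore_db_facts at runtime and are shown here only as placeholders:

```yaml
# Illustrative resolution of the RDS path (placeholder endpoint/port):
openshift_metering_hive_metastore_db_deployment_type: rds
openshift_metering_hive_metastore_db_engine: mysql
openshift_metering_hive_metastore_db_driver: com.mysql.jdbc.Driver
openshift_metering_hive_metastore_db_url: "jdbc:mysql://<rds-endpoint>:3306/metering_hive_metastore"
openshift_metering_hive_metastore_db_username: hive
```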

roles/openshift_metering/meta/main.yaml → roles/openshift_metering/meta/main.yml


+ 8 - 0
roles/openshift_metering/tasks/deprovision-cloud-resources.yml

@@ -0,0 +1,8 @@
+---
+- name: Openshift Metering RDS Database teardown
+  import_tasks: rds-database-uninstall.yml
+  when: openshift_metering_hive_metastore_db_rds_delete | bool
+
+- name: Openshift Metering S3 Bucket teardown
+  import_tasks: s3-bucket-storage-uninstall.yml
+  when: openshift_metering_s3_bucket_delete | bool

+ 5 - 0
roles/openshift_metering/tasks/install.yml

@@ -0,0 +1,5 @@
+---
+- name: Openshift Metering Cloud resource provisioning
+  import_tasks: provision-cloud-resources.yml
+- name: Openshift Metering Operator installation
+  include_tasks: operator-install.yml

+ 0 - 6
roles/openshift_metering/tasks/main.yaml

@@ -1,6 +0,0 @@
----
-- include_tasks: install.yaml
-  when: openshift_metering_install | default(true) | bool
-
-- include_tasks: uninstall.yaml
-  when: not openshift_metering_install | default(true) | bool

+ 2 - 0
roles/openshift_metering/tasks/main.yml

@@ -0,0 +1,2 @@
+---
+# Use import_role with tasks_from to specify the correct task to run
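
The new main.yml is intentionally empty: callers select an entry point explicitly, as the playbooks in this PR do. A minimal sketch of that invocation pattern, copied from private/config.yml above:

```yaml
# Select the role entry point via tasks_from instead of relying on main.yml.
- include_role:
    name: openshift_metering
    tasks_from: install   # or: uninstall
```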

+ 57 - 0
roles/openshift_metering/tasks/install.yaml → roles/openshift_metering/tasks/operator-install.yml

@@ -37,6 +37,63 @@
       value: "{{ openshift_metering_config }}"
   when: openshift_metering_config != None
 
+- name: Update Metering config with custom database config for Hive metastore
+  yedit:
+    src: "{{ mktemp.stdout }}/metering-manifests/operator/metering.yaml"
+    edits:
+    - key: spec.presto.hive.config.dbConnectionURL
+      value: "{{ openshift_metering_hive_metastore_db_url }}"
+    - key: spec.presto.hive.config.dbDriver
+      value: "{{ openshift_metering_hive_metastore_db_driver }}"
+    - key: spec.presto.hive.config.dbConnectionUsername
+      value: "{{ openshift_metering_hive_metastore_db_username }}"
+    - key: spec.presto.hive.config.dbConnectionPassword
+      value: "{{ openshift_metering_hive_metastore_db_password }}"
+  when: openshift_metering_hive_metastore_db_use_custom | bool
+
+- name: Update Metering config with custom default storage location
+  yedit:
+    src: "{{ mktemp.stdout }}/metering-manifests/operator/metering.yaml"
+    edits:
+    - key: spec.metering-operator.config.defaultStorage
+      value:
+        create: true
+        isDefault: true
+        name: "{{ openshift_metering_default_storage_name }}"
+        type: "{{ openshift_metering_default_storage_type }}"
+    - key: "spec.metering-operator.config.defaultStorage.{{ openshift_metering_default_storage_type }}"
+      value: "{{ openshift_metering_default_storage_config[openshift_metering_default_storage_type] }}"
+  when: openshift_metering_default_storage_use_custom | bool
+
+- name: Update Metering config to enable or disable HDFS
+  yedit:
+    src: "{{ mktemp.stdout }}/metering-manifests/operator/metering.yaml"
+    edits:
+    - key: spec.hdfs.enabled
+      value: "{{ openshift_metering_hdfs_enabled }}"
+  when: openshift_metering_default_storage_use_custom | bool
+
+- name: Update Metering config with AWS credentials
+  yedit:
+    src: "{{ mktemp.stdout }}/metering-manifests/operator/metering.yaml"
+    edits:
+    - key: "{{ item.key }}"
+      value: "{{ item.value }}"
+  # If the value is empty, then the credentials aren't set and we don't need to
+  # make the edit
+  when: item.value != ""
+  with_items:
+  - key: spec.metering-operator.config.awsAccessKeyID
+    value: "{{ openshift_metering_aws_access_key_id }}"
+  - key: spec.metering-operator.config.awsSecretAccessKey
+    value: "{{ openshift_metering_aws_secret_access_key }}"
+  - key: spec.presto.config.awsAccessKeyID
+    value: "{{ openshift_metering_aws_access_key_id }}"
+  - key: spec.presto.config.awsSecretAccessKey
+    value: "{{ openshift_metering_aws_secret_access_key }}"
+  # Hide secrets
+  no_log: True
+
 - name: Fetch updated operator manifests
   fetch:
     src: "{{ item.dest }}"

roles/openshift_metering/tasks/uninstall.yaml → roles/openshift_metering/tasks/operator-uninstall.yml


+ 8 - 0
roles/openshift_metering/tasks/provision-cloud-resources.yml

@@ -0,0 +1,8 @@
+---
+- name: Openshift Metering RDS Database setup
+  import_tasks: rds-database-install.yml
+  when: openshift_metering_hive_metastore_db_rds_create | bool
+
+- name: Openshift Metering S3 Bucket setup
+  import_tasks: s3-bucket-storage-install.yml
+  when: openshift_metering_s3_bucket_create | bool

+ 40 - 0
roles/openshift_metering/tasks/rds-database-install.yml

@@ -0,0 +1,40 @@
+---
+- name: Validate Metering Hive Metastore DB engine
+  fail:
+    msg: "No Hive metastore DB driver set, invalid DB engine for Hive metastore: {{ openshift_metering_hive_metastore_db_engine }}"
+  when: not openshift_metering_hive_metastore_db_driver
+
+- name: Create Metering Hive metadata database using RDS
+  local_action:
+    module: rds
+    command: create
+    instance_name: "{{ openshift_metering_hive_metastore_db_rds_instance_name }}"
+    db_name: "{{ openshift_metering_hive_metastore_db_rds_instance_db_name }}"
+    db_engine: "{{ openshift_metering_hive_metastore_db_rds_instance_db_engine }}"
+    size: "{{ openshift_metering_hive_metastore_db_rds_instance_size }}"
+    instance_type: "{{ openshift_metering_hive_metastore_db_rds_instance_type }}"
+    username: "{{ openshift_metering_hive_metastore_db_rds_instance_username }}"
+    password: "{{ openshift_metering_hive_metastore_db_rds_instance_password }}"
+    publicly_accessible: "{{ openshift_metering_hive_metastore_db_rds_instance_publicly_accessible }}"
+    wait: "{{ openshift_metering_hive_metastore_db_rds_instance_wait }}"
+    wait_timeout: "{{ openshift_metering_hive_metastore_db_rds_instance_wait_timeout }}"
+    vpc_security_groups: "{{ openshift_metering_hive_metastore_db_rds_vpc_security_groups }}"
+    tags:
+      application: openshift-metering-hive-metastore
+  register: openshift_metering_hive_metastore_db_facts
+  when: openshift_metering_hive_metastore_db_rds_create | bool
+
+- name: Update Metering Hive metadata RDS database configuration
+  local_action:
+    module: rds
+    command: modify
+    instance_name: "{{ openshift_metering_hive_metastore_db_rds_instance_name }}"
+    size: "{{ openshift_metering_hive_metastore_db_rds_instance_size }}"
+    instance_type: "{{ openshift_metering_hive_metastore_db_rds_instance_type }}"
+    password: "{{ openshift_metering_hive_metastore_db_rds_instance_password }}"
+    wait: "{{ openshift_metering_hive_metastore_db_rds_instance_wait }}"
+    wait_timeout: "{{ openshift_metering_hive_metastore_db_rds_instance_wait_timeout }}"
+    vpc_security_groups: "{{ openshift_metering_hive_metastore_db_rds_vpc_security_groups }}"
+    apply_immediately: "{{ openshift_metering_hive_metastore_db_rds_apply_immediately }}"
+  register: openshift_metering_hive_metastore_db_facts
+  when: openshift_metering_hive_metastore_db_rds_create | bool

+ 7 - 0
roles/openshift_metering/tasks/rds-database-uninstall.yml

@@ -0,0 +1,7 @@
+---
+- name: Delete Metering Hive metadata RDS database
+  local_action:
+    module: rds
+    command: delete
+    instance_name: "{{ openshift_metering_hive_metastore_db_rds_instance_name }}"
+  when: openshift_metering_hive_metastore_db_rds_delete | bool

+ 7 - 0
roles/openshift_metering/tasks/s3-bucket-storage-install.yml

@@ -0,0 +1,7 @@
+---
+- name: Create Metering S3 bucket for Storage using AWS S3
+  local_action:
+    module: s3_bucket
+    state: present
+    name: "{{ openshift_metering_s3_storage_bucket_name }}"
+  when: openshift_metering_s3_bucket_create | bool

+ 9 - 0
roles/openshift_metering/tasks/s3-bucket-storage-uninstall.yml

@@ -0,0 +1,9 @@
+---
+- name: Delete Metering S3 bucket for Storage using AWS S3
+  local_action:
+    module: s3_bucket
+    state: absent
+    name: "{{ openshift_metering_s3_storage_bucket_name }}"
+    # delete all keys, since a bucket must be empty to be deleted
+    force: yes
+  when: openshift_metering_s3_bucket_delete | bool

+ 5 - 0
roles/openshift_metering/tasks/uninstall.yml

@@ -0,0 +1,5 @@
+---
+- name: Openshift Metering Operator uninstall
+  import_tasks: operator-uninstall.yml
+- name: Openshift Metering Cloud resource deprovisioning
+  import_tasks: deprovision-cloud-resources.yml