Parcourir la source

Merge pull request #2334 from smunilla/BZ1358951

Bug 1358951 - Error loading config, no such key: 'deployment' when using a previously valid answers file
Scott Dodson il y a 8 ans
Parent
commit
57a794122c

+ 31 - 26
utils/docs/config.md

@@ -7,31 +7,38 @@ The default location this config file will be written to is ~/.config/openshift/ins
 ## Example
 
 ```
-version: v1
+version: v2
 variant: openshift-enterprise
-variant_version: 3.0
-ansible_ssh_user: root
-hosts:
-- ip: 10.0.0.1
-  hostname: master-private.example.com
-  public_ip: 24.222.0.1
-  public_hostname: master.example.com
-  master: true
-  node: true
-  containerized: true
-  connect_to: 24.222.0.1
-- ip: 10.0.0.2
-  hostname: node1-private.example.com
-  public_ip: 24.222.0.2
-  public_hostname: node1.example.com
-  node: true
-  connect_to: 10.0.0.2
-- ip: 10.0.0.3
-  hostname: node2-private.example.com
-  public_ip: 24.222.0.3
-  public_hostname: node2.example.com
-  node: true
-  connect_to: 10.0.0.3
+variant_version: 3.3
+deployment:
+  ansible_ssh_user: root
+  hosts:
+  - connect_to: 24.222.0.1
+    ip: 10.0.0.1
+    hostname: master-private.example.com
+    public_ip: 24.222.0.1
+    public_hostname: master.example.com
+    roles:
+      - master
+      - node
+    containerized: true
+  - connect_to: 10.0.0.2
+    ip: 10.0.0.2
+    hostname: node1-private.example.com
+    public_ip: 24.222.0.2
+    public_hostname: node1.example.com
+    roles:
+      - node
+  - connect_to: 10.0.0.3
+    ip: 10.0.0.3
+    hostname: node2-private.example.com
+    public_ip: 24.222.0.3
+    public_hostname: node2.example.com
+    roles:
+      - node
+  roles:
+    master:
+    node:
 ```
 
 ## Primary Settings
@@ -76,5 +83,3 @@ Defines the user ansible will use to ssh to remote systems for gathering facts a
 ### ansible_log_path
 
 Default: /tmp/ansible.log
-
-

+ 47 - 31
utils/src/ooinstall/oo_config.py

@@ -165,22 +165,34 @@ class OOConfig(object):
         self._set_defaults()
 
 
+# pylint: disable=too-many-branches
     def _read_config(self):
+        def _print_read_config_error(error, path='the configuration file'):
+            message = """
+Error loading config. {}.
+
+See https://docs.openshift.com/enterprise/latest/install_config/install/quick_install.html#defining-an-installation-configuration-file
+for information on creating a configuration file or delete {} and re-run the installer.
+"""
+            print message.format(error, path)
+
         try:
             if os.path.exists(self.config_path):
                 with open(self.config_path, 'r') as cfgfile:
                     loaded_config = yaml.safe_load(cfgfile.read())
 
-                # Use the presence of a Description as an indicator this is
-                # a legacy config file:
-                if 'Description' in self.settings:
-                    self._upgrade_legacy_config()
+                if not 'version' in loaded_config:
+                    _print_read_config_error('Legacy configuration file found', self.config_path)
+                    sys.exit(0)
+
+                if loaded_config.get('version', '') == 'v1':
+                    loaded_config = self._upgrade_v1_config(loaded_config)
 
                 try:
                     host_list = loaded_config['deployment']['hosts']
                     role_list = loaded_config['deployment']['roles']
                 except KeyError as e:
-                    print "Error loading config, no such key: {}".format(e)
+                    _print_read_config_error("No such key: {}".format(e), self.config_path)
                     sys.exit(0)
 
                 for setting in CONFIG_PERSIST_SETTINGS:
@@ -213,32 +225,36 @@ class OOConfig(object):
             raise OOConfigFileError(
                 'Config file "{}" is not a valid YAML document'.format(self.config_path))
 
-    def _upgrade_legacy_config(self):
-        new_hosts = []
-        remove_settings = ['validated_facts', 'Description', 'Name',
-            'Subscription', 'Vendor', 'Version', 'masters', 'nodes']
-
-        if 'validated_facts' in self.settings:
-            for key, value in self.settings['validated_facts'].iteritems():
-                value['connect_to'] = key
-                if 'masters' in self.settings and key in self.settings['masters']:
-                    value['master'] = True
-                if 'nodes' in self.settings and key in self.settings['nodes']:
-                    value['node'] = True
-                new_hosts.append(value)
-        self.settings['hosts'] = new_hosts
-
-        for s in remove_settings:
-            if s in self.settings:
-                del self.settings[s]
-
-        # A legacy config implies openshift-enterprise 3.0:
-        self.settings['variant'] = 'openshift-enterprise'
-        self.settings['variant_version'] = '3.0'
-
-    def _upgrade_v1_config(self):
-        #TODO write code to upgrade old config
-        return
+    def _upgrade_v1_config(self, config):
+        new_config_data = {}
+        new_config_data['deployment'] = {}
+        new_config_data['deployment']['hosts'] = []
+        new_config_data['deployment']['roles'] = {}
+        new_config_data['deployment']['variables'] = {}
+
+        role_list = {}
+
+        if config.get('ansible_ssh_user', False):
+            new_config_data['deployment']['ansible_ssh_user'] = config['ansible_ssh_user']
+
+        for host in config['hosts']:
+            host_props = {}
+            host_props['roles'] = []
+            host_props['connect_to'] = host['connect_to']
+
+            for prop in ['ip', 'public_ip', 'hostname', 'public_hostname', 'containerized', 'preconfigured']:
+                host_props[prop] = host.get(prop, None)
+
+            for role in ['master', 'node', 'master_lb', 'storage', 'etcd']:
+                if host.get(role, False):
+                    host_props['roles'].append(role)
+                    role_list[role] = ''
+
+            new_config_data['deployment']['hosts'].append(host_props)
+
+        new_config_data['deployment']['roles'] = role_list
+
+        return new_config_data
 
     def _set_defaults(self):
 

+ 6 - 0
utils/test/cli_installer_tests.py

@@ -101,6 +101,7 @@ MOCK_FACTS_QUICKHA = {
 # Missing connect_to on some hosts:
 BAD_CONFIG = """
 variant: %s
+version: v2
 deployment:
     ansible_ssh_user: root
     hosts:
@@ -132,6 +133,7 @@ deployment:
 
 QUICKHA_CONFIG = """
 variant: %s
+version: v2
 deployment:
     ansible_ssh_user: root
     hosts:
@@ -189,6 +191,7 @@ deployment:
 
 QUICKHA_2_MASTER_CONFIG = """
 variant: %s
+version: v2
 deployment:
     ansible_ssh_user: root
     hosts:
@@ -238,6 +241,7 @@ deployment:
 
 QUICKHA_CONFIG_REUSED_LB = """
 variant: %s
+version: v2
 deployment:
     ansible_ssh_user: root
     hosts:
@@ -281,6 +285,7 @@ deployment:
 
 QUICKHA_CONFIG_NO_LB = """
 variant: %s
+version: v2
 deployment:
     ansible_ssh_user: root
     hosts:
@@ -323,6 +328,7 @@ deployment:
 
 QUICKHA_CONFIG_PRECONFIGURED_LB = """
 variant: %s
+version: v2
 deployment:
     ansible_ssh_user: root
     hosts:

+ 1 - 0
utils/test/fixture.py

@@ -12,6 +12,7 @@ SAMPLE_CONFIG = """
 variant: %s
 variant_version: 3.3
 master_routingconfig_subdomain: example.com
+version: v2
 deployment:
     ansible_ssh_user: root
     hosts:

+ 3 - 19
utils/test/oo_config_tests.py

@@ -13,6 +13,7 @@ from ooinstall.oo_config import OOConfig, Host, OOConfigInvalidHostError
 SAMPLE_CONFIG = """
 variant: openshift-enterprise
 variant_version: 3.3
+version: v2
 deployment:
     ansible_ssh_user: root
     hosts:
@@ -43,27 +44,9 @@ deployment:
         node:
 """
 
-# Used to test automatic upgrading of config:
-LEGACY_CONFIG = """
-Description: This is the configuration file for the OpenShift Ansible-Based Installer.
-Name: OpenShift Ansible-Based Installer Configuration
-Subscription: {type: none}
-Vendor: OpenShift Community
-Version: 0.0.1
-ansible_config: /tmp/notreal/ansible.cfg
-ansible_inventory_directory: /tmp/notreal/.config/openshift/.ansible
-ansible_log_path: /tmp/ansible.log
-ansible_plugins_directory: /tmp/notreal/.python-eggs/ooinstall-3.0.0-py2.7.egg-tmp/ooinstall/ansible_plugins
-masters: [10.0.0.1]
-nodes: [10.0.0.2, 10.0.0.3]
-validated_facts:
-  10.0.0.1: {hostname: master-private.example.com, ip: 10.0.0.1, public_hostname: master.example.com, public_ip: 24.222.0.1}
-  10.0.0.2: {hostname: node1-private.example.com, ip: 10.0.0.2, public_hostname: node1.example.com, public_ip: 24.222.0.2}
-  10.0.0.3: {hostname: node2-private.example.com, ip: 10.0.0.3, public_hostname: node2.example.com, public_ip: 24.222.0.3}
-"""
-
 
 CONFIG_INCOMPLETE_FACTS = """
+version: v2
 deployment:
     ansible_ssh_user: root
     hosts:
@@ -91,6 +74,7 @@ deployment:
 
 CONFIG_BAD = """
 variant: openshift-enterprise
+version: v2
 deployment:
     ansible_ssh_user: root
     hosts: