diff --git a/distributedcloud/dcmanager/api/controllers/v1/phased_subcloud_deploy.py b/distributedcloud/dcmanager/api/controllers/v1/phased_subcloud_deploy.py index d3d4a75b2..11f13689e 100644 --- a/distributedcloud/dcmanager/api/controllers/v1/phased_subcloud_deploy.py +++ b/distributedcloud/dcmanager/api/controllers/v1/phased_subcloud_deploy.py @@ -10,7 +10,6 @@ import os from oslo_log import log as logging from oslo_messaging import RemoteError import pecan -import tsconfig.tsconfig as tsc import yaml from dcmanager.api.controllers import restcomm @@ -170,41 +169,19 @@ class PhasedSubcloudDeployController(object): payload = get_create_payload(request) - if not payload: - pecan.abort(400, _('Body required')) - - psd_common.validate_bootstrap_values(payload) - - # If a subcloud release is not passed, use the current - # system controller software_version - payload['software_version'] = payload.get('release', tsc.SW_VERSION) - - psd_common.validate_subcloud_name_availability(context, payload['name']) - - psd_common.validate_system_controller_patch_status("create") - - psd_common.validate_subcloud_config(context, payload) - - psd_common.validate_install_values(payload) - - psd_common.validate_k8s_version(payload) - - psd_common.format_ip_address(payload) - - # Upload the deploy config files if it is included in the request - # It has a dependency on the subcloud name, and it is called after - # the name has been validated - psd_common.upload_deploy_config_file(request, payload) + psd_common.pre_deploy_create(payload, context, request) try: # Add the subcloud details to the database subcloud = psd_common.add_subcloud_to_database(context, payload) - # Ask dcmanager-manager to add the subcloud. + # Ask dcmanager-manager to create the subcloud. # It will do all the real work... 
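# The inline validation removed from this controller is consolidated into
# psd_common.pre_deploy_create(), added in common/phased_subcloud_deploy.py further
# down in this patch. A rough sketch of the sequence that helper now owns (the
# wrapper below is illustrative; the individual psd_common calls are the real ones):
import pecan
import tsconfig.tsconfig as tsc

from dcmanager.common import phased_subcloud_deploy as psd_common
from dcmanager.common.i18n import _


def pre_deploy_create_sketch(payload, context, request):
    if not payload:
        pecan.abort(400, _('Body required'))
    psd_common.validate_bootstrap_values(payload)
    # Default to the system controller load when no release is passed
    payload['software_version'] = payload.get('release', tsc.SW_VERSION)
    psd_common.validate_subcloud_name_availability(context, payload['name'])
    psd_common.validate_system_controller_patch_status("create")
    psd_common.validate_subcloud_config(context, payload)
    psd_common.validate_install_values(payload)
    psd_common.validate_k8s_version(payload)
    psd_common.format_ip_address(payload)
    # Depends on the validated subcloud name, so it runs last
    psd_common.upload_deploy_config_file(request, payload)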
subcloud = self.dcmanager_rpc_client.subcloud_deploy_create( context, subcloud.id, payload) - return subcloud + + subcloud_dict = db_api.subcloud_db_model_to_dict(subcloud) + return subcloud_dict except RemoteError as e: pecan.abort(httpclient.UNPROCESSABLE_ENTITY, e.value) diff --git a/distributedcloud/dcmanager/api/controllers/v1/subclouds.py b/distributedcloud/dcmanager/api/controllers/v1/subclouds.py index ef2f7b6fb..c47381afc 100644 --- a/distributedcloud/dcmanager/api/controllers/v1/subclouds.py +++ b/distributedcloud/dcmanager/api/controllers/v1/subclouds.py @@ -23,11 +23,6 @@ from requests_toolbelt.multipart import decoder import base64 import json import keyring -from netaddr import AddrFormatError -from netaddr import IPAddress -from netaddr import IPNetwork -from netaddr import IPRange -import os from oslo_config import cfg from oslo_log import log as logging from oslo_messaging import RemoteError @@ -40,9 +35,6 @@ from pecan import request from dccommon import consts as dccommon_consts from dccommon.drivers.openstack.fm import FmClient -from dccommon.drivers.openstack import patching_v1 -from dccommon.drivers.openstack.patching_v1 import PatchingClient -from dccommon.drivers.openstack.sdk_platform import OpenStackDriver from dccommon.drivers.openstack.sysinv_v1 import SysinvClient from dccommon import exceptions as dccommon_exceptions @@ -56,13 +48,13 @@ from dcmanager.api import policy from dcmanager.common import consts from dcmanager.common import exceptions from dcmanager.common.i18n import _ +from dcmanager.common import phased_subcloud_deploy as psd_common from dcmanager.common import prestage from dcmanager.common import utils from dcmanager.db import api as db_api from dcmanager.rpc import client as rpc_client from fm_api.constants import FM_ALARM_ID_UNSYNCHRONIZED_RESOURCE -from six.moves import range CONF = cfg.CONF LOG = logging.getLogger(__name__) @@ -136,64 +128,6 @@ class SubcloudsController(object): # Route the request to specific methods with parameters pass - def _validate_group_id(self, context, group_id): - try: - # The DB API will raise an exception if the group_id is invalid - db_api.subcloud_group_get(context, group_id) - except Exception as e: - LOG.exception(e) - pecan.abort(400, _("Invalid group_id")) - - @staticmethod - def _get_common_deploy_files(payload, software_version): - for f in consts.DEPLOY_COMMON_FILE_OPTIONS: - # Skip the prestage_images option as it is not relevant in this - # context - if f == consts.DEPLOY_PRESTAGE: - continue - filename = None - dir_path = os.path.join(dccommon_consts.DEPLOY_DIR, software_version) - if os.path.isdir(dir_path): - filename = utils.get_filename_by_prefix(dir_path, f + '_') - if filename is None: - pecan.abort(400, _("Missing required deploy file for %s") % f) - payload.update({f: os.path.join(dir_path, filename)}) - - def _upload_deploy_config_file(self, request, payload): - if consts.DEPLOY_CONFIG in request.POST: - file_item = request.POST[consts.DEPLOY_CONFIG] - filename = getattr(file_item, 'filename', '') - if not filename: - pecan.abort(400, _("No %s file uploaded" - % consts.DEPLOY_CONFIG)) - file_item.file.seek(0, os.SEEK_SET) - contents = file_item.file.read() - # the deploy config needs to upload to the override location - fn = self._get_config_file_path(payload['name'], consts.DEPLOY_CONFIG) - self._upload_config_file(contents, fn, consts.DEPLOY_CONFIG) - payload.update({consts.DEPLOY_CONFIG: fn}) - self._get_common_deploy_files(payload, payload['software_version']) - - @staticmethod - def 
_get_request_data(request): - payload = dict() - for f in SUBCLOUD_ADD_MANDATORY_FILE: - if f not in request.POST: - pecan.abort(400, _("Missing required file for %s") % f) - - for f in SUBCLOUD_ADD_GET_FILE_CONTENTS: - if f in request.POST: - file_item = request.POST[f] - file_item.file.seek(0, os.SEEK_SET) - data = yaml.safe_load(file_item.file.read().decode('utf8')) - if f == BOOTSTRAP_VALUES: - payload.update(data) - else: - payload.update({f: data}) - del request.POST[f] - payload.update(request.POST) - return payload - @staticmethod def _get_patch_data(request): payload = dict() @@ -248,15 +182,6 @@ class SubcloudsController(object): payload[consts.PRESTAGE_REQUEST_RELEASE] = val return payload - def _upload_config_file(self, file_item, config_file, config_type): - try: - with open(config_file, "w") as f: - f.write(file_item.decode('utf8')) - except Exception: - msg = _("Failed to upload %s file" % config_type) - LOG.exception(msg) - pecan.abort(400, msg) - def _get_reconfig_payload(self, request, subcloud_name, software_version): payload = dict() multipart_data = decoder.MultipartDecoder( @@ -268,65 +193,16 @@ class SubcloudsController(object): hv = hv.decode('utf8') if hk.decode('utf8') == 'Content-Disposition': if filename in hv: - fn = self._get_config_file_path( + fn = psd_common.get_config_file_path( subcloud_name, consts.DEPLOY_CONFIG) - self._upload_config_file( + psd_common.upload_config_file( part.content, fn, consts.DEPLOY_CONFIG) payload.update({consts.DEPLOY_CONFIG: fn}) elif "sysadmin_password" in hv: payload.update({'sysadmin_password': part.content}) - self._get_common_deploy_files(payload, software_version) + psd_common.get_common_deploy_files(payload, software_version) return payload - def _get_config_file_path(self, subcloud_name, config_file_type=None): - if config_file_type == consts.DEPLOY_CONFIG: - file_path = os.path.join( - dccommon_consts.ANSIBLE_OVERRIDES_PATH, - subcloud_name + '_' + config_file_type + '.yml' - ) - elif config_file_type == INSTALL_VALUES: - file_path = os.path.join( - dccommon_consts.ANSIBLE_OVERRIDES_PATH + '/' + subcloud_name, - config_file_type + '.yml' - ) - else: - file_path = os.path.join( - dccommon_consts.ANSIBLE_OVERRIDES_PATH, - subcloud_name + '.yml' - ) - return file_path - - @staticmethod - def _get_subcloud_db_install_values(subcloud): - if not subcloud.data_install: - msg = _("Failed to read data install from db") - LOG.exception(msg) - pecan.abort(400, msg) - - install_values = json.loads(subcloud.data_install) - - # mandatory bootstrap parameters - mandatory_bootstrap_parameters = [ - 'bootstrap_interface', - 'bootstrap_address', - 'bootstrap_address_prefix', - 'bmc_username', - 'bmc_address', - 'bmc_password', - ] - for p in mandatory_bootstrap_parameters: - if p not in install_values: - msg = _("Failed to get %s from data_install" % p) - LOG.exception(msg) - pecan.abort(400, msg) - - install_values.update({ - 'ansible_become_pass': consts.TEMP_SYSADMIN_PASSWORD, - 'ansible_ssh_pass': consts.TEMP_SYSADMIN_PASSWORD - }) - - return install_values - @staticmethod def _get_updatestatus_payload(request): """retrieve payload of a patch request for update_status @@ -339,269 +215,6 @@ class SubcloudsController(object): payload.update(json.loads(request.body)) return payload - def _validate_subcloud_config(self, context, payload, operation=None): - """Check whether subcloud config is valid.""" - - # Validate the name - if payload.get('name').isdigit(): - pecan.abort(400, _("name must contain alphabetic characters")) - - # If a 
subcloud group is not passed, use the default - group_id = payload.get('group_id', consts.DEFAULT_SUBCLOUD_GROUP_ID) - - if payload.get('name') in [dccommon_consts.DEFAULT_REGION_NAME, - dccommon_consts.SYSTEM_CONTROLLER_NAME]: - pecan.abort(400, _("name cannot be %(bad_name1)s or %(bad_name2)s") - % {'bad_name1': dccommon_consts.DEFAULT_REGION_NAME, - 'bad_name2': dccommon_consts.SYSTEM_CONTROLLER_NAME}) - - admin_subnet = payload.get('admin_subnet', None) - admin_start_ip = payload.get('admin_start_address', None) - admin_end_ip = payload.get('admin_end_address', None) - admin_gateway_ip = payload.get('admin_gateway_address', None) - - # Parse/validate the management subnet - subcloud_subnets = [] - subclouds = db_api.subcloud_get_all(context) - for subcloud in subclouds: - subcloud_subnets.append(IPNetwork(subcloud.management_subnet)) - - MIN_MANAGEMENT_SUBNET_SIZE = 8 - # subtract 3 for network, gateway and broadcast addresses. - MIN_MANAGEMENT_ADDRESSES = MIN_MANAGEMENT_SUBNET_SIZE - 3 - - management_subnet = None - try: - management_subnet = utils.validate_network_str( - payload.get('management_subnet'), - minimum_size=MIN_MANAGEMENT_SUBNET_SIZE, - existing_networks=subcloud_subnets, - operation=operation) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("management_subnet invalid: %s") % e) - - # Parse/validate the start/end addresses - management_start_ip = None - try: - management_start_ip = utils.validate_address_str( - payload.get('management_start_address'), management_subnet) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("management_start_address invalid: %s") % e) - - management_end_ip = None - try: - management_end_ip = utils.validate_address_str( - payload.get('management_end_address'), management_subnet) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("management_end_address invalid: %s") % e) - - if not management_start_ip < management_end_ip: - pecan.abort( - 400, - _("management_start_address not less than " - "management_end_address")) - - if not len(IPRange(management_start_ip, management_end_ip)) >= \ - MIN_MANAGEMENT_ADDRESSES: - pecan.abort( - 400, - _("management address range must contain at least %d " - "addresses") % MIN_MANAGEMENT_ADDRESSES) - - # Parse/validate the gateway - management_gateway_ip = None - if not admin_gateway_ip: - try: - management_gateway_ip = utils.validate_address_str(payload.get( - 'management_gateway_address'), management_subnet) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("management_gateway_address invalid: %s") % e) - - self._validate_admin_network_config( - admin_subnet, - admin_start_ip, - admin_end_ip, - admin_gateway_ip, - subcloud_subnets, - operation - ) - - # Ensure subcloud management gateway is not within the actual subcloud - # management subnet address pool for consistency with the - # systemcontroller gateway restriction below. Address collision - # is not a concern as the address is added to sysinv. 
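# The gateway-versus-pool restriction described above is kept in
# psd_common.validate_subcloud_config(). A self-contained sketch of the collision
# check, using netaddr as the original code does (the function name is illustrative):
from netaddr import IPAddress


def gateway_in_address_pool(gateway_ip, pool_start_ip, pool_end_ip):
    """Return True when the gateway falls inside the managed address range."""
    return IPAddress(pool_start_ip) <= IPAddress(gateway_ip) <= IPAddress(pool_end_ip)

# A gateway outside the pool passes the check:
# gateway_in_address_pool('192.168.101.1', '192.168.101.2', '192.168.101.50') -> False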
- if admin_start_ip: - subcloud_mgmt_address_start = IPAddress(admin_start_ip) - else: - subcloud_mgmt_address_start = management_start_ip - if admin_end_ip: - subcloud_mgmt_address_end = IPAddress(admin_end_ip) - else: - subcloud_mgmt_address_end = management_end_ip - if admin_gateway_ip: - subcloud_mgmt_gw_ip = IPAddress(admin_gateway_ip) - else: - subcloud_mgmt_gw_ip = management_gateway_ip - - if ((subcloud_mgmt_gw_ip >= subcloud_mgmt_address_start) and - (subcloud_mgmt_gw_ip <= subcloud_mgmt_address_end)): - pecan.abort(400, _("%(network)s_gateway_address invalid, " - "is within management pool: %(start)s - " - "%(end)s") % - {'network': 'admin' if admin_gateway_ip else 'management', - 'start': subcloud_mgmt_address_start, - 'end': subcloud_mgmt_address_end}) - - # Ensure systemcontroller gateway is in the management subnet - # for the systemcontroller region. - management_address_pool = self._get_network_address_pool() - systemcontroller_subnet_str = "%s/%d" % ( - management_address_pool.network, - management_address_pool.prefix) - systemcontroller_subnet = IPNetwork(systemcontroller_subnet_str) - try: - systemcontroller_gw_ip = utils.validate_address_str( - payload.get('systemcontroller_gateway_address'), - systemcontroller_subnet - ) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("systemcontroller_gateway_address invalid: %s") % e) - - # Ensure systemcontroller gateway is not within the actual - # management subnet address pool to prevent address collision. - mgmt_address_start = IPAddress(management_address_pool.ranges[0][0]) - mgmt_address_end = IPAddress(management_address_pool.ranges[0][1]) - if ((systemcontroller_gw_ip >= mgmt_address_start) and - (systemcontroller_gw_ip <= mgmt_address_end)): - pecan.abort(400, _("systemcontroller_gateway_address invalid, " - "is within management pool: %(start)s - " - "%(end)s") % - {'start': mgmt_address_start, 'end': mgmt_address_end}) - - self._validate_oam_network_config( - payload.get('external_oam_subnet'), - payload.get('external_oam_gateway_address'), - payload.get('external_oam_floating_address'), - subcloud_subnets - ) - self._validate_group_id(context, group_id) - - def _validate_oam_network_config(self, - external_oam_subnet_str, - external_oam_gateway_address_str, - external_oam_floating_address_str, - existing_networks): - """validate whether oam network configuration is valid""" - - # Parse/validate the oam subnet - MIN_OAM_SUBNET_SIZE = 3 - oam_subnet = None - try: - oam_subnet = utils.validate_network_str( - external_oam_subnet_str, - minimum_size=MIN_OAM_SUBNET_SIZE, - existing_networks=existing_networks) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("external_oam_subnet invalid: %s") % e) - - # Parse/validate the addresses - try: - utils.validate_address_str( - external_oam_gateway_address_str, oam_subnet) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("oam_gateway_address invalid: %s") % e) - - try: - utils.validate_address_str( - external_oam_floating_address_str, oam_subnet) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("oam_floating_address invalid: %s") % e) - - def _validate_admin_network_config(self, - admin_subnet_str, - admin_start_address_str, - admin_end_address_str, - admin_gateway_address_str, - existing_networks, - operation): - """validate whether admin network configuration is valid""" - - if not (admin_subnet_str or admin_start_address_str or - admin_end_address_str or 
admin_gateway_address_str): - return - - MIN_ADMIN_SUBNET_SIZE = 5 - # subtract 3 for network, gateway and broadcast addresses. - MIN_ADMIN_ADDRESSES = MIN_ADMIN_SUBNET_SIZE - 3 - - admin_subnet = None - try: - admin_subnet = utils.validate_network_str( - admin_subnet_str, - minimum_size=MIN_ADMIN_SUBNET_SIZE, - existing_networks=existing_networks, - operation=operation) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("admin_subnet invalid: %s") % e) - - # Parse/validate the start/end addresses - admin_start_ip = None - try: - admin_start_ip = utils.validate_address_str( - admin_start_address_str, admin_subnet) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("admin_start_address invalid: %s") % e) - - admin_end_ip = None - try: - admin_end_ip = utils.validate_address_str( - admin_end_address_str, admin_subnet) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("admin_end_address invalid: %s") % e) - - if not admin_start_ip < admin_end_ip: - pecan.abort( - 400, - _("admin_start_address not less than " - "admin_end_address")) - - if not len(IPRange(admin_start_ip, admin_end_ip)) >= \ - MIN_ADMIN_ADDRESSES: - pecan.abort( - 400, - _("admin address range must contain at least %d " - "addresses") % MIN_ADMIN_ADDRESSES) - - # Parse/validate the gateway - try: - utils.validate_address_str( - admin_gateway_address_str, admin_subnet) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("admin_gateway_address invalid: %s") % e) - - subcloud_admin_address_start = IPAddress(admin_start_address_str) - subcloud_admin_address_end = IPAddress(admin_end_address_str) - subcloud_admin_gw_ip = IPAddress(admin_gateway_address_str) - if ((subcloud_admin_gw_ip >= subcloud_admin_address_start) and - (subcloud_admin_gw_ip <= subcloud_admin_address_end)): - pecan.abort(400, _("admin_gateway_address invalid, " - "is within admin pool: %(start)s - " - "%(end)s") % - {'start': subcloud_admin_address_start, - 'end': subcloud_admin_address_end}) - # TODO(nicodemos): Check if subcloud is online and network already exist in the # subcloud when the lock/unlock is not required for network reconfiguration def _validate_network_reconfiguration(self, payload, subcloud): @@ -643,319 +256,6 @@ class SubcloudsController(object): LOG.exception(msg) pecan.abort(400, msg) - def _format_ip_address(self, payload): - """Format IP addresses in 'bootstrap_values' and 'install_values'. - - The IPv6 addresses can be represented in multiple ways. Format and - update the IP addresses in payload before saving it to database. - """ - if INSTALL_VALUES in payload: - for k in INSTALL_VALUES_ADDRESSES: - if k in payload[INSTALL_VALUES]: - try: - address = IPAddress(payload[INSTALL_VALUES].get(k)).format() - except AddrFormatError as e: - LOG.exception(e) - pecan.abort(400, _("%s invalid: %s") % (k, e)) - payload[INSTALL_VALUES].update({k: address}) - - for k in BOOTSTRAP_VALUES_ADDRESSES: - if k in payload: - try: - address = IPAddress(payload.get(k)).format() - except AddrFormatError as e: - LOG.exception(e) - pecan.abort(400, _("%s invalid: %s") % (k, e)) - payload.update({k: address}) - - @staticmethod - def _validate_install_values(payload, subcloud=None): - """Validate install values if 'install_values' is present in payload. - - The image in payload install values is optional, and if not provided, - the image is set to the available active/inactive load image. 
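# The install-values checks removed here now live in
# psd_common.validate_install_values(). A self-contained sketch of the
# bmc_password rule: the value must be base64 that decodes to UTF-8.
import base64
import binascii


def bmc_password_is_valid(bmc_password):
    if not bmc_password:
        return False
    try:
        base64.b64decode(bmc_password).decode('utf-8')
    except (binascii.Error, UnicodeDecodeError):
        return False
    return True

# bmc_password_is_valid(base64.b64encode(b'My-BMC-Pass').decode()) -> True
# bmc_password_is_valid('not-base64!') -> False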
- - :return boolean: True if bmc install requested, otherwise False - """ - install_values = payload.get('install_values') - if not install_values: - return False - - original_install_values = None - if subcloud: - if subcloud.data_install: - original_install_values = json.loads(subcloud.data_install) - - bmc_password = payload.get('bmc_password') - if not bmc_password: - pecan.abort(400, _('subcloud bmc_password required')) - try: - base64.b64decode(bmc_password).decode('utf-8') - except Exception: - msg = _('Failed to decode subcloud bmc_password, verify' - ' the password is base64 encoded') - LOG.exception(msg) - pecan.abort(400, msg) - payload['install_values'].update({'bmc_password': bmc_password}) - - software_version = payload.get('software_version') - if not software_version and subcloud: - software_version = subcloud.software_version - if 'software_version' in install_values: - install_software_version = str(install_values.get('software_version')) - if software_version and software_version != install_software_version: - pecan.abort(400, - _("The software_version value %s in the install values " - "yaml file does not match with the specified/current " - "software version of %s. Please correct or remove " - "this parameter from the yaml file and try again.") % - (install_software_version, software_version)) - else: - # Only install_values payload will be passed to the subcloud - # installation backend methods. The software_version is required by - # the installation, so it cannot be absent in the install_values. - LOG.debug("software_version (%s) is added to install_values" % - software_version) - payload['install_values'].update({'software_version': software_version}) - if 'persistent_size' in install_values: - persistent_size = install_values.get('persistent_size') - if not isinstance(persistent_size, int): - pecan.abort(400, _("The install value persistent_size (in MB) must " - "be a whole number greater than or equal to %s") % - consts.DEFAULT_PERSISTENT_SIZE) - if persistent_size < consts.DEFAULT_PERSISTENT_SIZE: - # the expected value is less than the default. so throw an error. - pecan.abort(400, _("persistent_size of %s MB is less than " - "the permitted minimum %s MB ") % - (str(persistent_size), consts.DEFAULT_PERSISTENT_SIZE)) - if 'hw_settle' in install_values: - hw_settle = install_values.get('hw_settle') - if not isinstance(hw_settle, int): - pecan.abort(400, _("The install value hw_settle (in seconds) must " - "be a whole number greater than or equal to 0")) - if hw_settle < 0: - pecan.abort(400, _("hw_settle of %s seconds is less than 0") % - (str(hw_settle))) - if 'extra_boot_params' in install_values: - # Validate 'extra_boot_params' boot parameter - # Note: this must be a single string (no spaces). If - # multiple boot parameters are required they can be - # separated by commas. They will be split into separate - # arguments by the miniboot.cfg kickstart. - extra_boot_params = install_values.get('extra_boot_params') - if extra_boot_params in ('', None, 'None'): - msg = "The install value extra_boot_params must not be empty." - pecan.abort(400, _(msg)) - if ' ' in extra_boot_params: - msg = ( - "Invalid install value 'extra_boot_params=" - f"{extra_boot_params}'. 
Spaces are not allowed " - "(use ',' to separate multiple arguments)" - ) - pecan.abort(400, _(msg)) - - for k in dccommon_consts.MANDATORY_INSTALL_VALUES: - if k not in install_values: - if original_install_values: - pecan.abort(400, _("Mandatory install value %s not present, " - "existing %s in DB: %s") % - (k, k, original_install_values.get(k))) - else: - pecan.abort(400, - _("Mandatory install value %s not present") % k) - - # check for the image at load vault load location - matching_iso, err_msg = utils.get_matching_iso(software_version) - if err_msg: - LOG.exception(err_msg) - pecan.abort(400, _(err_msg)) - LOG.info("Image in install_values is set to %s" % matching_iso) - payload['install_values'].update({'image': matching_iso}) - - if (install_values['install_type'] not in - list(range(dccommon_consts.SUPPORTED_INSTALL_TYPES))): - pecan.abort(400, _("install_type invalid: %s") % - install_values['install_type']) - - try: - ip_version = (IPAddress(install_values['bootstrap_address']). - version) - except AddrFormatError as e: - LOG.exception(e) - pecan.abort(400, _("bootstrap_address invalid: %s") % e) - - try: - bmc_address = IPAddress(install_values['bmc_address']) - except AddrFormatError as e: - LOG.exception(e) - pecan.abort(400, _("bmc_address invalid: %s") % e) - - if bmc_address.version != ip_version: - pecan.abort(400, _("bmc_address and bootstrap_address " - "must be the same IP version")) - - if 'nexthop_gateway' in install_values: - try: - gateway_ip = IPAddress(install_values['nexthop_gateway']) - except AddrFormatError as e: - LOG.exception(e) - pecan.abort(400, _("nexthop_gateway address invalid: %s") % e) - if gateway_ip.version != ip_version: - pecan.abort(400, _("nexthop_gateway and bootstrap_address " - "must be the same IP version")) - - if ('network_address' in install_values and - 'nexthop_gateway' not in install_values): - pecan.abort(400, _("nexthop_gateway is required when " - "network_address is present")) - - if 'nexthop_gateway' and 'network_address' in install_values: - if 'network_mask' not in install_values: - pecan.abort(400, _("The network mask is required when network " - "address is present")) - - network_str = (install_values['network_address'] + '/' + - str(install_values['network_mask'])) - try: - network = utils.validate_network_str(network_str, 1) - except exceptions.ValidateFail as e: - LOG.exception(e) - pecan.abort(400, _("network address invalid: %s") % e) - - if network.version != ip_version: - pecan.abort(400, _("network address and bootstrap address " - "must be the same IP version")) - - if 'rd.net.timeout.ipv6dad' in install_values: - try: - ipv6dad_timeout = int(install_values['rd.net.timeout.ipv6dad']) - if ipv6dad_timeout <= 0: - pecan.abort(400, _("rd.net.timeout.ipv6dad must be greater " - "than 0: %d") % ipv6dad_timeout) - except ValueError as e: - LOG.exception(e) - pecan.abort(400, _("rd.net.timeout.ipv6dad invalid: %s") % e) - - return True - - @staticmethod - def _validate_k8s_version(payload): - """Validate k8s version. - - If the specified release in the payload is not the active release, - the kubernetes_version value if specified in the subcloud bootstrap - yaml file must be of the same value as fresh_install_k8s_version of - the specified release. 
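# The release/Kubernetes cross-check that follows is now
# psd_common.validate_k8s_version(); it is only consulted when the requested
# release differs from the active load. Sketch of the comparison, with a
# hypothetical loader for the release's bootstrap vars file (the real code
# resolves that path via utils.get_playbook_for_software_version):
import yaml


def k8s_version_matches_release(requested_k8s_version, bootstrap_vars_file):
    """True when the requested version equals the release's fresh_install_k8s_version."""
    with open(bootstrap_vars_file) as f:
        bootstrap_vars = yaml.safe_load(f) or {}
    fresh_install_version = bootstrap_vars.get('fresh_install_k8s_version')
    return bool(fresh_install_version) and \
        requested_k8s_version == fresh_install_version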
- """ - if payload['software_version'] == tsc.SW_VERSION: - return - - kubernetes_version = payload.get(KUBERNETES_VERSION) - if kubernetes_version: - try: - bootstrap_var_file = utils.get_playbook_for_software_version( - ANSIBLE_BOOTSTRAP_VALIDATE_CONFIG_VARS, - payload['software_version']) - fresh_install_k8s_version = utils.get_value_from_yaml_file( - bootstrap_var_file, - FRESH_INSTALL_K8S_VERSION) - if not fresh_install_k8s_version: - pecan.abort(400, _("%s not found in %s") - % (FRESH_INSTALL_K8S_VERSION, - bootstrap_var_file)) - if kubernetes_version != fresh_install_k8s_version: - pecan.abort(400, _("The kubernetes_version value (%s) " - "specified in the subcloud bootstrap " - "yaml file doesn't match " - "fresh_install_k8s_version value (%s) " - "of the specified release %s") - % (kubernetes_version, - fresh_install_k8s_version, - payload['software_version'])) - except exceptions.PlaybookNotFound: - pecan.abort(400, _("The bootstrap playbook validate-config vars " - "not found for %s software version") - % payload['software_version']) - - def _validate_install_parameters(self, payload): - name = payload.get('name') - if not name: - pecan.abort(400, _('name required')) - - system_mode = payload.get('system_mode') - if not system_mode: - pecan.abort(400, _('system_mode required')) - - # The admin network is optional, but takes precedence over the - # management network for communication between the subcloud and - # system controller if it is defined. - admin_subnet = payload.get('admin_subnet', None) - admin_start_ip = payload.get('admin_start_address', None) - admin_end_ip = payload.get('admin_end_address', None) - admin_gateway_ip = payload.get('admin_gateway_address', None) - if any([admin_subnet, admin_start_ip, admin_end_ip, - admin_gateway_ip]): - # If any admin parameter is defined, all admin parameters - # should be defined. 
- if not admin_subnet: - pecan.abort(400, _('admin_subnet required')) - if not admin_start_ip: - pecan.abort(400, _('admin_start_address required')) - if not admin_end_ip: - pecan.abort(400, _('admin_end_address required')) - if not admin_gateway_ip: - pecan.abort(400, _('admin_gateway_address required')) - - management_subnet = payload.get('management_subnet') - if not management_subnet: - pecan.abort(400, _('management_subnet required')) - - management_start_ip = payload.get('management_start_address') - if not management_start_ip: - pecan.abort(400, _('management_start_address required')) - - management_end_ip = payload.get('management_end_address') - if not management_end_ip: - pecan.abort(400, _('management_end_address required')) - - management_gateway_ip = payload.get('management_gateway_address') - if (admin_gateway_ip and management_gateway_ip): - pecan.abort(400, _('admin_gateway_address and ' - 'management_gateway_address cannot be ' - 'specified at the same time')) - elif (not admin_gateway_ip and not management_gateway_ip): - pecan.abort(400, _('management_gateway_address required')) - - systemcontroller_gateway_ip = payload.get( - 'systemcontroller_gateway_address') - if not systemcontroller_gateway_ip: - pecan.abort(400, - _('systemcontroller_gateway_address required')) - - external_oam_subnet = payload.get('external_oam_subnet') - if not external_oam_subnet: - pecan.abort(400, _('external_oam_subnet required')) - - external_oam_gateway_ip = payload.get('external_oam_gateway_address') - if not external_oam_gateway_ip: - pecan.abort(400, _('external_oam_gateway_address required')) - - external_oam_floating_ip = payload.get('external_oam_floating_address') - if not external_oam_floating_ip: - pecan.abort(400, _('external_oam_floating_address required')) - - sysadmin_password = payload.get('sysadmin_password') - if not sysadmin_password: - pecan.abort(400, _('subcloud sysadmin_password required')) - try: - payload['sysadmin_password'] = utils.decode_and_normalize_passwd( - sysadmin_password) - except Exception: - msg = _('Failed to decode subcloud sysadmin_password, ' - 'verify the password is base64 encoded') - LOG.exception(msg) - pecan.abort(400, msg) - def _get_subcloud_users(self): """Get the subcloud users and passwords from keyring""" DEFAULT_SERVICE_PROJECT_NAME = 'services' @@ -988,48 +288,6 @@ class SubcloudsController(object): return user_list - @staticmethod - def get_ks_client(region_name=dccommon_consts.DEFAULT_REGION_NAME): - """This will get a new keystone client (and new token)""" - try: - os_client = OpenStackDriver(region_name=region_name, - region_clients=None) - return os_client.keystone_client - except Exception: - LOG.warn('Failure initializing KeystoneClient ' - 'for region %s' % region_name) - raise - - def _validate_system_controller_patch_status(self): - ks_client = self.get_ks_client() - patching_client = PatchingClient( - dccommon_consts.DEFAULT_REGION_NAME, - ks_client.session, - endpoint=ks_client.endpoint_cache.get_endpoint('patching')) - patches = patching_client.query() - patch_ids = list(patches.keys()) - for patch_id in patch_ids: - valid_states = [ - patching_v1.PATCH_STATE_PARTIAL_APPLY, - patching_v1.PATCH_STATE_PARTIAL_REMOVE - ] - if patches[patch_id]['patchstate'] in valid_states: - pecan.abort(422, _('Subcloud add is not allowed while system ' - 'controller patching is still in progress.')) - - def _get_network_address_pool( - self, network='management', - region_name=dccommon_consts.DEFAULT_REGION_NAME): - """Get the region network 
address pool""" - ks_client = self.get_ks_client(region_name) - endpoint = ks_client.endpoint_cache.get_endpoint('sysinv') - sysinv_client = SysinvClient(region_name, - ks_client.session, - endpoint=endpoint) - if network == 'admin': - return sysinv_client.get_admin_address_pool() - return sysinv_client.get_management_address_pool() - # TODO(gsilvatr): refactor to use implementation from common/utils and test def _get_oam_addresses(self, context, subcloud_name, sc_ks_client): """Get the subclouds oam addresses""" @@ -1076,41 +334,6 @@ class SubcloudsController(object): else dccommon_consts.DEPLOY_CONFIG_UP_TO_DATE return sync_status - def _add_subcloud_to_database(self, context, payload): - try: - db_api.subcloud_get_by_name(context, payload['name']) - except exceptions.SubcloudNameNotFound: - pass - else: - raise exceptions.BadRequest( - resource='subcloud', - msg='Subcloud with that name already exists') - - # if group_id has been omitted from payload, use 'Default'. - group_id = payload.get('group_id', - consts.DEFAULT_SUBCLOUD_GROUP_ID) - data_install = None - if 'install_values' in payload: - data_install = json.dumps(payload['install_values']) - - subcloud = db_api.subcloud_create( - context, - payload['name'], - payload.get('description'), - payload.get('location'), - payload.get('software_version'), - utils.get_management_subnet(payload), - utils.get_management_gateway_address(payload), - utils.get_management_start_address(payload), - utils.get_management_end_address(payload), - payload['systemcontroller_gateway_address'], - consts.DEPLOY_STATE_NONE, - consts.ERROR_DESC_EMPTY, - False, - group_id, - data_install=data_install) - return subcloud - @staticmethod def _append_static_err_content(subcloud): err_dict = consts.ERR_MSG_DICT @@ -1244,7 +467,7 @@ class SubcloudsController(object): # Get the keystone client that will be used # for _get_deploy_config_sync_status and _get_oam_addresses - sc_ks_client = self.get_ks_client(subcloud.name) + sc_ks_client = psd_common.get_ks_client(subcloud.name) oam_addresses = self._get_oam_addresses(context, subcloud.name, sc_ks_client) if oam_addresses is not None: @@ -1263,70 +486,38 @@ class SubcloudsController(object): @utils.synchronized(LOCK_NAME) @index.when(method='POST', template='json') - def post(self, subcloud_ref=None): - """Create and deploy a new subcloud. - - :param subcloud_ref: ID of or name subcloud (only used when generating - config) - """ + def post(self): + """Create and deploy a new subcloud.""" policy.authorize(subclouds_policy.POLICY_ROOT % "create", {}, restcomm.extract_credentials_for_policy()) context = restcomm.extract_context_from_environ() - if subcloud_ref is None: + payload = psd_common.get_request_data(request, None, + SUBCLOUD_ADD_GET_FILE_CONTENTS) - payload = self._get_request_data(request) - if not payload: - pecan.abort(400, _('Body required')) + psd_common.validate_migrate_parameter(payload, request) - self._validate_install_parameters(payload) + psd_common.validate_sysadmin_password(payload) - # TODO(yuxing): this is not used, should it be removed? 
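# The migrate option check removed here is preserved: it moves to
# psd_common.validate_migrate_parameter(), added further down in this patch.
# Its behaviour as a standalone sketch (the real check keys on consts.DEPLOY_CONFIG;
# 'deploy_config' below is illustrative):
def migrate_request_is_valid(payload, request_post):
    migrate_str = payload.get('migrate')
    if migrate_str is None:
        return True
    if migrate_str not in ("true", "false"):
        return False       # only the strings "true" and "false" are accepted
    # a rehoming (migrate) request may not also carry a deploy-config upload
    return 'deploy_config' not in request_post

# migrate_request_is_valid({'migrate': 'true'}, {}) -> True
# migrate_request_is_valid({'migrate': 'yes'}, {})  -> False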
- migrate_str = payload.get('migrate') - if migrate_str is not None: - if migrate_str not in ["true", "false"]: - pecan.abort(400, _('The migrate option is invalid, ' - 'valid options are true and false.')) + psd_common.pre_deploy_create(payload, context, request) - if consts.DEPLOY_CONFIG in request.POST: - pecan.abort(400, _('migrate with deploy-config is ' - 'not allowed')) + try: + # Add the subcloud details to the database + subcloud = psd_common.add_subcloud_to_database(context, payload) - # If a subcloud release is not passed, use the current - # system controller software_version - payload['software_version'] = payload.get('release', tsc.SW_VERSION) + # Ask dcmanager-manager to add the subcloud. + # It will do all the real work... + self.dcmanager_rpc_client.add_subcloud( + context, subcloud.id, payload) - self._validate_system_controller_patch_status() - - self._validate_subcloud_config(context, payload) - - self._validate_install_values(payload) - - self._validate_k8s_version(payload) - - self._format_ip_address(payload) - - # Upload the deploy config files if it is included in the request - # It has a dependency on the subcloud name, and it is called after - # the name has been validated - self._upload_deploy_config_file(request, payload) - - try: - # Add the subcloud details to the database - subcloud = self._add_subcloud_to_database(context, payload) - # Ask dcmanager-manager to add the subcloud. - # It will do all the real work... - self.dcmanager_rpc_client.add_subcloud(context, payload) - return db_api.subcloud_db_model_to_dict(subcloud) - except RemoteError as e: - pecan.abort(422, e.value) - except Exception: - LOG.exception( - "Unable to create subcloud %s" % payload.get('name')) - pecan.abort(500, _('Unable to create subcloud')) - else: - pecan.abort(400, _('Invalid request')) + return db_api.subcloud_db_model_to_dict(subcloud) + except RemoteError as e: + pecan.abort(422, e.value) + except Exception: + LOG.exception( + "Unable to add subcloud %s" % payload.get('name')) + pecan.abort(500, _('Unable to add subcloud')) @utils.synchronized(LOCK_NAME) @index.when(method='PATCH', template='json') @@ -1373,7 +564,7 @@ class SubcloudsController(object): SUBCLOUD_MANDATORY_NETWORK_PARAMS)) if reconfigure_network: - system_controller_mgmt_pool = self._get_network_address_pool() + system_controller_mgmt_pool = psd_common.get_network_address_pool() # Required parameters payload['name'] = subcloud.name payload['system_controller_network'] = ( @@ -1418,8 +609,11 @@ class SubcloudsController(object): pecan.abort(400, _('Invalid group')) except exceptions.SubcloudGroupNotFound: pecan.abort(400, _('Invalid group')) - if self._validate_install_values(payload, subcloud): + + if INSTALL_VALUES in payload: + psd_common.validate_install_values(payload, subcloud) payload['data_install'] = json.dumps(payload[INSTALL_VALUES]) + try: if reconfigure_network: self.dcmanager_rpc_client.update_subcloud_with_network_reconfig( @@ -1477,26 +671,33 @@ class SubcloudsController(object): LOG.exception("Unable to reconfigure subcloud %s" % subcloud.name) pecan.abort(500, _('Unable to reconfigure subcloud')) elif verb == "reinstall": - payload = self._get_request_data(request) - install_values = self._get_subcloud_db_install_values(subcloud) + psd_common.check_required_parameters(request, + SUBCLOUD_ADD_MANDATORY_FILE) + + payload = psd_common.get_request_data( + request, subcloud, SUBCLOUD_ADD_GET_FILE_CONTENTS) + + install_values = psd_common.get_subcloud_db_install_values(subcloud) if 
subcloud.availability_status == dccommon_consts.AVAILABILITY_ONLINE: msg = _('Cannot re-install an online subcloud') LOG.exception(msg) pecan.abort(400, msg) - self._validate_install_parameters(payload) + psd_common.validate_bootstrap_values(payload) + + psd_common.validate_sysadmin_password(payload) if payload.get('name') != subcloud.name: pecan.abort(400, _('name is incorrect for the subcloud')) - self._validate_subcloud_config(context, payload, verb) + psd_common.validate_subcloud_config(context, payload, verb) # If a subcloud release is not passed, use the current # system controller software_version payload['software_version'] = payload.get('release', tsc.SW_VERSION) - self._validate_k8s_version(payload) + psd_common.validate_k8s_version(payload) # If the software version of the subcloud is different from the # specified or active load, update the software version in install @@ -1530,7 +731,7 @@ class SubcloudsController(object): data_install = json.dumps(payload['install_values']) # Upload the deploy config files if it is included in the request - self._upload_deploy_config_file(request, payload) + psd_common.upload_deploy_config_file(request, payload) try: # Align the software version of the subcloud with reinstall diff --git a/distributedcloud/dcmanager/common/phased_subcloud_deploy.py b/distributedcloud/dcmanager/common/phased_subcloud_deploy.py index 9b76584d5..d1904fbf2 100644 --- a/distributedcloud/dcmanager/common/phased_subcloud_deploy.py +++ b/distributedcloud/dcmanager/common/phased_subcloud_deploy.py @@ -151,6 +151,18 @@ def validate_system_controller_patch_status(operation: str): % operation) +def validate_migrate_parameter(payload, request): + migrate_str = payload.get('migrate') + if migrate_str is not None: + if migrate_str not in ["true", "false"]: + pecan.abort(400, _('The migrate option is invalid, ' + 'valid options are true and false.')) + + if consts.DEPLOY_CONFIG in request.POST: + pecan.abort(400, _('migrate with deploy-config is ' + 'not allowed')) + + def validate_subcloud_config(context, payload, operation=None, ignore_conflicts_with=None): """Check whether subcloud config is valid.""" @@ -452,7 +464,7 @@ def validate_install_values(payload, subcloud=None): """ install_values = payload.get('install_values') if not install_values: - return False + return original_install_values = None if subcloud: @@ -490,6 +502,7 @@ def validate_install_values(payload, subcloud=None): LOG.debug("software_version (%s) is added to install_values" % software_version) payload['install_values'].update({'software_version': software_version}) + if 'persistent_size' in install_values: persistent_size = install_values.get('persistent_size') if not isinstance(persistent_size, int): @@ -501,6 +514,7 @@ def validate_install_values(payload, subcloud=None): pecan.abort(400, _("persistent_size of %s MB is less than " "the permitted minimum %s MB ") % (str(persistent_size), consts.DEFAULT_PERSISTENT_SIZE)) + if 'hw_settle' in install_values: hw_settle = install_values.get('hw_settle') if not isinstance(hw_settle, int): @@ -510,6 +524,24 @@ def validate_install_values(payload, subcloud=None): pecan.abort(400, _("hw_settle of %s seconds is less than 0") % (str(hw_settle))) + if 'extra_boot_params' in install_values: + # Validate 'extra_boot_params' boot parameter + # Note: this must be a single string (no spaces). If + # multiple boot parameters are required they can be + # separated by commas. They will be split into separate + # arguments by the miniboot.cfg kickstart. 
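# The extra_boot_params value validated below must be a single token: empty or
# placeholder values are rejected, and spaces are rejected because the kickstart
# splits arguments on ','. Equivalent standalone check:
def extra_boot_params_is_valid(value):
    if value in ('', None, 'None'):
        return False
    return ' ' not in str(value)

# extra_boot_params_is_valid('console=ttyS0,115200') -> True
# extra_boot_params_is_valid('console=ttyS0 quiet')  -> False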
+ extra_boot_params = install_values.get('extra_boot_params') + if extra_boot_params in ('', None, 'None'): + msg = "The install value extra_boot_params must not be empty." + pecan.abort(400, _(msg)) + if ' ' in extra_boot_params: + msg = ( + "Invalid install value 'extra_boot_params=" + f"{extra_boot_params}'. Spaces are not allowed " + "(use ',' to separate multiple arguments)" + ) + pecan.abort(400, _(msg)) + for k in dccommon_consts.MANDATORY_INSTALL_VALUES: if k not in install_values: if original_install_values: @@ -592,8 +624,6 @@ def validate_install_values(payload, subcloud=None): LOG.exception(e) pecan.abort(400, _("rd.net.timeout.ipv6dad invalid: %s") % e) - return True - def validate_k8s_version(payload): """Validate k8s version. @@ -677,19 +707,21 @@ def format_ip_address(payload): def upload_deploy_config_file(request, payload): - if consts.DEPLOY_CONFIG in request.POST: - file_item = request.POST[consts.DEPLOY_CONFIG] - filename = getattr(file_item, 'filename', '') - if not filename: - pecan.abort(400, _("No %s file uploaded" - % consts.DEPLOY_CONFIG)) - file_item.file.seek(0, os.SEEK_SET) - contents = file_item.file.read() - # the deploy config needs to upload to the override location - fn = get_config_file_path(payload['name'], consts.DEPLOY_CONFIG) - upload_config_file(contents, fn, consts.DEPLOY_CONFIG) - payload.update({consts.DEPLOY_CONFIG: fn}) - get_common_deploy_files(payload, payload['software_version']) + file_item = request.POST.get(consts.DEPLOY_CONFIG) + if file_item is None: + return + + filename = getattr(file_item, 'filename', '') + if not filename: + pecan.abort(400, _("No %s file uploaded" % consts.DEPLOY_CONFIG)) + + file_item.file.seek(0, os.SEEK_SET) + contents = file_item.file.read() + # the deploy config needs to upload to the override location + fn = get_config_file_path(payload['name'], consts.DEPLOY_CONFIG) + upload_config_file(contents, fn, consts.DEPLOY_CONFIG) + payload[consts.DEPLOY_CONFIG] = fn + get_common_deploy_files(payload, payload['software_version']) def get_config_file_path(subcloud_name, config_file_type=None): @@ -718,8 +750,7 @@ def upload_config_file(file_item, config_file, config_type): def get_common_deploy_files(payload, software_version): missing_deploy_files = [] for f in consts.DEPLOY_COMMON_FILE_OPTIONS: - # Skip the prestage_images option as it is - # not relevant in this context + # Skip the prestage_images option as it is not relevant in this context if f == consts.DEPLOY_PRESTAGE: continue filename = None @@ -858,6 +889,35 @@ def populate_payload_with_pre_existing_data(payload: dict, get_common_deploy_files(payload, subcloud.software_version) +def pre_deploy_create(payload: dict, context: RequestContext, + request: pecan.Request): + if not payload: + pecan.abort(400, _('Body required')) + + validate_bootstrap_values(payload) + + # If a subcloud release is not passed, use the current + # system controller software_version + payload['software_version'] = payload.get('release', tsc.SW_VERSION) + + validate_subcloud_name_availability(context, payload['name']) + + validate_system_controller_patch_status("create") + + validate_subcloud_config(context, payload) + + validate_install_values(payload) + + validate_k8s_version(payload) + + format_ip_address(payload) + + # Upload the deploy config files if it is included in the request + # It has a dependency on the subcloud name, and it is called after + # the name has been validated + upload_deploy_config_file(request, payload) + + def pre_deploy_install(payload: dict, 
validate_password=False): if validate_password: validate_sysadmin_password(payload) diff --git a/distributedcloud/dcmanager/manager/service.py b/distributedcloud/dcmanager/manager/service.py index d02851e20..4d85bff6a 100644 --- a/distributedcloud/dcmanager/manager/service.py +++ b/distributedcloud/dcmanager/manager/service.py @@ -99,10 +99,10 @@ class DCManagerService(service.Service): super(DCManagerService, self).start() @request_context - def add_subcloud(self, context, payload): + def add_subcloud(self, context, subcloud_id, payload): # Adds a subcloud LOG.info("Handling add_subcloud request for: %s" % payload.get('name')) - return self.subcloud_manager.add_subcloud(context, payload) + return self.subcloud_manager.add_subcloud(context, subcloud_id, payload) @request_context def delete_subcloud(self, context, subcloud_id): diff --git a/distributedcloud/dcmanager/manager/subcloud_manager.py b/distributedcloud/dcmanager/manager/subcloud_manager.py index 3c7d6f3a3..dc78001b6 100644 --- a/distributedcloud/dcmanager/manager/subcloud_manager.py +++ b/distributedcloud/dcmanager/manager/subcloud_manager.py @@ -105,8 +105,10 @@ CERT_NAMESPACE = "dc-cert" TRANSITORY_STATES = { consts.DEPLOY_STATE_NONE: consts.DEPLOY_STATE_DEPLOY_PREP_FAILED, consts.DEPLOY_STATE_PRE_DEPLOY: consts.DEPLOY_STATE_DEPLOY_PREP_FAILED, + consts.DEPLOY_STATE_CREATING: consts.DEPLOY_STATE_CREATE_FAILED, consts.DEPLOY_STATE_PRE_INSTALL: consts.DEPLOY_STATE_PRE_INSTALL_FAILED, consts.DEPLOY_STATE_INSTALLING: consts.DEPLOY_STATE_INSTALL_FAILED, + consts.DEPLOY_STATE_PRE_BOOTSTRAP: consts.DEPLOY_STATE_PRE_BOOTSTRAP_FAILED, consts.DEPLOY_STATE_BOOTSTRAPPING: consts.DEPLOY_STATE_BOOTSTRAP_FAILED, consts.DEPLOY_STATE_PRE_CONFIG: consts.DEPLOY_STATE_PRE_CONFIG_FAILED, consts.DEPLOY_STATE_CONFIGURING: consts.DEPLOY_STATE_CONFIG_FAILED, @@ -245,11 +247,10 @@ class SubcloudManager(manager.Manager): software_version if software_version else SW_VERSION] return install_command - # TODO(gherzman): rename compose_apply_command to compose_bootstrap_command - def compose_apply_command(self, subcloud_name, - ansible_subcloud_inventory_file, - software_version=None): - apply_command = [ + def compose_bootstrap_command(self, subcloud_name, + ansible_subcloud_inventory_file, + software_version=None): + bootstrap_command = [ "ansible-playbook", utils.get_playbook_for_software_version( ANSIBLE_SUBCLOUD_PLAYBOOK, software_version), @@ -258,23 +259,22 @@ class SubcloudManager(manager.Manager): ] # Add the overrides dir and region_name so the playbook knows # which overrides to load - apply_command += [ + bootstrap_command += [ "-e", str("override_files_dir='%s' region_name=%s") % ( dccommon_consts.ANSIBLE_OVERRIDES_PATH, subcloud_name), "-e", "install_release_version=%s" % software_version if software_version else SW_VERSION] - return apply_command + return bootstrap_command - # TODO(vgluzrom): rename compose_deploy_command to compose_config_command - def compose_deploy_command(self, subcloud_name, ansible_subcloud_inventory_file, payload): - deploy_command = [ + def compose_config_command(self, subcloud_name, ansible_subcloud_inventory_file, payload): + config_command = [ "ansible-playbook", payload[consts.DEPLOY_PLAYBOOK], "-e", "@%s" % dccommon_consts.ANSIBLE_OVERRIDES_PATH + "/" + subcloud_name + '_deploy_values.yml', "-i", ansible_subcloud_inventory_file, "--limit", subcloud_name ] - return deploy_command + return config_command def compose_backup_command(self, subcloud_name, ansible_subcloud_inventory_file): backup_command = [ @@ -331,205 
+331,71 @@ class SubcloudManager(manager.Manager): dccommon_consts.ANSIBLE_OVERRIDES_PATH, subcloud_name)] return rehome_command - def add_subcloud(self, context, payload): + def rehome_subcloud(self, context, subcloud, payload): + # Ansible inventory filename for the specified subcloud + ansible_subcloud_inventory_file = self._get_ansible_filename( + subcloud.name, INVENTORY_FILE_POSTFIX) + + rehome_command = self.compose_rehome_command( + subcloud.name, + ansible_subcloud_inventory_file, + subcloud.software_version) + + self.run_deploy_thread(subcloud, payload, context, + rehome_command=rehome_command) + + def add_subcloud(self, context, subcloud_id, payload): """Add subcloud and notify orchestrators. :param context: request context object + :param subcloud_id: id of the subcloud :param payload: subcloud configuration """ - LOG.info("Adding subcloud %s." % payload['name']) - subcloud_id = db_api.subcloud_get_by_name(context, payload['name']).id + LOG.info(f"Adding subcloud {payload['name']}.") - # Check the migrate option from payload - migrate_str = payload.get('migrate', '') - migrate_flag = (migrate_str.lower() == 'true') - if migrate_flag: - subcloud = db_api.subcloud_update( - context, subcloud_id, - deploy_status=consts.DEPLOY_STATE_PRE_REHOME) + rehoming = payload.get('migrate', '').lower() == "true" + payload['ansible_ssh_pass'] = payload['sysadmin_password'] + + # Create the subcloud + subcloud = self.subcloud_deploy_create(context, subcloud_id, + payload, rehoming) + + # Return if create failed + if rehoming: + success_state = consts.DEPLOY_STATE_PRE_REHOME else: + success_state = consts.DEPLOY_STATE_CREATED + if subcloud.deploy_status != success_state: + return + + # Rehome subcloud + if rehoming: + self.rehome_subcloud(context, subcloud, payload) + return + + # Define which deploy phases should be run + phases_to_run = [] + if consts.INSTALL_VALUES in payload: + phases_to_run.append(consts.DEPLOY_PHASE_INSTALL) + phases_to_run.append(consts.DEPLOY_PHASE_BOOTSTRAP) + if consts.DEPLOY_CONFIG in payload: + phases_to_run.append(consts.DEPLOY_PHASE_CONFIG) + + # Finish adding the subcloud by running the deploy phases + succeeded = self.run_deploy_phases( + context, subcloud_id, payload, phases_to_run) + + if succeeded: subcloud = db_api.subcloud_update( - context, subcloud_id, - deploy_status=consts.DEPLOY_STATE_PRE_DEPLOY) + context, subcloud_id, deploy_status=consts.DEPLOY_STATE_DONE) - try: - # Ansible inventory filename for the specified subcloud - ansible_subcloud_inventory_file = self._get_ansible_filename( - subcloud.name, INVENTORY_FILE_POSTFIX) - - # Create a new route to this subcloud on the management interface - # on both controllers. 
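# add_subcloud() now drives the same phases as the phased-deploy API: create, then
# optional install, bootstrap, and optional config (or a rehome playbook when
# migrate=true). Sketch of the phase selection; the string values are illustrative,
# the real code uses consts.INSTALL_VALUES / consts.DEPLOY_CONFIG and the
# consts.DEPLOY_PHASE_* constants:
def select_deploy_phases(payload):
    phases = []
    if 'install_values' in payload:
        phases.append('install')
    phases.append('bootstrap')
    if 'deploy_config' in payload:
        phases.append('config')
    return phases

# select_deploy_phases({'install_values': {}}) -> ['install', 'bootstrap']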
- m_ks_client = OpenStackDriver( - region_name=dccommon_consts.DEFAULT_REGION_NAME, - region_clients=None).keystone_client - subcloud_subnet = netaddr.IPNetwork(utils.get_management_subnet(payload)) - endpoint = m_ks_client.endpoint_cache.get_endpoint('sysinv') - sysinv_client = SysinvClient(dccommon_consts.DEFAULT_REGION_NAME, - m_ks_client.session, - endpoint=endpoint) - LOG.debug("Getting cached regionone data for %s" % subcloud.name) - cached_regionone_data = self._get_cached_regionone_data(m_ks_client, sysinv_client) - for mgmt_if_uuid in cached_regionone_data['mgmt_interface_uuids']: - sysinv_client.create_route(mgmt_if_uuid, - str(subcloud_subnet.ip), - subcloud_subnet.prefixlen, - payload['systemcontroller_gateway_address'], - 1) - - # Create endpoints to this subcloud on the - # management-start-ip of the subcloud which will be allocated - # as the floating Management IP of the Subcloud if the - # Address Pool is not shared. Incase the endpoint entries - # are incorrect, or the management IP of the subcloud is changed - # in the future, it will not go managed or will show up as - # out of sync. To fix this use Openstack endpoint commands - # on the SystemController to change the subcloud endpoints. - # The non-identity endpoints are added to facilitate horizon access - # from the System Controller to the subcloud. - endpoint_config = [] - endpoint_ip = utils.get_management_start_address(payload) - if netaddr.IPAddress(endpoint_ip).version == 6: - endpoint_ip = '[' + endpoint_ip + ']' - - for service in m_ks_client.services_list: - if service.type == dccommon_consts.ENDPOINT_TYPE_PLATFORM: - admin_endpoint_url = "https://{}:6386/v1".format(endpoint_ip) - endpoint_config.append({"id": service.id, - "admin_endpoint_url": admin_endpoint_url}) - elif service.type == dccommon_consts.ENDPOINT_TYPE_IDENTITY: - admin_endpoint_url = "https://{}:5001/v3".format(endpoint_ip) - endpoint_config.append({"id": service.id, - "admin_endpoint_url": admin_endpoint_url}) - elif service.type == dccommon_consts.ENDPOINT_TYPE_PATCHING: - admin_endpoint_url = "https://{}:5492".format(endpoint_ip) - endpoint_config.append({"id": service.id, - "admin_endpoint_url": admin_endpoint_url}) - elif service.type == dccommon_consts.ENDPOINT_TYPE_FM: - admin_endpoint_url = "https://{}:18003".format(endpoint_ip) - endpoint_config.append({"id": service.id, - "admin_endpoint_url": admin_endpoint_url}) - elif service.type == dccommon_consts.ENDPOINT_TYPE_NFV: - admin_endpoint_url = "https://{}:4546".format(endpoint_ip) - endpoint_config.append({"id": service.id, - "admin_endpoint_url": admin_endpoint_url}) - - if len(endpoint_config) < 5: - raise exceptions.BadRequest( - resource='subcloud', - msg='Missing service in SystemController') - - for endpoint in endpoint_config: - try: - m_ks_client.keystone_client.endpoints.create( - endpoint["id"], - endpoint['admin_endpoint_url'], - interface=dccommon_consts.KS_ENDPOINT_ADMIN, - region=subcloud.name) - except Exception as e: - # Keystone service must be temporarily busy, retry - LOG.error(str(e)) - m_ks_client.keystone_client.endpoints.create( - endpoint["id"], - endpoint['admin_endpoint_url'], - interface=dccommon_consts.KS_ENDPOINT_ADMIN, - region=subcloud.name) - - # Inform orchestrator that subcloud has been added - self.dcorch_rpc_client.add_subcloud( - context, subcloud.name, subcloud.software_version) - - # create entry into alarm summary table, will get real values later - alarm_updates = {'critical_alarms': -1, - 'major_alarms': -1, - 'minor_alarms': -1, - 
'warnings': -1, - 'cloud_status': consts.ALARMS_DISABLED} - db_api.subcloud_alarms_create(context, subcloud.name, - alarm_updates) - - # Regenerate the addn_hosts_dc file - self._create_addn_hosts_dc(context) - - self._populate_payload_with_cached_keystone_data( - cached_regionone_data, payload) - - if "install_values" in payload: - payload['install_values']['ansible_ssh_pass'] = \ - payload['sysadmin_password'] - - deploy_command = None - if "deploy_playbook" in payload: - self._prepare_for_deployment(payload, subcloud.name) - deploy_command = self.compose_deploy_command( - subcloud.name, - ansible_subcloud_inventory_file, - payload) - - del payload['sysadmin_password'] - payload['users'] = dict() - for user in USERS_TO_REPLICATE: - payload['users'][user] = \ - str(keyring.get_password( - user, dccommon_consts.SERVICES_USER_NAME)) - - # Create the ansible inventory for the new subcloud - utils.create_subcloud_inventory(payload, - ansible_subcloud_inventory_file) - - # create subcloud intermediate certificate and pass in keys - self._create_intermediate_ca_cert(payload) - - # Write this subclouds overrides to file - # NOTE: This file should not be deleted if subcloud add fails - # as it is used for debugging - self._write_subcloud_ansible_config(cached_regionone_data, payload) - - if migrate_flag: - rehome_command = self.compose_rehome_command( - subcloud.name, - ansible_subcloud_inventory_file, - subcloud.software_version) - apply_thread = threading.Thread( - target=self.run_deploy_thread, - args=(subcloud, payload, context, - None, None, None, rehome_command)) - else: - install_command = None - if "install_values" in payload: - install_command = self.compose_install_command( - subcloud.name, - ansible_subcloud_inventory_file, - subcloud.software_version) - apply_command = self.compose_apply_command( - subcloud.name, - ansible_subcloud_inventory_file, - subcloud.software_version) - apply_thread = threading.Thread( - target=self.run_deploy_thread, - args=(subcloud, payload, context, - install_command, apply_command, deploy_command)) - - apply_thread.start() - - return db_api.subcloud_db_model_to_dict(subcloud) - - except Exception: - LOG.exception("Failed to create subcloud %s" % payload['name']) - # If we failed to create the subcloud, update the - # deployment status - if migrate_flag: - db_api.subcloud_update( - context, subcloud.id, - deploy_status=consts.DEPLOY_STATE_REHOME_PREP_FAILED) - else: - db_api.subcloud_update( - context, subcloud.id, - deploy_status=consts.DEPLOY_STATE_DEPLOY_PREP_FAILED) + LOG.info(f"Finished adding subcloud {subcloud['name']}.") def reconfigure_subcloud(self, context, subcloud_id, payload): """Reconfigure subcloud :param context: request context object + :param subcloud_id: id of the subcloud :param payload: subcloud configuration """ LOG.info("Reconfiguring subcloud %s." 
@@ -542,10 +408,10 @@ class SubcloudManager(manager.Manager):
         ansible_subcloud_inventory_file = self._get_ansible_filename(
             subcloud.name, INVENTORY_FILE_POSTFIX)

-        deploy_command = None
+        config_command = None
         if "deploy_playbook" in payload:
             self._prepare_for_deployment(payload, subcloud.name)
-            deploy_command = self.compose_deploy_command(
+            config_command = self.compose_config_command(
                 subcloud.name,
                 ansible_subcloud_inventory_file,
                 payload)
@@ -553,7 +419,7 @@ class SubcloudManager(manager.Manager):
             del payload['sysadmin_password']
             apply_thread = threading.Thread(
                 target=self.run_deploy_thread,
-                args=(subcloud, payload, context, None, None, deploy_command))
+                args=(subcloud, payload, context, None, None, config_command))
             apply_thread.start()
             return db_api.subcloud_db_model_to_dict(subcloud)
         except Exception:
@@ -595,10 +461,10 @@ class SubcloudManager(manager.Manager):
             payload['bootstrap-address'] = \
                 payload['install_values']['bootstrap_address']

-        deploy_command = None
+        config_command = None
         if "deploy_playbook" in payload:
             self._prepare_for_deployment(payload, subcloud.name)
-            deploy_command = self.compose_deploy_command(
+            config_command = self.compose_config_command(
                 subcloud.name,
                 ansible_subcloud_inventory_file,
                 payload)
@@ -621,7 +487,7 @@ class SubcloudManager(manager.Manager):
                 subcloud.name,
                 ansible_subcloud_inventory_file,
                 payload['software_version'])
-            apply_command = self.compose_apply_command(
+            bootstrap_command = self.compose_bootstrap_command(
                 subcloud.name,
                 ansible_subcloud_inventory_file,
                 payload['software_version'])
@@ -629,7 +495,7 @@ class SubcloudManager(manager.Manager):
             apply_thread = threading.Thread(
                 target=self.run_deploy_thread,
                 args=(subcloud, payload, context,
-                      install_command, apply_command, deploy_command,
+                      install_command, bootstrap_command, config_command,
                       None, network_reconfig))
             apply_thread.start()
             return db_api.subcloud_db_model_to_dict(subcloud)
@@ -762,12 +628,21 @@ class SubcloudManager(manager.Manager):

     def _deploy_bootstrap_prep(self, context, subcloud, payload: dict,
                                ansible_subcloud_inventory_file):
+        """Run the preparation steps needed to run the bootstrap operation
+
+        :param context: target request context object
+        :param subcloud: subcloud model object
+        :param payload: bootstrap request parameters
+        :param ansible_subcloud_inventory_file: the ansible inventory file path
+        :return: ansible command needed to run the bootstrap playbook
+        """
         management_subnet = utils.get_management_subnet(payload)
         sys_controller_gw_ip = payload.get(
             "systemcontroller_gateway_address")

         if (management_subnet != subcloud.management_subnet) or (
-                sys_controller_gw_ip != subcloud.systemcontroller_gateway_ip):
+            sys_controller_gw_ip != subcloud.systemcontroller_gateway_ip
+        ):
             m_ks_client = OpenStackDriver(
                 region_name=dccommon_consts.DEFAULT_REGION_NAME,
                 region_clients=None).keystone_client
@@ -814,23 +689,37 @@ class SubcloudManager(manager.Manager):
             utils.create_subcloud_inventory(payload,
                                             ansible_subcloud_inventory_file)

-        apply_command = self.compose_apply_command(
+        bootstrap_command = self.compose_bootstrap_command(
             subcloud.name,
             ansible_subcloud_inventory_file,
             subcloud.software_version)
-        return apply_command
+        return bootstrap_command

     def _deploy_config_prep(self, subcloud, payload: dict,
                             ansible_subcloud_inventory_file):
+        """Run the preparation steps needed to run the config operation
+
+        :param subcloud: target subcloud model object
+        :param payload: config request parameters
+        :param ansible_subcloud_inventory_file: the ansible inventory file path
+        :return: ansible command needed to run the config playbook
+        """
         self._prepare_for_deployment(payload, subcloud.name)
-        deploy_command = self.compose_deploy_command(
+        config_command = self.compose_config_command(
             subcloud.name,
             ansible_subcloud_inventory_file,
             payload)
-        return deploy_command
+        return config_command

     def _deploy_install_prep(self, subcloud, payload: dict,
                              ansible_subcloud_inventory_file):
+        """Run the preparation steps needed to run the install operation
+
+        :param subcloud: target subcloud model object
+        :param payload: install request parameters
+        :param ansible_subcloud_inventory_file: the ansible inventory file path
+        :return: ansible command needed to run the install playbook
+        """
         payload['install_values']['ansible_ssh_pass'] = \
             payload['sysadmin_password']
         payload['install_values']['ansible_become_pass'] = \
@@ -919,18 +808,25 @@ class SubcloudManager(manager.Manager):
             self.run_deploy_phases(context, subcloud_id, payload,
                                    deploy_states_to_run)

-    def subcloud_deploy_create(self, context, subcloud_id, payload):
+    def subcloud_deploy_create(self, context, subcloud_id, payload, rehoming=False):
         """Create subcloud and notify orchestrators.

         :param context: request context object
         :param subcloud_id: subcloud_id from db
         :param payload: subcloud configuration
+        :param rehoming: flag indicating if this is part of a rehoming operation
+        :return: resulting subcloud DB object
         """
         LOG.info("Creating subcloud %s." % payload['name'])

+        if rehoming:
+            deploy_state = consts.DEPLOY_STATE_PRE_REHOME
+        else:
+            deploy_state = consts.DEPLOY_STATE_CREATING
+
         subcloud = db_api.subcloud_update(
             context, subcloud_id,
-            deploy_status=consts.DEPLOY_STATE_CREATING)
+            deploy_status=deploy_state)

         try:
             # Create a new route to this subcloud on the management interface
@@ -1022,7 +918,7 @@ class SubcloudManager(manager.Manager):
             self._prepare_for_deployment(payload, subcloud.name,
                                          populate_passwords=False)

-            payload['users'] = dict()
+            payload['users'] = {}
             for user in USERS_TO_REPLICATE:
                 payload['users'][user] = \
                     str(keyring.get_password(
@@ -1044,26 +940,36 @@ class SubcloudManager(manager.Manager):
             # as it is used for debugging
             self._write_subcloud_ansible_config(cached_regionone_data, payload)

+            if not rehoming:
+                deploy_state = consts.DEPLOY_STATE_CREATED
+
             subcloud = db_api.subcloud_update(
                 context, subcloud_id,
-                deploy_status=consts.DEPLOY_STATE_CREATED)
+                deploy_status=deploy_state)

-            return db_api.subcloud_db_model_to_dict(subcloud)
+            return subcloud

         except Exception:
             LOG.exception("Failed to create subcloud %s" % payload['name'])
             # If we failed to create the subcloud, update the deployment status
+
+            if rehoming:
+                deploy_state = consts.DEPLOY_STATE_REHOME_PREP_FAILED
+            else:
+                deploy_state = consts.DEPLOY_STATE_CREATE_FAILED
+
             subcloud = db_api.subcloud_update(
                 context, subcloud.id,
-                deploy_status=consts.DEPLOY_STATE_CREATE_FAILED)
-            return db_api.subcloud_db_model_to_dict(subcloud)
+                deploy_status=deploy_state)
+            return subcloud

-    def subcloud_deploy_install(self, context, subcloud_id, payload: dict):
+    def subcloud_deploy_install(self, context, subcloud_id, payload: dict) -> bool:
         """Install subcloud

         :param context: request context object
         :param subcloud_id: subcloud id from db
         :param payload: subcloud Install
+        :return: success status
         """

         # Retrieve the subcloud details from the database
@@ -1112,6 +1018,7 @@ class SubcloudManager(manager.Manager):
         :param context: request context object
         :param subcloud_id: subcloud_id from db
         :param payload: subcloud bootstrap configuration
+        :return: success status
         """

         LOG.info("Bootstrapping subcloud %s." % payload['name'])
@@ -1126,11 +1033,11 @@ class SubcloudManager(manager.Manager):
             ansible_subcloud_inventory_file = self._get_ansible_filename(
                 subcloud.name, INVENTORY_FILE_POSTFIX)

-            apply_command = self._deploy_bootstrap_prep(
+            bootstrap_command = self._deploy_bootstrap_prep(
                 context, subcloud, payload, ansible_subcloud_inventory_file)

             bootstrap_success = self._run_subcloud_bootstrap(
-                context, subcloud, apply_command, log_file)
+                context, subcloud, bootstrap_command, log_file)

             return bootstrap_success
         except Exception:
@@ -1140,12 +1047,13 @@ class SubcloudManager(manager.Manager):
                 deploy_status=consts.DEPLOY_STATE_PRE_BOOTSTRAP_FAILED)
             return False

-    def subcloud_deploy_config(self, context, subcloud_id, payload: dict) -> dict:
+    def subcloud_deploy_config(self, context, subcloud_id, payload: dict) -> bool:
         """Configure subcloud

         :param context: request context object
         :param subcloud_id: subcloud_id from db
         :param payload: subcloud configuration
+        :return: success status
         """
         LOG.info("Configuring subcloud %s." % subcloud_id)

@@ -1162,13 +1070,13 @@ class SubcloudManager(manager.Manager):
                 subcloud.name, INVENTORY_FILE_POSTFIX)

             self._prepare_for_deployment(payload, subcloud.name)
-            deploy_command = self.compose_deploy_command(
+            config_command = self.compose_config_command(
                 subcloud.name,
                 ansible_subcloud_inventory_file,
                 payload)

             config_success = self._run_subcloud_config(subcloud, context,
-                                                       deploy_command, log_file)
+                                                       config_command, log_file)

             return config_success
         except Exception:
@@ -1697,21 +1605,21 @@ class SubcloudManager(manager.Manager):
             LOG.exception(e)

     def run_deploy_thread(self, subcloud, payload, context,
-                          install_command=None, apply_command=None,
-                          deploy_command=None, rehome_command=None,
+                          install_command=None, bootstrap_command=None,
+                          config_command=None, rehome_command=None,
                           network_reconfig=None):
         try:
             self._run_deploy(subcloud, payload, context,
-                             install_command, apply_command,
-                             deploy_command, rehome_command,
+                             install_command, bootstrap_command,
+                             config_command, rehome_command,
                              network_reconfig)
         except Exception as ex:
             LOG.exception("run_deploy failed")
             raise ex

     def _run_deploy(self, subcloud, payload, context,
-                    install_command, apply_command,
-                    deploy_command, rehome_command,
+                    install_command, bootstrap_command,
+                    config_command, rehome_command,
                     network_reconfig):
         log_file = (
             os.path.join(consts.DC_ANSIBLE_LOG_DIR, subcloud.name)
@@ -1724,7 +1632,7 @@ class SubcloudManager(manager.Manager):
             )
             if not install_success:
                 return
-        if apply_command:
+        if bootstrap_command:
             try:
                 # Update the subcloud to bootstrapping
                 db_api.subcloud_update(
@@ -1739,7 +1647,7 @@ class SubcloudManager(manager.Manager):
             # Run the ansible boostrap-subcloud playbook
             LOG.info("Starting bootstrap of %s" % subcloud.name)
             try:
-                run_playbook(log_file, apply_command)
+                run_playbook(log_file, bootstrap_command)
             except PlaybookExecutionFailed:
                 msg = utils.find_ansible_error_msg(
                     subcloud.name, log_file, consts.DEPLOY_STATE_BOOTSTRAPPING)
@@ -1750,7 +1658,7 @@ class SubcloudManager(manager.Manager):
                     error_description=msg[0:consts.ERROR_DESCRIPTION_LENGTH])
                 return
             LOG.info("Successfully bootstrapped %s" % subcloud.name)
-        if deploy_command:
+        if config_command:
             # Run the custom deploy playbook
             LOG.info("Starting deploy of %s" % subcloud.name)
             db_api.subcloud_update(
@@ -1759,7 +1667,7 @@ class SubcloudManager(manager.Manager):
                 error_description=consts.ERROR_DESC_EMPTY)

             try:
-                run_playbook(log_file, deploy_command)
+                run_playbook(log_file, config_command)
             except PlaybookExecutionFailed:
                 msg = utils.find_ansible_error_msg(
                     subcloud.name, log_file, consts.DEPLOY_STATE_DEPLOYING)
@@ -1816,32 +1724,33 @@ class SubcloudManager(manager.Manager):
                 error_description=consts.ERROR_DESC_EMPTY)

     def run_deploy_phases(self, context, subcloud_id, payload,
-                          deploy_states_to_run):
-        """Run individual phases durring deploy operation."""
+                          deploy_phases_to_run):
+        """Run one or more deployment phases, ensuring correct order
+
+        :param context: request context object
+        :param subcloud_id: subcloud id from db
+        :param payload: deploy phases payload
+        :param deploy_phases_to_run: deploy phases that should run
+        """
         try:
-            for state in deploy_states_to_run:
-                if state == consts.DEPLOY_PHASE_INSTALL:
-                    install_success = self.subcloud_deploy_install(
-                        context, subcloud_id, payload)
-                    if not install_success:
-                        return
-                elif state == consts.DEPLOY_PHASE_BOOTSTRAP:
-                    bootstrap_success = self.subcloud_deploy_bootstrap(
-                        context, subcloud_id, payload)
-                    if not bootstrap_success:
-                        return
-                elif state == consts.DEPLOY_PHASE_CONFIG:
-                    config_success = self.subcloud_deploy_config(
-                        context, subcloud_id, payload)
-                    if not config_success:
-                        return
+            succeeded = True
+            if consts.DEPLOY_PHASE_INSTALL in deploy_phases_to_run:
+                succeeded = self.subcloud_deploy_install(
+                    context, subcloud_id, payload)
+            if succeeded and consts.DEPLOY_PHASE_BOOTSTRAP in deploy_phases_to_run:
+                succeeded = self.subcloud_deploy_bootstrap(
+                    context, subcloud_id, payload)
+            if succeeded and consts.DEPLOY_PHASE_CONFIG in deploy_phases_to_run:
+                succeeded = self.subcloud_deploy_config(
+                    context, subcloud_id, payload)
+            return succeeded
         except Exception as ex:
-            LOG.exception("run_deploy failed")
+            LOG.exception("run_deploy_phases failed")
             raise ex

     def _run_subcloud_config(self, subcloud, context,
-                             deploy_command, log_file):
+                             config_command, log_file):
         # Run the custom deploy playbook
         LOG.info("Starting deploy of %s" % subcloud.name)
         db_api.subcloud_update(
@@ -1852,7 +1761,7 @@ class SubcloudManager(manager.Manager):
         try:
             run_ansible = RunAnsible()
             aborted = run_ansible.exec_playbook(
-                log_file, deploy_command, subcloud.name)
+                log_file, config_command, subcloud.name)
         except PlaybookExecutionFailed:
             msg = utils.find_ansible_error_msg(
                 subcloud.name, log_file, consts.DEPLOY_STATE_CONFIGURING)
@@ -1921,7 +1830,7 @@ class SubcloudManager(manager.Manager):
         return True

     def _run_subcloud_bootstrap(self, context, subcloud,
-                                apply_command, log_file):
+                                bootstrap_command, log_file):
         # Update the subcloud deploy_status to bootstrapping
         db_api.subcloud_update(
             context, subcloud.id,
@@ -1933,7 +1842,7 @@ class SubcloudManager(manager.Manager):
         try:
             run_ansible = RunAnsible()
             aborted = run_ansible.exec_playbook(
-                log_file, apply_command, subcloud.name)
+                log_file, bootstrap_command, subcloud.name)
         except PlaybookExecutionFailed:
             msg = utils.find_ansible_error_msg(
                 subcloud.name, log_file, consts.DEPLOY_STATE_BOOTSTRAPPING)
diff --git a/distributedcloud/dcmanager/rpc/client.py b/distributedcloud/dcmanager/rpc/client.py
index 38b79aee5..3555d3ffc 100644
--- a/distributedcloud/dcmanager/rpc/client.py
+++ b/distributedcloud/dcmanager/rpc/client.py
@@ -124,8 +124,9 @@ class ManagerClient(RPCClient):
                          consts.TOPIC_DC_MANAGER,
                          self.BASE_RPC_API_VERSION)

-    def add_subcloud(self, ctxt, payload):
+    def add_subcloud(self, ctxt, subcloud_id, payload):
         return self.cast(ctxt, self.make_msg('add_subcloud',
+                                             subcloud_id=subcloud_id,
                                              payload=payload))

     def
delete_subcloud(self, ctxt, subcloud_id): diff --git a/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_phased_subcloud_deploy.py b/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_phased_subcloud_deploy.py index c372250dc..c2f61a6a9 100644 --- a/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_phased_subcloud_deploy.py +++ b/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_phased_subcloud_deploy.py @@ -42,7 +42,7 @@ FAKE_SUBCLOUD_INSTALL_VALUES = fake_subcloud.FAKE_SUBCLOUD_INSTALL_VALUES class FakeRPCClient(object): def subcloud_deploy_create(self, context, subcloud_id, _): subcloud = db_api.subcloud_get(context, subcloud_id) - return db_api.subcloud_db_model_to_dict(subcloud) + return subcloud # Apply the TestSubcloudPost parameter validation tests to the subcloud deploy diff --git a/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_subclouds.py b/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_subclouds.py index 27b2e3982..f01920b51 100644 --- a/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_subclouds.py +++ b/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_subclouds.py @@ -29,6 +29,7 @@ import webtest from dccommon import consts as dccommon_consts from dcmanager.api.controllers.v1 import subclouds from dcmanager.common import consts +from dcmanager.common import phased_subcloud_deploy as psd_common from dcmanager.common import prestage from dcmanager.common import utils as cutils from dcmanager.db.sqlalchemy import api as db_api @@ -277,8 +278,7 @@ class TestSubcloudPost(testroot.DCManagerApiTest, '192.168.204.2', '192.168.204.100') - p = mock.patch.object(subclouds.SubcloudsController, - '_get_network_address_pool') + p = mock.patch.object(psd_common, 'get_network_address_pool') self.mock_get_network_address_pool = p.start() self.mock_get_network_address_pool.return_value = \ self.management_address_pool @@ -288,11 +288,11 @@ class TestSubcloudPost(testroot.DCManagerApiTest, self.mock_rpc_client = p.start() self.addCleanup(p.stop) - p = mock.patch.object(subclouds.SubcloudsController, 'get_ks_client') + p = mock.patch.object(psd_common, 'get_ks_client') self.mock_get_ks_client = p.start() self.addCleanup(p.stop) - p = mock.patch.object(subclouds.PatchingClient, 'query') + p = mock.patch.object(psd_common.PatchingClient, 'query') self.mock_query = p.start() self.addCleanup(p.stop) @@ -562,7 +562,7 @@ class TestSubcloudPost(testroot.DCManagerApiTest, # Verify the request was rejected self.assertEqual(response.status_code, http_client.BAD_REQUEST) - @mock.patch.object(subclouds.SubcloudsController, '_validate_k8s_version') + @mock.patch.object(psd_common, 'validate_k8s_version') @mock.patch('dcmanager.common.utils.get_vault_load_files') def test_post_subcloud_with_release_parameter(self, mock_vault_files, mock_validate_k8s_version): @@ -596,7 +596,7 @@ class TestSubcloudPost(testroot.DCManagerApiTest, # Revert the software_version value self.install_data['software_version'] = SW_VERSION - @mock.patch.object(subclouds.PatchingClient, 'query') + @mock.patch.object(psd_common.PatchingClient, 'query') def test_post_subcloud_when_partial_applied_patch(self, mock_query): """Test POST operation when there is a partial-applied patch.""" @@ -1020,28 +1020,31 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): self.mock_rpc_state_client = p.start() self.addCleanup(p.stop) - @mock.patch.object(rpc_client, 'ManagerClient') - def test_delete_subcloud(self, mock_rpc_client): + p = 
mock.patch.object(rpc_client, 'ManagerClient') + self.mock_rpc_client = p.start() + self.addCleanup(p.stop) + + p = mock.patch.object(psd_common, 'get_ks_client') + self.mock_get_ks_client = p.start() + self.addCleanup(p.stop) + + def test_delete_subcloud(self): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) delete_url = FAKE_URL + '/' + str(subcloud.id) - mock_rpc_client().delete_subcloud.return_value = True + self.mock_rpc_client().delete_subcloud.return_value = True response = self.app.delete_json(delete_url, headers=FAKE_HEADERS) - mock_rpc_client().delete_subcloud.assert_called_once_with( + self.mock_rpc_client().delete_subcloud.assert_called_once_with( mock.ANY, mock.ANY) self.assertEqual(response.status_int, 200) - @mock.patch.object(rpc_client, 'ManagerClient') - def test_delete_wrong_request(self, mock_rpc_client): + def test_delete_wrong_request(self): delete_url = WRONG_URL + '/' + FAKE_ID six.assertRaisesRegex(self, webtest.app.AppError, "404 *", self.app.delete_json, delete_url, headers=FAKE_HEADERS) - @mock.patch.object(subclouds.SubcloudsController, - '_get_oam_addresses') - @mock.patch.object(rpc_client, 'ManagerClient') + @mock.patch.object(subclouds.SubcloudsController, '_get_oam_addresses') def test_get_subcloud(self, - mock_rpc_client, mock_get_oam_addresses): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) get_url = FAKE_URL + '/' + str(subcloud.id) @@ -1053,14 +1056,8 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): @mock.patch.object(subclouds.SubcloudsController, '_get_deploy_config_sync_status') - @mock.patch.object(subclouds.SubcloudsController, - '_get_oam_addresses') - @mock.patch.object(subclouds.SubcloudsController, - 'get_ks_client') - @mock.patch.object(rpc_client, 'ManagerClient') + @mock.patch.object(subclouds.SubcloudsController, '_get_oam_addresses') def test_get_online_subcloud_with_additional_detail(self, - mock_rpc_client, - mock_get_ks_client, mock_get_oam_addresses, mock_get_deploy_config_sync_status): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) @@ -1075,7 +1072,6 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): '10.10.10.3', '10.10.10.1', '10.10.10.2') - mock_get_ks_client.return_value = 'ks_client' mock_get_oam_addresses.return_value = oam_addresses mock_get_deploy_config_sync_status.return_value = dccommon_consts.DEPLOY_CONFIG_UP_TO_DATE response = self.app.get(get_url, headers=FAKE_HEADERS) @@ -1085,9 +1081,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): self.assertEqual( 'Deployment: configurations up-to-date', response.json['deploy_config_sync_status']) - @mock.patch.object(rpc_client, 'ManagerClient') - def test_get_offline_subcloud_with_additional_detail(self, - mock_rpc_client): + def test_get_offline_subcloud_with_additional_detail(self): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) get_url = FAKE_URL + '/' + str(subcloud.id) + '/detail' response = self.app.get(get_url, headers=FAKE_HEADERS) @@ -1098,21 +1092,14 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): @mock.patch.object(subclouds.SubcloudsController, '_get_deploy_config_sync_status') - @mock.patch.object(subclouds.SubcloudsController, - '_get_oam_addresses') - @mock.patch.object(subclouds.SubcloudsController, - 'get_ks_client') - @mock.patch.object(rpc_client, 'ManagerClient') + @mock.patch.object(subclouds.SubcloudsController, '_get_oam_addresses') def test_get_subcloud_deploy_config_status_unknown(self, - mock_rpc_client, - mock_get_ks_client, mock_get_oam_addresses, 
mock_get_deploy_config_sync_status): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) updated_subcloud = db_api.subcloud_update( self.ctx, subcloud.id, availability_status=dccommon_consts.AVAILABILITY_ONLINE) get_url = FAKE_URL + '/' + str(updated_subcloud.id) + '/detail' - mock_get_ks_client.return_value = 'ks_client' mock_get_oam_addresses.return_value = None mock_get_deploy_config_sync_status.return_value = None response = self.app.get(get_url, headers=FAKE_HEADERS) @@ -1120,48 +1107,37 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): self.assertEqual(response.status_code, http_client.OK) self.assertEqual('unknown', response.json['deploy_config_sync_status']) - @mock.patch.object(subclouds.SubcloudsController, - '_get_oam_addresses') - @mock.patch.object(subclouds.SubcloudsController, - 'get_ks_client') - @mock.patch.object(rpc_client, 'ManagerClient') - def test_get_subcloud_oam_ip_unavailable(self, - mock_rpc_client, - mock_get_ks_client, - mock_get_oam_addresses): + @mock.patch.object(subclouds.SubcloudsController, '_get_oam_addresses') + def test_get_subcloud_oam_ip_unavailable(self, mock_get_oam_addresses): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) updated_subcloud = db_api.subcloud_update( self.ctx, subcloud.id, availability_status=dccommon_consts.AVAILABILITY_ONLINE) get_url = FAKE_URL + '/' + str(updated_subcloud.id) + '/detail' - mock_get_ks_client.return_value = 'ks_client' + self.mock_get_ks_client.return_value = 'ks_client' mock_get_oam_addresses.return_value = None response = self.app.get(get_url, headers=FAKE_HEADERS) self.assertEqual(response.content_type, 'application/json') self.assertEqual(response.status_code, http_client.OK) self.assertEqual('unavailable', response.json['oam_floating_ip']) - @mock.patch.object(rpc_client, 'ManagerClient') - def test_get_wrong_request(self, mock_rpc_client): + def test_get_wrong_request(self): get_url = WRONG_URL + '/' + FAKE_ID six.assertRaisesRegex(self, webtest.app.AppError, "404 *", self.app.get, get_url, headers=FAKE_HEADERS) - @mock.patch.object(rpc_client, 'ManagerClient') - def test_get_subcloud_all(self, mock_rpc_client): + def test_get_subcloud_all(self): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) get_url = FAKE_URL response = self.app.get(get_url, headers=FAKE_HEADERS) self.assertEqual(response.json['subclouds'][0]['name'], subcloud.name) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - def test_patch_subcloud(self, mock_get_patch_data, - mock_rpc_client): + def test_patch_subcloud(self, mock_get_patch_data): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) data = {'management-state': dccommon_consts.MANAGEMENT_UNMANAGED} - mock_rpc_client().update_subcloud.return_value = True + self.mock_rpc_client().update_subcloud.return_value = True mock_get_patch_data.return_value = data response = self.app.patch_json(FAKE_URL + '/' + str(subcloud.id), headers=FAKE_HEADERS, @@ -1173,17 +1149,14 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): self.assertEqual(dccommon_consts.MANAGEMENT_UNMANAGED, updated_subcloud.management_state) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - def test_update_subcloud_group_value(self, - mock_get_patch_data, - mock_rpc_client): + def test_update_subcloud_group_value(self, mock_get_patch_data): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) good_values = [1, "1"] expected_group_id = 1 for x in 
good_values: data = {'group_id': x} - mock_rpc_client().update_subcloud.return_value = True + self.mock_rpc_client().update_subcloud.return_value = True mock_get_patch_data.return_value = data response = self.app.patch_json(FAKE_URL + '/' + str(subcloud.id), headers=FAKE_HEADERS, @@ -1195,15 +1168,12 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): self.assertEqual(expected_group_id, updated_subcloud.group_id) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - def test_update_subcloud_group_value_by_name(self, - mock_get_patch_data, - mock_rpc_client): + def test_update_subcloud_group_value_by_name(self, mock_get_patch_data): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) expected_group_id = 1 data = {'group_id': 'Default'} - mock_rpc_client().update_subcloud.return_value = True + self.mock_rpc_client().update_subcloud.return_value = True mock_get_patch_data.return_value = data response = self.app.patch_json(FAKE_URL + '/' + str(subcloud.id), headers=FAKE_HEADERS, @@ -1215,11 +1185,8 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): self.assertEqual(expected_group_id, updated_subcloud.group_id) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - def test_update_subcloud_group_bad_value(self, - mock_get_patch_data, - mock_rpc_client): + def test_update_subcloud_group_bad_value(self, mock_get_patch_data): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) # There is only 1 subcloud group 'Default' which has id '1' # This should test that boolean, zero, negative, float and bad values @@ -1227,7 +1194,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): bad_values = [0, -1, 2, "0", "-1", 0.5, "BadName", "False", "True"] for x in bad_values: data = {'group_id': x} - mock_rpc_client().update_subcloud.return_value = True + self.mock_rpc_client().update_subcloud.return_value = True mock_get_patch_data.return_value = data response = self.app.patch_json(FAKE_URL + '/' + str(subcloud.id), headers=FAKE_HEADERS, @@ -1235,12 +1202,10 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): expect_errors=True) self.assertEqual(response.status_int, 400) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - @mock.patch('dcmanager.common.utils.get_vault_load_files') + @mock.patch.object(cutils, 'get_vault_load_files') def test_update_subcloud_install_values_persistent_size(self, mock_vault_files, - mock_get_patch_data, - mock_rpc_client): + mock_get_patch_data): mock_vault_files.return_value = ('fake_iso', 'fake_sig') subcloud = fake_subcloud.create_fake_subcloud(self.ctx, data_install=None) payload = {} @@ -1250,7 +1215,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): data = {'bmc_password': encoded_password} payload.update({'install_values': install_data}) payload.update(data) - mock_rpc_client().update_subcloud.return_value = True + self.mock_rpc_client().update_subcloud.return_value = True mock_get_patch_data.return_value = payload fake_content = "fake content".encode("utf-8") @@ -1261,7 +1226,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): "fake_name", fake_content)]) install_data.update({'bmc_password': encoded_password}) - mock_rpc_client().update_subcloud.assert_called_once_with( + self.mock_rpc_client().update_subcloud.assert_called_once_with( mock.ANY, subcloud.id, management_state=None, @@ -1272,14 +1237,13 @@ class 
TestSubcloudAPIOther(testroot.DCManagerApiTest): force=None) self.assertEqual(response.status_int, 200) - @mock.patch.object(rpc_client, 'ManagerClient') - @mock.patch.object(subclouds.SubcloudsController, '_get_network_address_pool') + @mock.patch.object(psd_common, 'get_network_address_pool') @mock.patch.object(subclouds.SubcloudsController, '_validate_network_reconfiguration') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') def test_patch_subcloud_network_values( self, mock_get_patch_data, mock_validate_network_reconfiguration, - mock_mgmt_address_pool, mock_rpc_client): + mock_mgmt_address_pool): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) db_api.subcloud_update( self.ctx, subcloud.id, @@ -1298,25 +1262,21 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): '192.168.204.100') mock_mgmt_address_pool.return_value = fake_management_address_pool - mock_rpc_client().update_subcloud_with_network_reconfig.return_value = True + self.mock_rpc_client().update_subcloud_with_network_reconfig.return_value = True mock_get_patch_data.return_value = payload response = self.app.patch_json(FAKE_URL + '/' + str(subcloud.id), headers=FAKE_HEADERS, params=payload) self.assertEqual(response.status_int, 200) mock_validate_network_reconfiguration.assert_called_once() - mock_rpc_client().update_subcloud_with_network_reconfig.assert_called_once_with( - mock.ANY, - subcloud.id, - payload) + self.mock_rpc_client().update_subcloud_with_network_reconfig.\ + assert_called_once_with(mock.ANY, subcloud.id, payload) self.assertEqual(response.status_int, 200) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - @mock.patch('dcmanager.common.utils.get_vault_load_files') + @mock.patch.object(cutils, 'get_vault_load_files') def test_patch_subcloud_install_values(self, mock_vault_files, - mock_get_patch_data, - mock_rpc_client): + mock_get_patch_data): mock_vault_files.return_value = ('fake_iso', 'fake_sig') subcloud = fake_subcloud.create_fake_subcloud(self.ctx, data_install=None) payload = {} @@ -1326,7 +1286,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): data = {'bmc_password': encoded_password} payload.update({'install_values': install_data}) payload.update(data) - mock_rpc_client().update_subcloud.return_value = True + self.mock_rpc_client().update_subcloud.return_value = True mock_get_patch_data.return_value = payload fake_content = "fake content".encode("utf-8") @@ -1337,7 +1297,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): "fake_name", fake_content)]) install_data.update({'bmc_password': encoded_password}) - mock_rpc_client().update_subcloud.assert_called_once_with( + self.mock_rpc_client().update_subcloud.assert_called_once_with( mock.ANY, subcloud.id, management_state=None, @@ -1348,11 +1308,10 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): force=None) self.assertEqual(response.status_int, 200) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - @mock.patch('dcmanager.common.utils.get_vault_load_files') + @mock.patch.object(cutils, 'get_vault_load_files') def test_patch_subcloud_install_values_with_existing_data_install( - self, mock_vault_files, mock_get_patch_data, mock_rpc_client): + self, mock_vault_files, mock_get_patch_data): mock_vault_files.return_value = ('fake_iso', 'fake_sig') install_data = copy.copy(FAKE_SUBCLOUD_INSTALL_VALUES) subcloud = fake_subcloud.create_fake_subcloud( @@ -1364,7 
+1323,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): data = {'bmc_password': encoded_password} payload.update({'install_values': install_data}) payload.update(data) - mock_rpc_client().update_subcloud.return_value = True + self.mock_rpc_client().update_subcloud.return_value = True mock_get_patch_data.return_value = payload fake_content = "fake content".encode("utf-8") @@ -1375,7 +1334,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): "fake_name", fake_content)]) install_data.update({'bmc_password': encoded_password}) - mock_rpc_client().update_subcloud.assert_called_once_with( + self.mock_rpc_client().update_subcloud.assert_called_once_with( mock.ANY, subcloud.id, management_state=None, @@ -1386,10 +1345,8 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): force=None) self.assertEqual(response.status_int, 200) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - def test_patch_subcloud_no_body(self, mock_get_patch_data, - mock_rpc_client): + def test_patch_subcloud_no_body(self, mock_get_patch_data): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) data = {} mock_get_patch_data.return_value = data @@ -1398,10 +1355,8 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): FAKE_URL + '/' + str(subcloud.id), headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - def test_patch_subcloud_bad_status(self, mock_get_patch_data, - mock_rpc_client): + def test_patch_subcloud_bad_status(self, mock_get_patch_data): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) data = {'management-state': 'bad-status'} mock_get_patch_data.return_value = data @@ -1410,10 +1365,8 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): FAKE_URL + '/' + str(subcloud.id), headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - def test_patch_subcloud_bad_force_value(self, mock_get_patch_data, - mock_rpc_client): + def test_patch_subcloud_bad_force_value(self, mock_get_patch_data): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) data = {'management-state': dccommon_consts.MANAGEMENT_MANAGED, 'force': 'bad-value'} @@ -1423,10 +1376,8 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): FAKE_URL + '/' + str(subcloud.id), headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - def test_patch_subcloud_forced_unmanaged(self, mock_get_patch_data, - mock_rpc_client): + def test_patch_subcloud_forced_unmanaged(self, mock_get_patch_data): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) data = {'management-state': dccommon_consts.MANAGEMENT_UNMANAGED, 'force': True} @@ -1436,19 +1387,17 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): FAKE_URL + '/' + str(subcloud.id), headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_patch_data') - def test_patch_subcloud_forced_manage(self, mock_get_patch_data, - mock_rpc_client): + def test_patch_subcloud_forced_manage(self, mock_get_patch_data): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) payload = {'management-state': dccommon_consts.MANAGEMENT_MANAGED, 'force': True} - mock_rpc_client().update_subcloud.return_value = True + 
self.mock_rpc_client().update_subcloud.return_value = True mock_get_patch_data.return_value = payload response = self.app.patch_json(FAKE_URL + '/' + str(subcloud.id), headers=FAKE_HEADERS, params=payload) - mock_rpc_client().update_subcloud.assert_called_once_with( + self.mock_rpc_client().update_subcloud.assert_called_once_with( mock.ANY, mock.ANY, management_state=dccommon_consts.MANAGEMENT_MANAGED, @@ -1459,137 +1408,119 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): force=True) self.assertEqual(response.status_int, 200) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_reconfig_payload') - def test_reconfigure_subcloud(self, mock_get_reconfig_payload, - mock_rpc_client): + def test_reconfigure_subcloud(self, mock_get_reconfig_payload): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) fake_password = (base64.b64encode('testpass'.encode("utf-8"))).decode('ascii') data = {'sysadmin_password': fake_password} - mock_rpc_client().reconfigure_subcloud.return_value = True + self.mock_rpc_client().reconfigure_subcloud.return_value = True mock_get_reconfig_payload.return_value = data response = self.app.patch_json(FAKE_URL + '/' + str(subcloud.id) + '/reconfigure', headers=FAKE_HEADERS, params=data) - mock_rpc_client().reconfigure_subcloud.assert_called_once_with( + self.mock_rpc_client().reconfigure_subcloud.assert_called_once_with( mock.ANY, subcloud.id, mock.ANY) self.assertEqual(response.status_int, 200) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_reconfig_payload') - def test_reconfigure_subcloud_no_body(self, mock_get_reconfig_payload, - mock_rpc_client): + def test_reconfigure_subcloud_no_body(self, mock_get_reconfig_payload): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) # Pass an empty request body data = {} mock_get_reconfig_payload.return_value = data - mock_rpc_client().reconfigure_subcloud.return_value = True + self.mock_rpc_client().reconfigure_subcloud.return_value = True six.assertRaisesRegex(self, webtest.app.AppError, "400 *", self.app.patch_json, FAKE_URL + '/' + str(subcloud.id) + '/reconfigure', headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_reconfig_payload') - def test_reconfigure_subcloud_bad_password(self, mock_get_reconfig_payload, - mock_rpc_client): + def test_reconfigure_subcloud_bad_password(self, mock_get_reconfig_payload): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) # Pass a sysadmin_password which is not base64 encoded data = {'sysadmin_password': 'not_base64'} mock_get_reconfig_payload.return_value = data - mock_rpc_client().reconfigure_subcloud.return_value = True + self.mock_rpc_client().reconfigure_subcloud.return_value = True six.assertRaisesRegex(self, webtest.app.AppError, "400 *", self.app.patch_json, FAKE_URL + '/' + str(subcloud.id) + '/reconfigure', headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_reconfig_payload') def test_reconfigure_invalid_deploy_status(self, - mock_get_reconfig_payload, - mock_rpc_client): + mock_get_reconfig_payload): subcloud = fake_subcloud.create_fake_subcloud( self.ctx, deploy_status=consts.DEPLOY_STATE_BOOTSTRAP_FAILED) fake_password = base64.b64encode('testpass'.encode("utf-8")).decode("utf-8") data = {'sysadmin_password': fake_password} mock_get_reconfig_payload.return_value = data - 
mock_rpc_client().reconfigure_subcloud.return_value = True + self.mock_rpc_client().reconfigure_subcloud.return_value = True six.assertRaisesRegex(self, webtest.app.AppError, "400 *", self.app.patch_json, FAKE_URL + '/' + str(subcloud.id) + '/reconfigure', headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') - @mock.patch.object(rpc_client, 'SubcloudStateClient') @mock.patch.object(subclouds.SubcloudsController, '_get_updatestatus_payload') - def test_subcloud_updatestatus(self, mock_get_updatestatus_payload, - mock_rpc_state_client, _): + def test_subcloud_updatestatus(self, mock_get_updatestatus_payload): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) data = {'endpoint': 'dc-cert', 'status': 'in-sync'} mock_get_updatestatus_payload.return_value = data - mock_rpc_state_client().update_subcloud_endpoint_status.return_value = True + self.mock_rpc_state_client().update_subcloud_endpoint_status.return_value = True response = self.app.patch_json( FAKE_URL + '/' + str(subcloud.id) + '/update_status', data, headers=FAKE_HEADERS) - mock_rpc_state_client().update_subcloud_endpoint_status.assert_called_once_with( - mock.ANY, - subcloud.name, - 'dc-cert', - 'in-sync') + self.mock_rpc_state_client().update_subcloud_endpoint_status.\ + assert_called_once_with(mock.ANY, subcloud.name, 'dc-cert', 'in-sync') self.assertEqual(response.status_int, 200) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_updatestatus_payload') def test_subcloud_updatestatus_invalid_endpoint( - self, mock_get_updatestatus_payload, - mock_rpc_client): + self, mock_get_updatestatus_payload): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) data = {'endpoint': 'any-other-endpoint', 'status': 'in-sync'} mock_get_updatestatus_payload.return_value = data - mock_rpc_client().update_subcloud_endpoint_status.return_value = True + self.mock_rpc_client().update_subcloud_endpoint_status.return_value = True six.assertRaisesRegex(self, webtest.app.AppError, "400 *", self.app.patch_json, FAKE_URL + '/' + str(subcloud.id) + '/update_status', headers=FAKE_HEADERS, params=data) - mock_rpc_client().update_subcloud_endpoint_status.assert_not_called() + self.mock_rpc_client().update_subcloud_endpoint_status.assert_not_called() - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_updatestatus_payload') def test_subcloud_updatestatus_invalid_status( - self, mock_get_updatestatus_payload, - mock_rpc_client): + self, mock_get_updatestatus_payload): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) data = {'endpoint': 'dc-cert', 'status': 'not-sure'} mock_get_updatestatus_payload.return_value = data - mock_rpc_client().update_subcloud_endpoint_status.return_value = True + self.mock_rpc_client().update_subcloud_endpoint_status.return_value = True six.assertRaisesRegex(self, webtest.app.AppError, "400 *", self.app.patch_json, FAKE_URL + '/' + str(subcloud.id) + '/update_status', headers=FAKE_HEADERS, params=data) - mock_rpc_client().update_subcloud_endpoint_status.assert_not_called() + self.mock_rpc_client().update_subcloud_endpoint_status.assert_not_called() - @mock.patch.object(rpc_client, 'ManagerClient') - def test_get_config_file_path(self, mock_rpc_client): - sc = subclouds.SubcloudsController() - bootstrap_file = sc._get_config_file_path("subcloud1") - install_values = sc._get_config_file_path("subcloud1", "install_values") - deploy_config = sc._get_config_file_path("subcloud1", 
consts.DEPLOY_CONFIG) + def test_get_config_file_path(self): + bootstrap_file = psd_common.get_config_file_path("subcloud1") + install_values = psd_common.get_config_file_path("subcloud1", + "install_values") + deploy_config = psd_common.get_config_file_path("subcloud1", + consts.DEPLOY_CONFIG) self.assertEqual(bootstrap_file, f'{dccommon_consts.ANSIBLE_OVERRIDES_PATH}/subcloud1.yml') self.assertEqual(install_values, @@ -1597,10 +1528,8 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): self.assertEqual(deploy_config, f'{dccommon_consts.ANSIBLE_OVERRIDES_PATH}/subcloud1_deploy_config.yml') - @mock.patch.object(rpc_client, 'ManagerClient') - def test_format_ip_address(self, mock_rpc_client): - sc = subclouds.SubcloudsController() - fake_payload = dict() + def test_format_ip_address(self): + fake_payload = {} good_values = { '10.10.10.3': '10.10.10.3', '2620:10a:a001:a103::1135': '2620:10a:a001:a103::1135', @@ -1610,53 +1539,40 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): } for k, v in good_values.items(): - fake_payload.update({'bootstrap-address': k}) - sc._format_ip_address(fake_payload) + fake_payload['bootstrap-address'] = k + psd_common.format_ip_address(fake_payload) self.assertEqual(fake_payload['bootstrap-address'], v) - fake_payload[subclouds.INSTALL_VALUES] = dict() + fake_payload[subclouds.INSTALL_VALUES] = {} for k, v in good_values.items(): - fake_payload[subclouds.INSTALL_VALUES].update({'bmc_address': k}) - sc._format_ip_address(fake_payload) + fake_payload[subclouds.INSTALL_VALUES]['bmc_address'] = k + psd_common.format_ip_address(fake_payload) self.assertEqual(fake_payload[subclouds.INSTALL_VALUES]['bmc_address'], v) - fake_payload.update({'othervalues1': 'othervalues1'}) - fake_payload[subclouds.INSTALL_VALUES].update({'othervalues2': 'othervalues2'}) - sc._format_ip_address(fake_payload) + fake_payload['othervalues1'] = 'othervalues1' + fake_payload[subclouds.INSTALL_VALUES]['othervalues2'] = 'othervalues2' + psd_common.format_ip_address(fake_payload) self.assertEqual(fake_payload['othervalues1'], 'othervalues1') self.assertEqual(fake_payload[subclouds.INSTALL_VALUES]['othervalues2'], 'othervalues2') - @mock.patch.object(rpc_client, 'ManagerClient') - @mock.patch.object(keyring, 'get_password') - def test_get_subcloud_db_install_values( - self, mock_keyring, mock_rpc_client): + def test_get_subcloud_db_install_values(self): install_data = copy.copy(FAKE_SUBCLOUD_INSTALL_VALUES) encoded_password = base64.b64encode( 'bmc_password'.encode("utf-8")).decode('utf-8') - bmc_password = {'bmc_password': encoded_password} - install_data.update(bmc_password) + install_data['bmc_password'] = encoded_password test_subcloud = copy.copy(FAKE_SUBCLOUD_DATA) subcloud_info = Subcloud(test_subcloud, False) subcloud_info.data_install = json.dumps(install_data) - sc = subclouds.SubcloudsController() - actual_result = sc._get_subcloud_db_install_values(subcloud_info) - actual_result.update({ - 'admin_password': 'adminpass' - }) - install_data.update({ - 'ansible_become_pass': consts.TEMP_SYSADMIN_PASSWORD, - 'ansible_ssh_pass': consts.TEMP_SYSADMIN_PASSWORD, - 'admin_password': 'adminpass' - }) + actual_result = psd_common.get_subcloud_db_install_values(subcloud_info) + self.assertEqual( json.loads(json.dumps(install_data)), json.loads(json.dumps(actual_result))) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(keyring, 'get_password') def test_get_subcloud_db_install_values_without_bmc_password( - self, mock_keyring, mock_rpc_client): + self, 
mock_keyring): install_data = copy.copy(FAKE_SUBCLOUD_INSTALL_VALUES) subcloud = fake_subcloud.create_fake_subcloud( self.ctx, data_install=json.dumps(install_data)) @@ -1667,42 +1583,43 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): headers=FAKE_HEADERS) @mock.patch.object(cutils, 'get_vault_load_files') - @mock.patch.object(rpc_client, 'ManagerClient') - @mock.patch.object(subclouds.SubcloudsController, '_upload_deploy_config_file') - @mock.patch.object(subclouds.SubcloudsController, '_validate_k8s_version') - @mock.patch.object(subclouds.SubcloudsController, '_validate_subcloud_config') - @mock.patch.object(subclouds.SubcloudsController, '_validate_install_parameters') - @mock.patch.object(subclouds.SubcloudsController, - '_get_subcloud_db_install_values') - @mock.patch.object(subclouds.SubcloudsController, '_get_request_data') + @mock.patch.object(psd_common, 'upload_deploy_config_file') + @mock.patch.object(psd_common, 'validate_k8s_version') + @mock.patch.object(psd_common, 'validate_subcloud_config') + @mock.patch.object(psd_common, 'validate_bootstrap_values') def test_reinstall_subcloud( - self, mock_get_request_data, mock_get_subcloud_db_install_values, - mock_validate_install_parameters, mock_validate_subcloud_config, + self, mock_validate_bootstrap_values, mock_validate_subcloud_config, mock_validate_k8s_version, mock_upload_deploy_config_file, - mock_rpc_client, mock_get_vault_load_files): - - subcloud = fake_subcloud.create_fake_subcloud(self.ctx) - install_data = copy.copy(FAKE_SUBCLOUD_INSTALL_VALUES) - reinstall_data = copy.copy(FAKE_SUBCLOUD_BOOTSTRAP_PAYLOAD) - mock_get_request_data.return_value = reinstall_data + mock_get_vault_load_files): encoded_password = base64.b64encode( 'bmc_password'.encode("utf-8")).decode('utf-8') - bmc_password = {'bmc_password': encoded_password} - install_data.update(bmc_password) - mock_get_subcloud_db_install_values.return_value = install_data - mock_rpc_client().reinstall_subcloud.return_value = True + data_install = {**FAKE_SUBCLOUD_INSTALL_VALUES, + 'bmc_password': encoded_password} + + subcloud = fake_subcloud.create_fake_subcloud( + self.ctx, name=fake_subcloud.FAKE_BOOTSTRAP_FILE_DATA["name"], + data_install=json.dumps(data_install)) + + fake_bootstrap_content = json.dumps( + fake_subcloud.FAKE_BOOTSTRAP_FILE_DATA).encode("utf-8") + mock_get_vault_load_files.return_value = ('iso_file_path', 'sig_file_path') - response = self.app.patch_json( - FAKE_URL + '/' + str(subcloud.id) + '/reinstall', - headers=FAKE_HEADERS, params=reinstall_data) + params = {'sysadmin_password': encoded_password} - mock_validate_install_parameters.assert_called_once() + response = self.app.patch( + FAKE_URL + '/' + str(subcloud.id) + '/reinstall', + headers=FAKE_HEADERS, params=params, + upload_files=[("bootstrap_values", + "bootstrap_fake_filename", + fake_bootstrap_content)]) + + mock_validate_bootstrap_values.assert_called_once() mock_validate_subcloud_config.assert_called_once() mock_validate_k8s_version.assert_called_once() - mock_rpc_client().reinstall_subcloud.assert_called_once_with( + self.mock_rpc_client().reinstall_subcloud.assert_called_once_with( mock.ANY, subcloud.id, mock.ANY) @@ -1711,20 +1628,19 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): mock_upload_deploy_config_file.assert_called_once() self.assertEqual(SW_VERSION, response.json['software-version']) + @mock.patch.object(psd_common, 'check_required_parameters') @mock.patch.object(cutils, 'get_vault_load_files') - @mock.patch.object(rpc_client, 'ManagerClient') - 
@mock.patch.object(subclouds.SubcloudsController, '_upload_deploy_config_file') - @mock.patch.object(subclouds.SubcloudsController, '_validate_k8s_version') - @mock.patch.object(subclouds.SubcloudsController, '_validate_subcloud_config') - @mock.patch.object(subclouds.SubcloudsController, '_validate_install_parameters') - @mock.patch.object(subclouds.SubcloudsController, - '_get_subcloud_db_install_values') - @mock.patch.object(subclouds.SubcloudsController, '_get_request_data') + @mock.patch.object(psd_common, 'upload_deploy_config_file') + @mock.patch.object(psd_common, 'validate_k8s_version') + @mock.patch.object(psd_common, 'validate_subcloud_config') + @mock.patch.object(psd_common, 'validate_bootstrap_values') + @mock.patch.object(psd_common, 'get_subcloud_db_install_values') + @mock.patch.object(psd_common, 'get_request_data') def test_reinstall_subcloud_with_release_parameter( self, mock_get_request_data, mock_get_subcloud_db_install_values, mock_validate_install_parameters, mock_validate_subcloud_config, mock_validate_k8s_version, mock_upload_deploy_config_file, - mock_rpc_client, mock_get_vault_load_files): + mock_get_vault_load_files, mock_check_required_parameters): software_version = '21.12' subcloud = fake_subcloud.create_fake_subcloud(self.ctx) @@ -1739,7 +1655,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): install_data.update(bmc_password) mock_get_subcloud_db_install_values.return_value = install_data - mock_rpc_client().reinstall_subcloud.return_value = True + self.mock_rpc_client().reinstall_subcloud.return_value = True mock_get_vault_load_files.return_value = ('iso_file_path', 'sig_file_path') response = self.app.patch_json( @@ -1748,7 +1664,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): mock_validate_install_parameters.assert_called_once() mock_validate_subcloud_config.assert_called_once() - mock_rpc_client().reinstall_subcloud.assert_called_once_with( + self.mock_rpc_client().reinstall_subcloud.assert_called_once_with( mock.ANY, subcloud.id, mock.ANY) @@ -1761,15 +1677,12 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): json.loads(response.json['data_install'])['software_version']) @mock.patch.object(cutils, 'get_vault_load_files') - @mock.patch.object(rpc_client, 'ManagerClient') - @mock.patch.object(subclouds.SubcloudsController, - '_get_subcloud_db_install_values') - @mock.patch.object(subclouds.SubcloudsController, '_validate_install_parameters') - @mock.patch.object(subclouds.SubcloudsController, '_get_request_data') + @mock.patch.object(psd_common, 'get_subcloud_db_install_values') + @mock.patch.object(psd_common, 'validate_bootstrap_values') + @mock.patch.object(psd_common, 'get_request_data') def test_reinstall_subcloud_no_body( self, mock_get_request_data, mock_validate_install_parameters, - mock_get_subcloud_db_install_values, mock_rpc_client, - mock_get_vault_load_files): + mock_get_subcloud_db_install_values, mock_get_vault_load_files): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) install_data = copy.copy(FAKE_SUBCLOUD_INSTALL_VALUES) @@ -1781,22 +1694,19 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): mock_validate_install_parameters.assert_not_called() mock_get_subcloud_db_install_values.return_value = install_data - mock_rpc_client().reinstall_subcloud.return_value = True + self.mock_rpc_client().reinstall_subcloud.return_value = True six.assertRaisesRegex(self, webtest.app.AppError, "400 *", self.app.patch_json, FAKE_URL + '/' + str(subcloud.id) + '/reinstall', headers=FAKE_HEADERS, params={}) 
@mock.patch.object(cutils, 'get_vault_load_files') - @mock.patch.object(rpc_client, 'ManagerClient') - @mock.patch.object(subclouds.SubcloudsController, - '_get_subcloud_db_install_values') - @mock.patch.object(subclouds.SubcloudsController, '_validate_install_parameters') - @mock.patch.object(subclouds.SubcloudsController, '_get_request_data') + @mock.patch.object(psd_common, 'get_subcloud_db_install_values') + @mock.patch.object(psd_common, 'validate_bootstrap_values') + @mock.patch.object(psd_common, 'get_request_data') def test_reinstall_online_subcloud( self, mock_get_request_data, mock_validate_install_parameters, - mock_get_subcloud_db_install_values, mock_rpc_client, - mock_get_vault_load_files): + mock_get_subcloud_db_install_values, mock_get_vault_load_files): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) db_api.subcloud_update( @@ -1813,19 +1723,16 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): mock_validate_install_parameters.assert_not_called() mock_get_subcloud_db_install_values.return_value = install_data - mock_rpc_client().reinstall_subcloud.return_value = True + self.mock_rpc_client().reinstall_subcloud.return_value = True six.assertRaisesRegex(self, webtest.app.AppError, "400 *", self.app.patch_json, FAKE_URL + '/' + str(subcloud.id) + '/reinstall', headers=FAKE_HEADERS, params={}) - @mock.patch.object(rpc_client, 'ManagerClient') - @mock.patch.object(subclouds.SubcloudsController, - '_get_subcloud_db_install_values') - @mock.patch.object(subclouds.SubcloudsController, '_get_request_data') + @mock.patch.object(psd_common, 'get_subcloud_db_install_values') + @mock.patch.object(psd_common, 'get_request_data') def test_reinstall_subcloud_missing_required_value( - self, mock_get_request_data, mock_get_subcloud_db_install_values, - mock_rpc_client): + self, mock_get_request_data, mock_get_subcloud_db_install_values): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) install_data = copy.copy(FAKE_SUBCLOUD_INSTALL_VALUES) @@ -1835,7 +1742,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): bmc_password = {'bmc_password': encoded_password} install_data.update(bmc_password) mock_get_subcloud_db_install_values.return_value = install_data - mock_rpc_client().reinstall_subcloud.return_value = True + self.mock_rpc_client().reinstall_subcloud.return_value = True for k in ['name', 'system_mode', 'external_oam_subnet', 'external_oam_gateway_address', 'external_oam_floating_address', @@ -1848,19 +1755,18 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): str(subcloud.id) + '/reinstall', headers=FAKE_HEADERS, params=reinstall_data) + @mock.patch.object(psd_common, 'check_required_parameters') @mock.patch.object(cutils, 'get_vault_load_files') - @mock.patch.object(rpc_client, 'ManagerClient') - @mock.patch.object(subclouds.SubcloudsController, - '_get_subcloud_db_install_values') - @mock.patch.object(subclouds.SubcloudsController, '_validate_k8s_version') - @mock.patch.object(subclouds.SubcloudsController, '_validate_subcloud_config') - @mock.patch.object(subclouds.SubcloudsController, '_validate_install_parameters') - @mock.patch.object(subclouds.SubcloudsController, '_get_request_data') + @mock.patch.object(psd_common, 'get_subcloud_db_install_values') + @mock.patch.object(psd_common, 'validate_k8s_version') + @mock.patch.object(psd_common, 'validate_subcloud_config') + @mock.patch.object(psd_common, 'validate_bootstrap_values') + @mock.patch.object(psd_common, 'get_request_data') def test_reinstall_subcloud_missing_stored_value( self, 
mock_get_request_data, mock_validate_install_parameters, mock_validate_subcloud_config, mock_validate_k8s_version, - mock_get_subcloud_db_install_values, mock_rpc_client, - mock_get_vault_load_files): + mock_get_subcloud_db_install_values, mock_get_vault_load_files, + mock_check_required_parameters): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) install_data = copy.copy(FAKE_SUBCLOUD_INSTALL_VALUES) @@ -1871,7 +1777,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): install_data.update(bmc_password) mock_get_subcloud_db_install_values.return_value = install_data - mock_rpc_client().reinstall_subcloud.return_value = True + self.mock_rpc_client().reinstall_subcloud.return_value = True mock_get_vault_load_files.return_value = ('iso_file_path', 'sig_file_path') for k in ['management_subnet', 'management_start_address', @@ -1885,14 +1791,12 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): headers=FAKE_HEADERS, params=reinstall_data) self.assertEqual(response.status_int, 200) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(prestage, '_get_system_controller_upgrades') @mock.patch.object(prestage, '_get_prestage_subcloud_info') @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') def test_prestage_subcloud_validate_detailed(self, mock_get_prestage_payload, mock_prestage_subcloud_info, - mock_controller_upgrade, - mock_rpc_client): + mock_controller_upgrade): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) subcloud = db_api.subcloud_update( self.ctx, subcloud.id, availability_status=dccommon_consts.AVAILABILITY_ONLINE, @@ -1906,25 +1810,23 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): health_report_no_alarm, \ OAM_FLOATING_IP - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data response = self.app.patch_json(FAKE_URL + '/' + str(subcloud.id) + '/prestage', headers=FAKE_HEADERS, params=data) - mock_rpc_client().prestage_subcloud.assert_called_once_with( + self.mock_rpc_client().prestage_subcloud.assert_called_once_with( mock.ANY, mock.ANY) self.assertEqual(response.status_int, 200) @mock.patch.object(cutils, 'get_systemcontroller_installed_loads') - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(prestage, '_get_system_controller_upgrades') @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') def test_prestage_subcloud_invalid_release(self, mock_get_prestage_payload, mock_controller_upgrade, - mock_rpc_client, mock_installed_loads): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) subcloud = db_api.subcloud_update( @@ -1942,7 +1844,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): 'release': fake_release} mock_controller_upgrade.return_value = list() - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data six.assertRaisesRegex(self, webtest.app.AppError, "400 *", @@ -1950,12 +1852,10 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): str(subcloud.id) + '/prestage', headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') @mock.patch.object(prestage, '_get_system_controller_upgrades') def test_prestage_subcloud_unmanaged(self, mock_controller_upgrade, - mock_get_prestage_payload, - mock_rpc_client): + 
mock_get_prestage_payload): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) subcloud = db_api.subcloud_update( @@ -1965,7 +1865,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): data = {'sysadmin_password': fake_password} mock_controller_upgrade.return_value = list() - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data six.assertRaisesRegex(self, webtest.app.AppError, "400 *", @@ -1973,12 +1873,10 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): str(subcloud.id) + '/prestage', headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') @mock.patch.object(prestage, '_get_system_controller_upgrades') def test_prestage_subcloud_offline(self, mock_controller_upgrade, - mock_get_prestage_payload, - mock_rpc_client): + mock_get_prestage_payload): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) subcloud = db_api.subcloud_update( @@ -1988,7 +1886,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): data = {'sysadmin_password': fake_password} mock_controller_upgrade.return_value = list() - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data six.assertRaisesRegex(self, webtest.app.AppError, "400 *", @@ -1997,14 +1895,12 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): headers=FAKE_HEADERS, params=data) @mock.patch.object(cutils, 'get_systemcontroller_installed_loads') - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(prestage, '_get_system_controller_upgrades') @mock.patch.object(prestage, '_get_prestage_subcloud_info') @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') def test_prestage_subcloud_duplex(self, mock_get_prestage_payload, mock_prestage_subcloud_info, mock_controller_upgrade, - mock_rpc_client, mock_installed_loads): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) subcloud = db_api.subcloud_update( @@ -2023,7 +1919,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): health_report_no_alarm, \ OAM_FLOATING_IP - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data six.assertRaisesRegex(self, webtest.app.AppError, "400 *", @@ -2031,14 +1927,12 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): str(subcloud.id) + '/prestage', headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(prestage, '_get_system_controller_upgrades') @mock.patch.object(prestage, '_get_prestage_subcloud_info') @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') def test_prestage_subcloud_non_mgmt_alarm(self, mock_get_prestage_payload, mock_prestage_subcloud_info, - mock_controller_upgrade, - mock_rpc_client): + mock_controller_upgrade): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) subcloud = db_api.subcloud_update( @@ -2053,26 +1947,24 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): health_report_no_mgmt_alarm, \ OAM_FLOATING_IP - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data response = self.app.patch_json(FAKE_URL + '/' + str(subcloud.id) + '/prestage', 
headers=FAKE_HEADERS, params=data) - mock_rpc_client().prestage_subcloud.assert_called_once_with( + self.mock_rpc_client().prestage_subcloud.assert_called_once_with( mock.ANY, mock.ANY) self.assertEqual(response.status_int, 200) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(prestage, '_get_system_controller_upgrades') @mock.patch.object(prestage, '_get_prestage_subcloud_info') @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') def test_prestage_subcloud_mgmt_alarm(self, mock_get_prestage_payload, mock_prestage_subcloud_info, - mock_controller_upgrade, - mock_rpc_client): + mock_controller_upgrade): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) subcloud = db_api.subcloud_update( @@ -2087,7 +1979,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): health_report_mgmt_alarm, \ OAM_FLOATING_IP - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data six.assertRaisesRegex(self, webtest.app.AppError, "400 *", @@ -2095,14 +1987,12 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): str(subcloud.id) + '/prestage', headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(prestage, '_get_system_controller_upgrades') @mock.patch.object(prestage, '_get_prestage_subcloud_info') @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') def test_prestage_subcloud_mgmt_alarm_force(self, mock_get_prestage_payload, mock_prestage_subcloud_info, - mock_controller_upgrade, - mock_rpc_client): + mock_controller_upgrade): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) subcloud = db_api.subcloud_update( @@ -2117,26 +2007,24 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): health_report_mgmt_alarm, \ OAM_FLOATING_IP - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data response = self.app.patch_json(FAKE_URL + '/' + str(subcloud.id) + '/prestage', headers=FAKE_HEADERS, params=data) - mock_rpc_client().prestage_subcloud.assert_called_once_with( + self.mock_rpc_client().prestage_subcloud.assert_called_once_with( mock.ANY, mock.ANY) self.assertEqual(response.status_int, 200) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(prestage, '_get_system_controller_upgrades') @mock.patch.object(prestage, '_get_prestage_subcloud_info') @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') def test_prestage_subcloud_not_allowed_state(self, mock_get_prestage_payload, mock_prestage_subcloud_info, - mock_controller_upgrade, - mock_rpc_client): + mock_controller_upgrade): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) subcloud = db_api.subcloud_update(self.ctx, subcloud.id, @@ -2152,7 +2040,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): health_report_no_alarm, \ OAM_FLOATING_IP - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data six.assertRaisesRegex(self, webtest.app.AppError, "400 *", @@ -2160,19 +2048,17 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): str(subcloud.id) + '/prestage', headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(prestage, '_get_system_controller_upgrades') 
@mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') def test_prestage_subcloud_controller_upgrading(self, mock_get_prestage_payload, - mock_controller_upgrade, - mock_rpc_client): + mock_controller_upgrade): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) fake_password = (base64.b64encode('testpass'.encode("utf-8"))).decode('ascii') data = {'sysadmin_password': fake_password} mock_controller_upgrade.return_value = list('upgrade') - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data six.assertRaisesRegex(self, webtest.app.AppError, "400 *", @@ -2180,18 +2066,16 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): str(subcloud.id) + '/prestage', headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(prestage, '_get_system_controller_upgrades') @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') def test_prestage_subcloud_no_password(self, mock_get_prestage_payload, - mock_controller_upgrade, - mock_rpc_client): + mock_controller_upgrade): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) data = {} mock_controller_upgrade.return_value = list() - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data six.assertRaisesRegex(self, webtest.app.AppError, "400 *", @@ -2199,18 +2083,16 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): str(subcloud.id) + '/prestage', headers=FAKE_HEADERS, params=data) - @mock.patch.object(rpc_client, 'ManagerClient') @mock.patch.object(prestage, '_get_system_controller_upgrades') @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload') def test_prestage_subcloud_password_not_encoded(self, mock_get_prestage_payload, - mock_controller_upgrade, - mock_rpc_client): + mock_controller_upgrade): subcloud = fake_subcloud.create_fake_subcloud(self.ctx) data = {'sysadmin_password': 'notencoded'} mock_controller_upgrade.return_value = list() - mock_rpc_client().prestage_subcloud.return_value = True + self.mock_rpc_client().prestage_subcloud.return_value = True mock_get_prestage_payload.return_value = data six.assertRaisesRegex(self, webtest.app.AppError, "400 *", @@ -2222,56 +2104,63 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): payload = { 'management_subnet': "192.168.204.0/24" } - self.assertEqual(cutils.get_management_subnet(payload), payload['management_subnet']) + self.assertEqual(cutils.get_management_subnet(payload), + payload['management_subnet']) def test_get_management_subnet_return_admin(self): payload = { 'admin_subnet': "192.168.205.0/24", 'management_subnet': "192.168.204.0/24" } - self.assertEqual(cutils.get_management_subnet(payload), payload['admin_subnet']) + self.assertEqual(cutils.get_management_subnet(payload), + payload['admin_subnet']) def test_get_management_start_address(self): payload = { 'management_start_address': "192.168.204.2" } - self.assertEqual(cutils.get_management_start_address(payload), payload['management_start_address']) + self.assertEqual(cutils.get_management_start_address(payload), + payload['management_start_address']) def test_get_management_start_address_return_admin(self): payload = { 'admin_start_address': "192.168.205.2", 'management_start_address': "192.168.204.2" } - self.assertEqual(cutils.get_management_start_address(payload), payload['admin_start_address']) 
+ self.assertEqual(cutils.get_management_start_address(payload), + payload['admin_start_address']) def test_get_management_end_address(self): payload = { 'management_end_address': "192.168.204.50" } - self.assertEqual(cutils.get_management_end_address(payload), payload['management_end_address']) + self.assertEqual(cutils.get_management_end_address(payload), + payload['management_end_address']) def test_get_management_end_address_return_admin(self): payload = { 'admin_end_address': "192.168.205.50", 'management_end_address': "192.168.204.50" } - self.assertEqual(cutils.get_management_end_address(payload), payload['admin_end_address']) + self.assertEqual(cutils.get_management_end_address(payload), + payload['admin_end_address']) def test_get_management_gateway_address(self): payload = { 'management_gateway_address': "192.168.204.1" } - self.assertEqual(cutils.get_management_gateway_address(payload), payload['management_gateway_address']) + self.assertEqual(cutils.get_management_gateway_address(payload), + payload['management_gateway_address']) def test_get_management_gateway_address_return_admin(self): payload = { 'admin_gateway_address': "192.168.205.1", 'management_gateway_address': "192.168.204.1" } - self.assertEqual(cutils.get_management_gateway_address(payload), payload['admin_gateway_address']) + self.assertEqual(cutils.get_management_gateway_address(payload), + payload['admin_gateway_address']) - @mock.patch.object(rpc_client, 'ManagerClient') - def test_validate_admin_config_subnet_small(self, mock_rpc_client): + def test_validate_admin_config_subnet_small(self): admin_subnet = "192.168.205.0/32" admin_start_address = "192.168.205.2" @@ -2281,8 +2170,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): six.assertRaisesRegex(self, Exception, "Subnet too small*", - subclouds.SubcloudsController(). - _validate_admin_network_config, + psd_common.validate_admin_network_config, admin_subnet, admin_start_address, admin_end_address, @@ -2290,8 +2178,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): existing_networks=None, operation=None) - @mock.patch.object(rpc_client, 'ManagerClient') - def test_validate_admin_config_start_address_outOfSubnet(self, mock_rpc_client): + def test_validate_admin_config_start_address_outOfSubnet(self): admin_subnet = "192.168.205.0/28" admin_start_address = "192.168.205.200" @@ -2301,8 +2188,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): six.assertRaisesRegex(self, Exception, "Address must be in subnet*", - subclouds.SubcloudsController(). - _validate_admin_network_config, + psd_common.validate_admin_network_config, admin_subnet, admin_start_address, admin_end_address, @@ -2310,8 +2196,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): existing_networks=None, operation=None) - @mock.patch.object(rpc_client, 'ManagerClient') - def test_validate_admin_config_end_address_outOfSubnet(self, mock_rpc_client): + def test_validate_admin_config_end_address_outOfSubnet(self): admin_subnet = "192.168.205.0/28" admin_start_address = "192.168.205.1" @@ -2321,8 +2206,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): six.assertRaisesRegex(self, Exception, "Address must be in subnet*", - subclouds.SubcloudsController(). 
- _validate_admin_network_config, + psd_common.validate_admin_network_config, admin_subnet, admin_start_address, admin_end_address, @@ -2330,8 +2214,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): existing_networks=None, operation=None) - @mock.patch.object(rpc_client, 'ManagerClient') - def test_validate_admin_config_gateway_address_outOfSubnet(self, mock_rpc_client): + def test_validate_admin_config_gateway_address_outOfSubnet(self): admin_subnet = "192.168.205.0/28" admin_start_address = "192.168.205.1" @@ -2341,8 +2224,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest): six.assertRaisesRegex(self, Exception, "Address must be in subnet*", - subclouds.SubcloudsController(). - _validate_admin_network_config, + psd_common.validate_admin_network_config, admin_subnet, admin_start_address, admin_end_address, diff --git a/distributedcloud/dcmanager/tests/unit/manager/test_service.py b/distributedcloud/dcmanager/tests/unit/manager/test_service.py index cc6d60e07..2a05104ba 100644 --- a/distributedcloud/dcmanager/tests/unit/manager/test_service.py +++ b/distributedcloud/dcmanager/tests/unit/manager/test_service.py @@ -78,9 +78,9 @@ class TestDCManagerService(base.DCManagerTestCase): def test_add_subcloud(self, mock_subcloud_manager): self.service_obj.init_managers() self.service_obj.add_subcloud( - self.context, payload={'name': 'testname'}) + self.context, subcloud_id=1, payload={'name': 'testname'}) mock_subcloud_manager().add_subcloud.\ - assert_called_once_with(self.context, mock.ANY) + assert_called_once_with(self.context, 1, mock.ANY) @mock.patch.object(service, 'SubcloudManager') def test_delete_subcloud(self, mock_subcloud_manager): diff --git a/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py b/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py index 63f3b4466..7264ce284 100644 --- a/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py +++ b/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py @@ -12,6 +12,7 @@ # under the License. 
# import base64 +import collections import copy import datetime @@ -28,6 +29,7 @@ sys.modules['fm_core'] = mock.Mock() import threading from dccommon import consts as dccommon_consts +from dccommon import subcloud_install from dccommon.utils import RunAnsible from dcmanager.common import consts from dcmanager.common import exceptions @@ -103,6 +105,7 @@ class FakeProject(object): self.name = projname self.id = projid + FAKE_PROJECTS = [ FakeProject( dccommon_consts.ADMIN_PROJECT_NAME, @@ -252,7 +255,7 @@ class FakeSysinvClient(object): class FakeException(Exception): - pass + pass FAKE_SUBCLOUD_PRESTAGE_PAYLOAD = { @@ -397,6 +400,10 @@ class TestSubcloudManager(base.DCManagerTestCase): self.mock_context.get_admin_context.return_value = self.ctx self.addCleanup(p.stop) + # Reset the regionone_data cache between tests + subcloud_manager.SubcloudManager.regionone_data = \ + collections.defaultdict(dict) + @staticmethod def create_subcloud_static(ctxt, **kwargs): values = { @@ -453,7 +460,7 @@ class TestSubcloudManager(base.DCManagerTestCase): sm.subcloud_deploy_install(self.ctx, subcloud.id, payload=fake_payload) mock_compose_install_command.assert_called_once_with( subcloud_name, - sm._get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX), + cutils.get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX), FAKE_PREVIOUS_SW_VERSION) # Verify subcloud was updated with correct values @@ -497,8 +504,8 @@ class TestSubcloudManager(base.DCManagerTestCase): mock_get_cached_regionone_data.return_value = FAKE_CACHED_REGIONONE_DATA sm = subcloud_manager.SubcloudManager() - subcloud_dict = sm.subcloud_deploy_create(self.ctx, subcloud.id, - payload=values) + subcloud = sm.subcloud_deploy_create(self.ctx, subcloud.id, + payload=values) mock_get_cached_regionone_data.assert_called_once() mock_sysinv_client().create_route.assert_called() self.fake_dcorch_api.add_subcloud.assert_called_once() @@ -510,7 +517,7 @@ class TestSubcloudManager(base.DCManagerTestCase): # Verify subcloud was updated with correct values self.assertEqual(consts.DEPLOY_STATE_CREATED, - subcloud_dict['deploy-status']) + subcloud.deploy_status) # Verify subcloud was updated with correct values updated_subcloud = db_api.subcloud_get_by_name(self.ctx, values['name']) @@ -529,12 +536,12 @@ class TestSubcloudManager(base.DCManagerTestCase): mock_keystone_client.side_effect = FakeException('boom') sm = subcloud_manager.SubcloudManager() - subcloud_dict = sm.subcloud_deploy_create(self.ctx, subcloud.id, - payload=values) + subcloud = sm.subcloud_deploy_create(self.ctx, subcloud.id, + payload=values) # Verify subcloud was updated with correct values self.assertEqual(consts.DEPLOY_STATE_CREATE_FAILED, - subcloud_dict['deploy-status']) + subcloud.deploy_status) # Verify subcloud was updated with correct values updated_subcloud = db_api.subcloud_get_by_name(self.ctx, values['name']) @@ -548,7 +555,7 @@ class TestSubcloudManager(base.DCManagerTestCase): @mock.patch.object(RunAnsible, 'exec_playbook') def test_subcloud_deploy_bootstrap(self, mock_exec_playbook, mock_update_yml, mock_get_playbook_for_software_version, - mock_keyring, create_subcloud_inventory): + mock_keyring, mock_create_subcloud_inventory): mock_get_playbook_for_software_version.return_value = "22.12" mock_keyring.get_password.return_value = "testpass" mock_exec_playbook.return_value = False @@ -675,73 +682,72 @@ class TestSubcloudManager(base.DCManagerTestCase): self.assertEqual(consts.DEPLOY_STATE_DONE, updated_subcloud.deploy_status) + 
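The reworked test_add_subcloud that follows drives the whole phased flow through subcloud_manager and verifies it by patching RunAnsible.exec_playbook once and asserting three playbook invocations (install, bootstrap, config), i.e. mock_exec_playbook.call_count == 3. The self-contained sketch below illustrates that "patch the runner, count the phases" pattern with unittest.mock; PhasedDeployer, run_playbook and deploy are invented names for illustration only, not dcmanager APIs.

# Minimal, standalone sketch of the verification pattern used by the
# reworked test_add_subcloud. All class/method names here are invented;
# only the mock.patch.object / call_count technique is taken from the diff.
from unittest import mock
import unittest


class PhasedDeployer(object):
    """Toy stand-in for a manager that runs install/bootstrap/config."""

    def run_playbook(self, phase):
        # The real code would shell out to ansible-playbook here.
        raise RuntimeError("unexpected real playbook run for %s" % phase)

    def deploy(self):
        # One playbook run per deployment phase.
        for phase in ("install", "bootstrap", "config"):
            self.run_playbook(phase)


class TestPhasedDeployer(unittest.TestCase):
    def test_deploy_runs_all_three_phases(self):
        with mock.patch.object(PhasedDeployer, "run_playbook") as mock_run:
            # False mirrors the value the diff assigns to
            # RunAnsible.exec_playbook (i.e. "no failure").
            mock_run.return_value = False
            PhasedDeployer().deploy()
        # Exactly one run for install, bootstrap and config.
        self.assertEqual(mock_run.call_count, 3)


if __name__ == "__main__":
    unittest.main()

The same idea recurs throughout the diff: instead of asserting on intermediate helpers, the tests patch the single external boundary (the Ansible runner) and check how many times, and with which arguments, it was crossed.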
@mock.patch.object(subcloud_install.SubcloudInstall, 'prep') + @mock.patch.object(subcloud_install, 'KeystoneClient') + @mock.patch.object(subcloud_install, 'SysinvClient') @mock.patch.object(subcloud_manager.SubcloudManager, - 'compose_apply_command') - @mock.patch.object(subcloud_manager.SubcloudManager, - 'compose_rehome_command') + '_write_subcloud_ansible_config') @mock.patch.object(subcloud_manager.SubcloudManager, '_create_intermediate_ca_cert') - @mock.patch.object(cutils, 'delete_subcloud_inventory') + @mock.patch.object(subcloud_manager.SubcloudManager, + '_create_addn_hosts_dc') @mock.patch.object(subcloud_manager, 'OpenStackDriver') @mock.patch.object(subcloud_manager, 'SysinvClient') @mock.patch.object(subcloud_manager.SubcloudManager, - '_get_cached_regionone_data') - @mock.patch.object(subcloud_manager.SubcloudManager, - '_create_addn_hosts_dc') + '_write_deploy_files') @mock.patch.object(cutils, 'create_subcloud_inventory') - @mock.patch.object(subcloud_manager.SubcloudManager, - '_write_subcloud_ansible_config') - @mock.patch.object(subcloud_manager, - 'keyring') - @mock.patch.object(threading.Thread, - 'start') - def test_add_subcloud(self, mock_thread_start, mock_keyring, - mock_write_subcloud_ansible_config, - mock_create_subcloud_inventory, - mock_create_addn_hosts, - mock_get_cached_regionone_data, - mock_sysinv_client, - mock_keystone_client, - mock_delete_subcloud_inventory, + @mock.patch.object(subcloud_manager, 'keyring') + @mock.patch.object(cutils, 'get_playbook_for_software_version') + @mock.patch.object(cutils, 'update_values_on_yaml_file') + @mock.patch.object(RunAnsible, 'exec_playbook') + def test_add_subcloud(self, mock_exec_playbook, mock_update_yml, + mock_get_playbook_for_software_version, + mock_keyring, mock_create_subcloud_inventory, + mock_write_deploy_files, mock_sysinv_client, + mock_openstack_driver, mock_create_addn_hosts, mock_create_intermediate_ca_cert, - mock_compose_rehome_command, - mock_compose_apply_command): - values = utils.create_subcloud_dict(base.SUBCLOUD_SAMPLE_DATA_0) - values['deploy_status'] = consts.DEPLOY_STATE_NONE + mock_write_subcloud_ansible_config, + mock_install_ks_client, mock_install_sysinvclient, + mock_install_prep): + # Prepare the payload + install_values = copy.copy(fake_subcloud.FAKE_SUBCLOUD_INSTALL_VALUES) + install_values['software_version'] = SW_VERSION + payload = {**fake_subcloud.FAKE_BOOTSTRAP_VALUE, + **fake_subcloud.FAKE_BOOTSTRAP_FILE_DATA, + "sysadmin_password": "testpass", + 'bmc_password': 'bmc_pass', + 'install_values': install_values, + 'software_version': FAKE_PREVIOUS_SW_VERSION, + "deploy_playbook": "test_playbook.yaml", + "deploy_overrides": "test_overrides.yaml", + "deploy_chart": "test_chart.yaml", + "deploy_config": "subcloud1.yaml"} - # dcmanager add_subcloud queries the data from the db - subcloud = self.create_subcloud_static(self.ctx, name=values['name']) + # Create subcloud in DB + subcloud = self.create_subcloud_static(self.ctx, name=payload['name']) - mock_keystone_client().keystone_client = FakeKeystoneClient() - mock_keyring.get_password.return_value = "testpassword" - mock_get_cached_regionone_data.return_value = FAKE_CACHED_REGIONONE_DATA + # Mock return values + mock_get_playbook_for_software_version.return_value = SW_VERSION + mock_keyring.get_password.return_value = payload['sysadmin_password'] + mock_exec_playbook.return_value = False + mock_openstack_driver().keystone_client = FakeKeystoneClient() + # Call the add method sm = subcloud_manager.SubcloudManager() - subcloud_dict 
= sm.add_subcloud(self.ctx, payload=values) - mock_get_cached_regionone_data.assert_called_once() - mock_sysinv_client().create_route.assert_called() - self.fake_dcorch_api.add_subcloud.assert_called_once() - mock_create_addn_hosts.assert_called_once() - mock_create_subcloud_inventory.assert_called_once() - mock_write_subcloud_ansible_config.assert_called_once() - mock_keyring.get_password.assert_called() - mock_thread_start.assert_called_once() - mock_create_intermediate_ca_cert.assert_called_once() - mock_compose_rehome_command.assert_not_called() - mock_compose_apply_command.assert_called_once_with( - values['name'], - sm._get_ansible_filename(values['name'], consts.INVENTORY_FILE_POSTFIX), - subcloud['software_version']) + sm.add_subcloud(self.ctx, subcloud.id, payload) - # Verify subcloud was updated with correct values - self.assertEqual(consts.DEPLOY_STATE_PRE_DEPLOY, - subcloud_dict['deploy-status']) - - # Verify subcloud was updated with correct values - updated_subcloud = db_api.subcloud_get_by_name(self.ctx, values['name']) - self.assertEqual(consts.DEPLOY_STATE_PRE_DEPLOY, + # Verify results + updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name) + self.assertEqual(consts.DEPLOY_STATE_DONE, updated_subcloud.deploy_status) + mock_write_deploy_files.assert_called() + mock_keyring.get_password.assert_called() + mock_update_yml.assert_called() + mock_create_subcloud_inventory.assert_called() + mock_get_playbook_for_software_version.assert_called_once() + self.assertEqual(mock_exec_playbook.call_count, 3) + @mock.patch.object(subcloud_manager.SubcloudManager, 'compose_rehome_command') @mock.patch.object(subcloud_manager.SubcloudManager, @@ -758,10 +764,8 @@ class TestSubcloudManager(base.DCManagerTestCase): '_write_subcloud_ansible_config') @mock.patch.object(subcloud_manager, 'keyring') - @mock.patch.object(threading.Thread, - 'start') def test_add_subcloud_with_migration_option( - self, mock_thread_start, mock_keyring, + self, mock_keyring, mock_write_subcloud_ansible_config, mock_create_subcloud_inventory, mock_create_addn_hosts, @@ -783,24 +787,22 @@ class TestSubcloudManager(base.DCManagerTestCase): mock_get_cached_regionone_data.return_value = FAKE_CACHED_REGIONONE_DATA sm = subcloud_manager.SubcloudManager() - subcloud_dict = sm.add_subcloud(self.ctx, payload=values) + with mock.patch.object(sm, 'run_deploy_thread') as mock_run_deploy: + sm.add_subcloud(self.ctx, subcloud.id, payload=values) + mock_get_cached_regionone_data.assert_called_once() mock_sysinv_client().create_route.assert_called() self.fake_dcorch_api.add_subcloud.assert_called_once() mock_create_addn_hosts.assert_called_once() mock_create_subcloud_inventory.assert_called_once() mock_write_subcloud_ansible_config.assert_called_once() - mock_thread_start.assert_called_once() + mock_run_deploy.assert_called_once() mock_create_intermediate_ca_cert.assert_called_once() mock_compose_rehome_command.assert_called_once_with( values['name'], sm._get_ansible_filename(values['name'], consts.INVENTORY_FILE_POSTFIX), subcloud['software_version']) - # Verify subcloud was updated with correct values - self.assertEqual(consts.DEPLOY_STATE_PRE_REHOME, - subcloud_dict['deploy-status']) - # Verify subcloud was updated with correct values updated_subcloud = db_api.subcloud_get_by_name(self.ctx, values['name']) self.assertEqual(consts.DEPLOY_STATE_PRE_REHOME, @@ -809,28 +811,28 @@ class TestSubcloudManager(base.DCManagerTestCase): @mock.patch.object(subcloud_manager, 'OpenStackDriver') 
@mock.patch.object(subcloud_manager, 'SysinvClient') @mock.patch.object(subcloud_manager.SubcloudManager, '_get_cached_regionone_data') - def test_add_subcloud_deploy_prep_failed(self, - mock_get_cached_regionone_data, - mock_sysinv_client, - mock_keystone_client): + def test_add_subcloud_create_failed(self, + mock_get_cached_regionone_data, + mock_sysinv_client, + mock_keystone_client): values = utils.create_subcloud_dict(base.SUBCLOUD_SAMPLE_DATA_0) services = FAKE_SERVICES # dcmanager add_subcloud queries the data from the db - self.create_subcloud_static(self.ctx, name=values['name']) + subcloud = self.create_subcloud_static(self.ctx, name=values['name']) self.fake_dcorch_api.add_subcloud.side_effect = FakeException('boom') mock_get_cached_regionone_data.return_value = FAKE_CACHED_REGIONONE_DATA mock_keystone_client().services_list = services sm = subcloud_manager.SubcloudManager() - sm.add_subcloud(self.ctx, payload=values) + sm.add_subcloud(self.ctx, subcloud.id, payload=values) mock_get_cached_regionone_data.assert_called_once() mock_sysinv_client().create_route.assert_called() # Verify subcloud was updated with correct values subcloud = db_api.subcloud_get_by_name(self.ctx, values['name']) - self.assertEqual(consts.DEPLOY_STATE_DEPLOY_PREP_FAILED, + self.assertEqual(consts.DEPLOY_STATE_CREATE_FAILED, subcloud.deploy_status) @mock.patch.object(subcloud_manager, 'OpenStackDriver') @@ -843,14 +845,14 @@ class TestSubcloudManager(base.DCManagerTestCase): services = FAKE_SERVICES # dcmanager add_subcloud queries the data from the db - self.create_subcloud_static(self.ctx, name=values['name']) + subcloud = self.create_subcloud_static(self.ctx, name=values['name']) self.fake_dcorch_api.add_subcloud.side_effect = FakeException('boom') mock_get_cached_regionone_data.return_value = FAKE_CACHED_REGIONONE_DATA mock_keystone_client().services_list = services sm = subcloud_manager.SubcloudManager() - sm.add_subcloud(self.ctx, payload=values) + sm.add_subcloud(self.ctx, subcloud.id, payload=values) mock_get_cached_regionone_data.assert_called_once() mock_sysinv_client().create_route.assert_called() @@ -1676,9 +1678,8 @@ class TestSubcloudManager(base.DCManagerTestCase): mock_prepare_for_deployment.assert_called_once() def test_get_ansible_filename(self): - sm = subcloud_manager.SubcloudManager() - filename = sm._get_ansible_filename('subcloud1', - consts.INVENTORY_FILE_POSTFIX) + filename = cutils.get_ansible_filename('subcloud1', + consts.INVENTORY_FILE_POSTFIX) self.assertEqual(filename, f'{dccommon_consts.ANSIBLE_OVERRIDES_PATH}/subcloud1_inventory.yml') @@ -1701,15 +1702,15 @@ class TestSubcloudManager(base.DCManagerTestCase): ) @mock.patch('os.path.isfile') - def test_compose_apply_command(self, mock_isfile): + def test_compose_bootstrap_command(self, mock_isfile): mock_isfile.return_value = True sm = subcloud_manager.SubcloudManager() - apply_command = sm.compose_apply_command( + bootstrap_command = sm.compose_bootstrap_command( 'subcloud1', f'{dccommon_consts.ANSIBLE_OVERRIDES_PATH}/subcloud1_inventory.yml', FAKE_PREVIOUS_SW_VERSION) self.assertEqual( - apply_command, + bootstrap_command, [ 'ansible-playbook', cutils.get_playbook_for_software_version( @@ -1722,19 +1723,19 @@ class TestSubcloudManager(base.DCManagerTestCase): ] ) - def test_compose_deploy_command(self): + def test_compose_config_command(self): sm = subcloud_manager.SubcloudManager() fake_payload = {"sysadmin_password": "testpass", "deploy_playbook": "test_playbook.yaml", "deploy_overrides": "test_overrides.yaml", 
"deploy_chart": "test_chart.yaml", "deploy_config": "subcloud1.yaml"} - deploy_command = sm.compose_deploy_command( + config_command = sm.compose_config_command( 'subcloud1', f'{dccommon_consts.ANSIBLE_OVERRIDES_PATH}/subcloud1_inventory.yml', fake_payload) self.assertEqual( - deploy_command, + config_command, [ 'ansible-playbook', 'test_playbook.yaml', '-e', f'@{dccommon_consts.ANSIBLE_OVERRIDES_PATH}/subcloud1_deploy_values.yml', '-i', @@ -1773,7 +1774,7 @@ class TestSubcloudManager(base.DCManagerTestCase): @mock.patch.object( subcloud_manager.SubcloudManager, 'compose_install_command') @mock.patch.object( - subcloud_manager.SubcloudManager, 'compose_apply_command') + subcloud_manager.SubcloudManager, 'compose_bootstrap_command') @mock.patch.object(cutils, 'create_subcloud_inventory') @mock.patch.object(subcloud_manager.SubcloudManager, '_get_cached_regionone_data') @mock.patch.object(subcloud_manager, 'OpenStackDriver') @@ -1782,7 +1783,7 @@ class TestSubcloudManager(base.DCManagerTestCase): def test_reinstall_subcloud( self, mock_keyring, mock_thread_start, mock_keystone_client, mock_get_cached_regionone_data, mock_create_subcloud_inventory, - mock_compose_apply_command, mock_compose_install_command, + mock_compose_bootstrap_command, mock_compose_install_command, mock_create_intermediate_ca_cert, mock_write_subcloud_ansible_config): subcloud_name = 'subcloud1' @@ -1812,11 +1813,11 @@ class TestSubcloudManager(base.DCManagerTestCase): mock_write_subcloud_ansible_config.assert_called_once() mock_compose_install_command.assert_called_once_with( subcloud_name, - sm._get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX), + cutils.get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX), FAKE_PREVIOUS_SW_VERSION) - mock_compose_apply_command.assert_called_once_with( + mock_compose_bootstrap_command.assert_called_once_with( subcloud_name, - sm._get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX), + cutils.get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX), FAKE_PREVIOUS_SW_VERSION) mock_thread_start.assert_called_once() @@ -2336,13 +2337,10 @@ class TestSubcloudManager(base.DCManagerTestCase): self.assertEqual(mock_run_ansible.call_count, 2) # Verify the "image_list_file" was passed to the prestage image playbook # for the remote prestage - self.assertTrue( - 'image_list_file' in mock_run_ansible.call_args_list[1].args[1][5]) + self.assertIn('image_list_file', mock_run_ansible.call_args_list[1].args[1][5]) # Verify the prestage request release was passed to the playbooks - self.assertTrue( - FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[0].args[1][5]) - self.assertTrue( - FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[1].args[1][5]) + self.assertIn(FAKE_PRESTAGE_RELEASE, mock_run_ansible.call_args_list[0].args[1][5]) + self.assertIn(FAKE_PRESTAGE_RELEASE, mock_run_ansible.call_args_list[1].args[1][5]) @mock.patch.object(os_path, 'isdir') @mock.patch.object(cutils, 'get_filename_by_prefix') @@ -2439,10 +2437,8 @@ class TestSubcloudManager(base.DCManagerTestCase): self.assertTrue( 'image_list_file' not in mock_run_ansible.call_args_list[1].args[1][5]) # Verify the prestage request release was passed to the playbooks - self.assertTrue( - FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[0].args[1][5]) - self.assertTrue( - FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[1].args[1][5]) + self.assertIn(FAKE_PRESTAGE_RELEASE, mock_run_ansible.call_args_list[0].args[1][5]) + self.assertIn(FAKE_PRESTAGE_RELEASE, 
mock_run_ansible.call_args_list[1].args[1][5]) @mock.patch.object(prestage, 'prestage_images') @mock.patch.object(prestage, 'prestage_packages')