From f174505b6666508a13bf670de89b9952d4c6ed93 Mon Sep 17 00:00:00 2001
From: Li Zhu
Date: Tue, 18 Apr 2023 23:40:21 -0400
Subject: [PATCH] Add release optionality to subcloud prestage
Add an optional --release parameter to subcloud prestage and
prestage_strategy create commands to enable release optionality
in subcloud prestage.
Test Plan:
- Verify successful subcloud prestage with specified 21.12 or 22.12
release.
- Verify successful subcloud prestage with the active release (22.12)
when the release parameter is absent.
- Verify the subcloud prestage request is rejected when specifying
a release other than the subcloud and system controller's current
release(s) and any inactive load release.
- Verify successful creation of prestage strategy with specified
21.12 or 22.12 release.
- Verify successful creation of prestage strategy with the active
release (22.12) when the release parameter is absent.
Depends-On: https://review.opendev.org/c/starlingx/ansible-playbooks/+/880788
Story: 2010611
Task: 47848
Signed-off-by: lzhu1
Change-Id: I125b164c223074b42f16c9cf039771a4802d44dc
---
api-ref/source/api-ref-dcmanager-v1.rst | 72 +++++++
api-ref/source/parameters.yaml | 12 ++
.../subcloud-patch-prestage-request.json | 5 +
.../subcloud-patch-prestage-response.json | 26 +++
distributedcloud/dccommon/subcloud_install.py | 7 +-
.../dcmanager/api/controllers/v1/subclouds.py | 14 +-
distributedcloud/dcmanager/common/consts.py | 10 +-
distributedcloud/dcmanager/common/prestage.py | 177 ++++++------------
distributedcloud/dcmanager/common/utils.py | 30 +++
.../dcmanager/manager/subcloud_manager.py | 24 +--
.../orchestrator/prestage_orch_thread.py | 4 +-
.../orchestrator/states/prestage/states.py | 42 ++---
.../orchestrator/sw_update_manager.py | 18 +-
.../unit/api/v1/controllers/test_subclouds.py | 42 ++++-
.../unit/manager/test_subcloud_manager.py | 158 +++++-----------
.../states/prestage/test_states.py | 91 +--------
.../orchestrator/test_sw_update_manager.py | 11 +-
17 files changed, 362 insertions(+), 381 deletions(-)
create mode 100644 api-ref/source/samples/subclouds/subcloud-patch-prestage-request.json
create mode 100644 api-ref/source/samples/subclouds/subcloud-patch-prestage-response.json
diff --git a/api-ref/source/api-ref-dcmanager-v1.rst b/api-ref/source/api-ref-dcmanager-v1.rst
index cdca8937f..3869ff74d 100644
--- a/api-ref/source/api-ref-dcmanager-v1.rst
+++ b/api-ref/source/api-ref-dcmanager-v1.rst
@@ -541,6 +541,78 @@ Response Example
.. literalinclude:: samples/subclouds/subcloud-patch-reinstall-response.json
:language: json
+********************************
+Prestage a specific subcloud
+********************************
+
+.. rest_method:: PATCH /v1.0/subclouds/{subcloud}/prestage
+
+Prestages a subcloud with software packages and container image archives.
+The prestaged data is stored in the subcloud persistent file system
+that can be used when the subcloud is reinstalled next.
+
+**Normal response codes**
+
+200
+
+**Error response codes**
+
+badRequest (400), unauthorized (401), forbidden (403), badMethod (405),
+HTTPUnprocessableEntity (422), internalServerError (500),
+serviceUnavailable (503)
+
+**Request parameters**
+
+.. rest_parameters:: parameters.yaml
+
+ - subcloud: subcloud_uri
+ - release: release
+ - sysadmin_password: sysadmin_password
+ - force: force
+
+Request Example
+----------------
+
+.. literalinclude:: samples/subclouds/subcloud-patch-prestage-request.json
+ :language: json
+
+**Response parameters**
+
+.. rest_parameters:: parameters.yaml
+
+ - id: subcloud_id
+ - group_id: group_id
+ - name: subcloud_name
+ - description: subcloud_description
+ - location: subcloud_location
+ - software-version: software_version
+ - availability-status: availability_status
+ - error-description: error_description
+ - deploy-status: deploy_status
+ - backup-status: backup_status
+ - backup-datetime: backup_datetime
+ - openstack-installed: openstack_installed
+ - management-state: management_state
+ - systemcontroller-gateway-ip: systemcontroller_gateway_ip
+ - management-start-ip: management_start_ip
+ - management-end-ip: management_end_ip
+ - management-subnet: management_subnet
+ - management-gateway-ip: management_gateway_ip
+ - prestage-software-version: prestage_software_version
+ - created-at: created_at
+ - updated-at: updated_at
+ - data_install: data_install
+ - data_upgrade: data_upgrade
+ - endpoint_sync_status: endpoint_sync_status
+ - sync_status: sync_status
+ - endpoint_type: sync_status_type
+
+Response Example
+----------------
+
+.. literalinclude:: samples/subclouds/subcloud-patch-prestage-response.json
+ :language: json
+
*****************************************
Update the status of a specific subcloud
*****************************************
diff --git a/api-ref/source/parameters.yaml b/api-ref/source/parameters.yaml
index 12071bcbf..2329b4122 100644
--- a/api-ref/source/parameters.yaml
+++ b/api-ref/source/parameters.yaml
@@ -225,6 +225,12 @@ extra_args:
in: body
required: false
type: dictionary
+force:
+ description: |
+ Indicates whether to disregard subcloud management alarm condition.
+ in: body
+ required: false
+ type: boolean
group_id:
description: |
The ID of a subcloud group. Default is 1.
@@ -317,6 +323,12 @@ patch_strategy_upload_only:
in: body
required: false
type: boolean
+prestage_software_version:
+ description: |
+ The prestage software version for the subcloud.
+ in: body
+ required: true
+ type: string
region_name:
description: |
The name provisioned for the subcloud (synonym for subcloud name).
diff --git a/api-ref/source/samples/subclouds/subcloud-patch-prestage-request.json b/api-ref/source/samples/subclouds/subcloud-patch-prestage-request.json
new file mode 100644
index 000000000..9c2f59134
--- /dev/null
+++ b/api-ref/source/samples/subclouds/subcloud-patch-prestage-request.json
@@ -0,0 +1,5 @@
+{
+ "sysadmin_password": "XXXXXXX",
+ "release": "21.12",
+ "force": "true"
+}
diff --git a/api-ref/source/samples/subclouds/subcloud-patch-prestage-response.json b/api-ref/source/samples/subclouds/subcloud-patch-prestage-response.json
new file mode 100644
index 000000000..872eb5289
--- /dev/null
+++ b/api-ref/source/samples/subclouds/subcloud-patch-prestage-response.json
@@ -0,0 +1,26 @@
+{
+ "id": 1,
+ "name": "subcloud1",
+ "created-at": "2021-11-08T18:41:19.530228",
+ "updated-at": "2021-11-15T14:15:59.944851",
+ "availability-status": "online",
+ "data_install": {
+ "bootstrap_interface": "eno1"
+ },
+ "data_upgrade": null,
+ "deploy-status": "complete",
+ "backup-status": "complete",
+ "backup-datetime": "2022-07-08 11:23:58.132134",
+ "description": "Ottawa Site",
+ "group_id": 1,
+ "location": "YOW",
+ "management-end-ip": "192.168.101.50",
+ "management-gateway-ip": "192.168.101.1",
+ "management-start-ip": "192.168.101.2",
+ "management-state": "unmanaged",
+ "management-subnet": "192.168.101.0/24",
+ "openstack-installed": false,
+ "software-version": "21.12",
+ "systemcontroller-gateway-ip": "192.168.204.101",
+ "prestage-software-version": "21.12"
+}
diff --git a/distributedcloud/dccommon/subcloud_install.py b/distributedcloud/dccommon/subcloud_install.py
index bc391ccf0..8233e6493 100644
--- a/distributedcloud/dccommon/subcloud_install.py
+++ b/distributedcloud/dccommon/subcloud_install.py
@@ -416,7 +416,7 @@ class SubcloudInstall(object):
result.stdout.decode('utf-8').replace('\n', ', '))
raise Exception(msg)
- def cleanup(self):
+ def cleanup(self, software_version=None):
# Do not remove the input_iso if it is in the Load Vault
if (self.input_iso is not None and
not self.input_iso.startswith(consts.LOAD_VAULT_DIR) and
@@ -424,7 +424,7 @@ class SubcloudInstall(object):
os.remove(self.input_iso)
if (self.www_root is not None and os.path.isdir(self.www_root)):
- if dccommon_utils.is_debian():
+ if dccommon_utils.is_debian(software_version):
cleanup_cmd = [
GEN_ISO_COMMAND,
"--id", self.name,
@@ -439,6 +439,7 @@ class SubcloudInstall(object):
"--delete"
]
try:
+ LOG.info("Running install cleanup: %s", self.name)
with open(os.devnull, "w") as fnull:
subprocess.check_call( # pylint: disable=E1102
cleanup_cmd, stdout=fnull, stderr=fnull)
@@ -569,7 +570,7 @@ class SubcloudInstall(object):
if os.path.isdir(iso_dir_path):
LOG.info("Found preexisting iso dir for subcloud %s, cleaning up",
self.name)
- self.cleanup()
+ self.cleanup(software_version)
# Update the default iso image based on the install values
# Runs gen-bootloader-iso.sh
diff --git a/distributedcloud/dcmanager/api/controllers/v1/subclouds.py b/distributedcloud/dcmanager/api/controllers/v1/subclouds.py
index dff840229..9040926c2 100644
--- a/distributedcloud/dcmanager/api/controllers/v1/subclouds.py
+++ b/distributedcloud/dcmanager/api/controllers/v1/subclouds.py
@@ -213,7 +213,7 @@ class SubcloudsController(object):
@staticmethod
def _get_prestage_payload(request):
- fields = ['sysadmin_password', 'force']
+ fields = ['sysadmin_password', 'force', consts.PRESTAGE_REQUEST_RELEASE]
payload = {
'force': False
}
@@ -243,6 +243,8 @@ class SubcloudsController(object):
else:
pecan.abort(
400, _('Invalid value for force option: %s' % val))
+ elif field == consts.PRESTAGE_REQUEST_RELEASE:
+ payload[consts.PRESTAGE_REQUEST_RELEASE] = val
return payload
def _upload_config_file(self, file_item, config_file, config_type):
@@ -1516,12 +1518,18 @@ class SubcloudsController(object):
LOG.exception("validate_prestage failed")
pecan.abort(400, _(str(exc)))
+ prestage_software_version = payload.get(
+ consts.PRESTAGE_REQUEST_RELEASE, tsc.SW_VERSION)
+
try:
self.dcmanager_rpc_client.prestage_subcloud(context, payload)
# local update to deploy_status - this is just for CLI response:
- subcloud.deploy_status = consts.PRESTAGE_STATE_PREPARE
- return db_api.subcloud_db_model_to_dict(subcloud)
+ subcloud.deploy_status = consts.PRESTAGE_STATE_PACKAGES
+ subcloud_dict = db_api.subcloud_db_model_to_dict(subcloud)
+ subcloud_dict.update(
+ {consts.PRESTAGE_SOFTWARE_VERSION: prestage_software_version})
+ return subcloud_dict
except RemoteError as e:
pecan.abort(422, e.value)
except Exception:
diff --git a/distributedcloud/dcmanager/common/consts.py b/distributedcloud/dcmanager/common/consts.py
index 9b613f400..3a686f5cc 100644
--- a/distributedcloud/dcmanager/common/consts.py
+++ b/distributedcloud/dcmanager/common/consts.py
@@ -154,7 +154,6 @@ STRATEGY_STATE_APPLYING_VIM_KUBE_ROOTCA_UPDATE_STRATEGY = \
# Prestage orchestration states (ordered)
STRATEGY_STATE_PRESTAGE_PRE_CHECK = "prestage-precheck"
-STRATEGY_STATE_PRESTAGE_PREPARE = "prestage-prepare"
STRATEGY_STATE_PRESTAGE_PACKAGES = "prestaging-packages"
STRATEGY_STATE_PRESTAGE_IMAGES = "prestaging-images"
@@ -255,15 +254,11 @@ UPGRADE_STATE_ACTIVATION_FAILED = 'activation-failed'
UPGRADE_STATE_ACTIVATION_COMPLETE = 'activation-complete'
# Prestage States
-PRESTAGE_STATE_PREPARE = STRATEGY_STATE_PRESTAGE_PREPARE
PRESTAGE_STATE_PACKAGES = STRATEGY_STATE_PRESTAGE_PACKAGES
PRESTAGE_STATE_IMAGES = STRATEGY_STATE_PRESTAGE_IMAGES
PRESTAGE_STATE_FAILED = 'prestage-failed'
PRESTAGE_STATE_COMPLETE = 'prestage-complete'
-# Prestage preparation timeout
-PRESTAGE_PREPARE_TIMEOUT = 900 # 15 minutes
-
# Alarm aggregation
ALARMS_DISABLED = "disabled"
ALARM_OK_STATUS = "OK"
@@ -302,6 +297,7 @@ SYSTEM_MODE_DUPLEX_DIRECT = "duplex-direct"
# Load states
ACTIVE_LOAD_STATE = 'active'
+INACTIVE_LOAD_STATE = 'inactive'
IMPORTING_LOAD_STATE = 'importing'
IMPORTED_LOAD_STATE = 'imported'
IMPORTED_METADATA_LOAD_STATE = 'imported-metadata'
@@ -324,6 +320,10 @@ EXTRA_ARGS_FORCE = 'force'
# extra_args for patching
EXTRA_ARGS_UPLOAD_ONLY = 'upload-only'
+# http request/response arguments for prestage
+PRESTAGE_SOFTWARE_VERSION = 'prestage-software-version'
+PRESTAGE_REQUEST_RELEASE = 'release'
+
# Device Image Bitstream Types
BITSTREAM_TYPE_ROOT_KEY = 'root-key'
BITSTREAM_TYPE_FUNCTIONAL = 'functional'
diff --git a/distributedcloud/dcmanager/common/prestage.py b/distributedcloud/dcmanager/common/prestage.py
index bc3617eb8..557f8ecfe 100644
--- a/distributedcloud/dcmanager/common/prestage.py
+++ b/distributedcloud/dcmanager/common/prestage.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2023 Wind River Systems, Inc.
+# Copyright (c) 2022-2023 Wind River Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -34,7 +34,6 @@ from dccommon.drivers.openstack.sdk_platform import OpenStackDriver
from dccommon.drivers.openstack.sysinv_v1 import SysinvClient
from dccommon.exceptions import PlaybookExecutionFailed
from dccommon.exceptions import PlaybookExecutionTimeout
-from dccommon.utils import LAST_SW_VERSION_IN_CENTOS
from dccommon.utils import run_playbook
from dcmanager.common import consts
@@ -45,14 +44,7 @@ from dcmanager.db import api as db_api
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
-PRESTAGING_REPO_DIR = '/var/run/prestaging_repo'
-DEPLOY_BASE_DIR = dccommon_consts.DEPLOY_DIR + '/' + SW_VERSION
-PRESTAGE_PREPARATION_COMPLETED_FILE = os.path.join(
- PRESTAGING_REPO_DIR, '.prestage_preparation_completed')
-PRESTAGE_PREPARATION_FAILED_FILE = os.path.join(
- DEPLOY_BASE_DIR, '.prestage_preparation_failed')
-ANSIBLE_PREPARE_PRESTAGE_PACKAGES_PLAYBOOK = \
- "/usr/share/ansible/stx-ansible/playbooks/prepare_prestage_packages.yml"
+DEPLOY_BASE_DIR = dccommon_consts.DEPLOY_DIR
ANSIBLE_PRESTAGE_SUBCLOUD_PACKAGES_PLAYBOOK = \
"/usr/share/ansible/stx-ansible/playbooks/prestage_sw_packages.yml"
ANSIBLE_PRESTAGE_SUBCLOUD_IMAGES_PLAYBOOK = \
@@ -61,8 +53,7 @@ ANSIBLE_PRESTAGE_INVENTORY_SUFFIX = '_prestage_inventory.yml'
def is_deploy_status_prestage(deploy_status):
- return deploy_status in (consts.PRESTAGE_STATE_PREPARE,
- consts.PRESTAGE_STATE_PACKAGES,
+ return deploy_status in (consts.PRESTAGE_STATE_PACKAGES,
consts.PRESTAGE_STATE_IMAGES,
consts.PRESTAGE_STATE_FAILED,
consts.PRESTAGE_STATE_COMPLETE)
@@ -121,7 +112,7 @@ def global_prestage_validate(payload):
" Details: %s" % ex)
-def initial_subcloud_validate(subcloud):
+def initial_subcloud_validate(subcloud, installed_loads, software_version):
"""Basic validation a subcloud prestage operation.
Raises a PrestageCheckFailedException on failure.
@@ -152,6 +143,18 @@ def initial_subcloud_validate(subcloud):
" The current deploy status is %s."
% (', '.join(allowed_deploy_states), subcloud.deploy_status))
+ # The request software version must be either the same as the software version
+ # of the subcloud or any active/inactive/imported load on the system controller
+ # (can be checked with "system load-list" command).
+ if software_version and \
+ software_version != subcloud.software_version and \
+ software_version not in installed_loads:
+ raise exceptions.PrestagePreCheckFailedException(
+ subcloud=subcloud.name,
+ orch_skip=True,
+ details="Specified release is not supported. "
+ "%s version must first be imported" % software_version)
+
def validate_prestage(subcloud, payload):
"""Validate a subcloud prestage operation.
@@ -167,8 +170,14 @@ def validate_prestage(subcloud, payload):
"""
LOG.debug("Validating subcloud prestage '%s'", subcloud.name)
+ installed_loads = []
+ software_version = None
+ if payload.get(consts.PRESTAGE_REQUEST_RELEASE):
+ software_version = payload.get(consts.PRESTAGE_REQUEST_RELEASE)
+ installed_loads = utils.get_systemcontroller_installed_loads()
+
# re-run the initial validation
- initial_subcloud_validate(subcloud)
+ initial_subcloud_validate(subcloud, installed_loads, software_version)
subcloud_type, system_health, oam_floating_ip = \
_get_prestage_subcloud_info(subcloud.name)
@@ -192,18 +201,10 @@ def validate_prestage(subcloud, payload):
return oam_floating_ip
-@utils.synchronized('prestage-prepare-cleanup', external=True)
-def cleanup_failed_preparation():
- """Remove the preparation failed file if it exists from a previous run"""
- if os.path.exists(PRESTAGE_PREPARATION_FAILED_FILE):
- LOG.debug("Cleanup: removing %s", PRESTAGE_PREPARATION_FAILED_FILE)
- os.remove(PRESTAGE_PREPARATION_FAILED_FILE)
-
-
def prestage_start(context, subcloud_id):
subcloud = db_api.subcloud_update(
context, subcloud_id,
- deploy_status=consts.PRESTAGE_STATE_PREPARE)
+ deploy_status=consts.PRESTAGE_STATE_PACKAGES)
return subcloud
@@ -219,8 +220,8 @@ def prestage_fail(context, subcloud_id):
deploy_status=consts.PRESTAGE_STATE_FAILED)
-def is_upgrade(subcloud_version):
- return SW_VERSION != subcloud_version
+def is_local(subcloud_version, specified_version):
+ return subcloud_version == specified_version
def prestage_subcloud(context, payload):
@@ -228,15 +229,13 @@ def prestage_subcloud(context, payload):
This is the standalone (not orchestrated) prestage implementation.
- 4 phases:
+ 3 phases:
1. Prestage validation (already done by this point)
- Subcloud exists, is online, is managed, is AIO-SX
- Subcloud has no management-affecting alarms (unless force is given)
- 2. Packages preparation
- - prestage-prepare-packages.sh
- 3. Packages prestaging
+ 2. Packages prestaging
- run prestage_packages.yml ansible playbook
- 4. Images prestaging
+ 3. Images prestaging
- run prestage_images.yml ansible playbook
"""
subcloud_name = payload['subcloud_name']
@@ -251,7 +250,6 @@ def prestage_subcloud(context, payload):
subcloud=subcloud_name,
details="Subcloud does not exist")
- cleanup_failed_preparation()
subcloud = prestage_start(context, subcloud.id)
try:
apply_thread = threading.Thread(
@@ -267,81 +265,9 @@ def prestage_subcloud(context, payload):
prestage_fail(context, subcloud.id)
-def _sync_run_prestage_prepare_packages(context, subcloud, payload):
- """Run prepare prestage packages ansible script."""
-
- if os.path.exists(PRESTAGE_PREPARATION_FAILED_FILE):
- LOG.warn("Subcloud %s prestage preparation aborted due to "
- "previous %s failure", subcloud.name,
- consts.PRESTAGE_STATE_PREPARE)
- raise Exception("Aborted due to previous %s failure"
- % consts.PRESTAGE_STATE_PREPARE)
-
- LOG.info("Running prepare prestage ansible script, version=%s "
- "(subcloud_id=%s)", SW_VERSION, subcloud.id)
- db_api.subcloud_update(context,
- subcloud.id,
- deploy_status=consts.PRESTAGE_STATE_PREPARE)
-
- # Ansible inventory filename for the specified subcloud
- ansible_subcloud_inventory_file = \
- utils.get_ansible_filename(subcloud.name,
- ANSIBLE_PRESTAGE_INVENTORY_SUFFIX)
-
- extra_vars_str = "current_software_version=%s previous_software_version=%s" \
- % (SW_VERSION, subcloud.software_version)
-
- try:
- _run_ansible(context,
- ["ansible-playbook",
- ANSIBLE_PREPARE_PRESTAGE_PACKAGES_PLAYBOOK,
- "--inventory", ansible_subcloud_inventory_file,
- "--extra-vars", extra_vars_str],
- "prepare",
- subcloud,
- consts.PRESTAGE_STATE_PREPARE,
- payload['sysadmin_password'],
- payload['oam_floating_ip'],
- ansible_subcloud_inventory_file,
- consts.PRESTAGE_PREPARE_TIMEOUT)
- except Exception:
- # Flag the failure on file system so that other orchestrated
- # strategy steps in this run fail immediately. This file is
- # removed at the start of each orchestrated/standalone run.
- # This creates the file if it doesn't exist:
- with open(PRESTAGE_PREPARATION_FAILED_FILE, 'a'):
- pass
- raise
-
- LOG.info("Prepare prestage ansible successful")
-
-
-# TODO(Shrikumar): Cleanup this function, especially the comparison for
-# software versions.
-# Rationale: In CentOS, prestage_prepare is required; in Debian, it is not.
-
-
-@utils.synchronized('prestage-prepare-packages', external=True)
-def prestage_prepare(context, subcloud, payload):
- """Run the prepare prestage packages playbook if required."""
- if SW_VERSION > LAST_SW_VERSION_IN_CENTOS:
- LOG.info("Skipping prestage package preparation in Debian")
- return
-
- if is_upgrade(subcloud.software_version):
- if not os.path.exists(PRESTAGE_PREPARATION_COMPLETED_FILE):
- _sync_run_prestage_prepare_packages(context, subcloud, payload)
- else:
- LOG.info(
- "Skipping prestage package preparation (not required)")
- else:
- LOG.info("Skipping prestage package preparation (reinstall)")
-
-
def _prestage_standalone_thread(context, subcloud, payload):
"""Run the prestage operations inside a separate thread"""
try:
- prestage_prepare(context, subcloud, payload)
prestage_packages(context, subcloud, payload)
prestage_images(context, subcloud, payload)
@@ -382,19 +308,15 @@ def _get_prestage_subcloud_info(subcloud_name):
def _run_ansible(context, prestage_command, phase,
subcloud, deploy_status,
sysadmin_password, oam_floating_ip,
+ software_version,
ansible_subcloud_inventory_file,
timeout_seconds=None):
if not timeout_seconds:
# We always want to set a timeout in prestaging operations:
timeout_seconds = CONF.playbook_timeout
- if deploy_status == consts.PRESTAGE_STATE_PREPARE:
- LOG.info(("Preparing prestage shared packages for subcloud: %s, "
- "version: %s, timeout: %ss"),
- subcloud.name, SW_VERSION, timeout_seconds)
- else:
- LOG.info("Prestaging %s for subcloud: %s, version: %s, timeout: %ss",
- phase, subcloud.name, SW_VERSION, timeout_seconds)
+ LOG.info("Prestaging %s for subcloud: %s, version: %s, timeout: %ss",
+ phase, subcloud.name, software_version, timeout_seconds)
db_api.subcloud_update(context,
subcloud.id,
@@ -436,7 +358,9 @@ def prestage_packages(context, subcloud, payload):
utils.get_ansible_filename(subcloud.name,
ANSIBLE_PRESTAGE_INVENTORY_SUFFIX)
- extra_vars_str = "software_version=%s" % SW_VERSION
+ prestage_software_version = payload.get(
+ consts.PRESTAGE_REQUEST_RELEASE, SW_VERSION)
+ extra_vars_str = "software_version=%s" % prestage_software_version
_run_ansible(context,
["ansible-playbook",
ANSIBLE_PRESTAGE_SUBCLOUD_PACKAGES_PLAYBOOK,
@@ -447,6 +371,7 @@ def prestage_packages(context, subcloud, payload):
consts.PRESTAGE_STATE_PACKAGES,
payload['sysadmin_password'],
payload['oam_floating_ip'],
+ prestage_software_version,
ansible_subcloud_inventory_file)
@@ -457,21 +382,26 @@ def prestage_images(context, subcloud, payload):
If the prestage images file has been uploaded for the target software
version then pass the image_list_file to the prestage_images.yml playbook
- If the images file does not exist and the prestage is for upgrade,
+ If the images file does not exist and the prestage source is remote,
skip calling prestage_images.yml playbook.
Ensure the final state is either prestage-failed or prestage-complete
regardless of whether prestage_images.yml playbook is executed or skipped.
"""
- upgrade = is_upgrade(subcloud.software_version)
- extra_vars_str = "software_version=%s" % SW_VERSION
+ prestage_software_version = payload.get(
+ consts.PRESTAGE_REQUEST_RELEASE, SW_VERSION)
+ local = is_local(subcloud.software_version, prestage_software_version)
+ extra_vars_str = "software_version=%s" % prestage_software_version
image_list_file = None
- if upgrade:
- image_list_filename = utils.get_filename_by_prefix(DEPLOY_BASE_DIR,
- 'prestage_images')
+ deploy_dir = os.path.join(DEPLOY_BASE_DIR, prestage_software_version)
+ if not local:
+ image_list_filename = None
+ if os.path.isdir(deploy_dir):
+ image_list_filename = utils.get_filename_by_prefix(deploy_dir,
+ 'prestage_images')
if image_list_filename:
- image_list_file = os.path.join(DEPLOY_BASE_DIR, image_list_filename)
+ image_list_file = os.path.join(deploy_dir, image_list_filename)
# include this file in the ansible args:
extra_vars_str += (" image_list_file=%s" % image_list_file)
LOG.debug("prestage images list file: %s", image_list_file)
@@ -480,9 +410,9 @@ def prestage_images(context, subcloud, payload):
# There are only two scenarios where we want to run ansible
# for prestaging images:
- # 1. reinstall
- # 2. upgrade, with supplied image list
- if not upgrade or (upgrade and image_list_file):
+ # 1. local
+ # 2. remote, with supplied image list
+ if local or ((not local) and image_list_file):
# Ansible inventory filename for the specified subcloud
ansible_subcloud_inventory_file = \
utils.get_ansible_filename(subcloud.name,
@@ -497,8 +427,9 @@ def prestage_images(context, subcloud, payload):
consts.PRESTAGE_STATE_IMAGES,
payload['sysadmin_password'],
payload['oam_floating_ip'],
+ prestage_software_version,
ansible_subcloud_inventory_file,
timeout_seconds=CONF.playbook_timeout * 2)
else:
- LOG.info("Skipping ansible prestage images step, upgrade: %s,"
- " image_list_file: %s", upgrade, image_list_file)
+ LOG.info("Skipping ansible prestage images step, is_local: %s,"
+ " image_list_file: %s", local, image_list_file)
diff --git a/distributedcloud/dcmanager/common/utils.py b/distributedcloud/dcmanager/common/utils.py
index fcc4fc9e6..e32417b4d 100644
--- a/distributedcloud/dcmanager/common/utils.py
+++ b/distributedcloud/dcmanager/common/utils.py
@@ -456,6 +456,16 @@ def get_loads_for_patching(loads):
return [load.software_version for load in loads if load.state in valid_states]
+def get_loads_for_prestage(loads):
+ """Filter the loads that can be prestaged. Return their software versions"""
+ valid_states = [
+ consts.ACTIVE_LOAD_STATE,
+ consts.IMPORTED_LOAD_STATE,
+ consts.INACTIVE_LOAD_STATE
+ ]
+ return [load.software_version for load in loads if load.state in valid_states]
+
+
def subcloud_get_by_ref(context, subcloud_ref):
"""Handle getting a subcloud by either name, or ID
@@ -787,6 +797,26 @@ def is_subcloud_healthy(subcloud_name):
return False
+def get_systemcontroller_installed_loads():
+
+ try:
+ os_client = OpenStackDriver(
+ region_name=dccommon_consts.SYSTEM_CONTROLLER_NAME,
+ region_clients=None)
+ except Exception:
+ LOG.exception("Failed to get keystone client for %s",
+ dccommon_consts.SYSTEM_CONTROLLER_NAME)
+ raise
+
+ ks_client = os_client.keystone_client
+ sysinv_client = SysinvClient(
+ dccommon_consts.SYSTEM_CONTROLLER_NAME, ks_client.session,
+ endpoint=ks_client.endpoint_cache.get_endpoint('sysinv'))
+
+ loads = sysinv_client.get_loads()
+ return get_loads_for_prestage(loads)
+
+
def get_certificate_from_secret(secret_name, secret_ns):
"""Get certificate from k8s secret
diff --git a/distributedcloud/dcmanager/manager/subcloud_manager.py b/distributedcloud/dcmanager/manager/subcloud_manager.py
index 7a3758a69..568fb92ab 100644
--- a/distributedcloud/dcmanager/manager/subcloud_manager.py
+++ b/distributedcloud/dcmanager/manager/subcloud_manager.py
@@ -111,7 +111,6 @@ TRANSITORY_STATES = {
consts.DEPLOY_STATE_MIGRATING_DATA: consts.DEPLOY_STATE_DATA_MIGRATION_FAILED,
consts.DEPLOY_STATE_PRE_RESTORE: consts.DEPLOY_STATE_RESTORE_PREP_FAILED,
consts.DEPLOY_STATE_RESTORING: consts.DEPLOY_STATE_RESTORE_FAILED,
- consts.PRESTAGE_STATE_PREPARE: consts.PRESTAGE_STATE_FAILED,
consts.PRESTAGE_STATE_PACKAGES: consts.PRESTAGE_STATE_FAILED,
consts.PRESTAGE_STATE_IMAGES: consts.PRESTAGE_STATE_FAILED,
}
@@ -227,10 +226,9 @@ class SubcloudManager(manager.Manager):
"-i", ansible_subcloud_inventory_file,
"--limit", subcloud_name,
"-e", "@%s" % consts.ANSIBLE_OVERRIDES_PATH + "/" +
- subcloud_name + '/' + "install_values.yml"]
- if software_version and software_version != SW_VERSION:
- install_command += [
- "-e", "install_release_version=%s" % software_version]
+ subcloud_name + '/' + "install_values.yml",
+ "-e", "install_release_version=%s" %
+ (software_version if software_version else SW_VERSION)]
return install_command
def compose_apply_command(self, subcloud_name,
@@ -247,7 +245,9 @@ class SubcloudManager(manager.Manager):
# which overrides to load
apply_command += [
"-e", str("override_files_dir='%s' region_name=%s") % (
- consts.ANSIBLE_OVERRIDES_PATH, subcloud_name)]
+ consts.ANSIBLE_OVERRIDES_PATH, subcloud_name),
+ "-e", "install_release_version=%s" %
+ (software_version if software_version else SW_VERSION)]
return apply_command
def compose_deploy_command(self, subcloud_name, ansible_subcloud_inventory_file, payload):
@@ -1385,11 +1385,13 @@ class SubcloudManager(manager.Manager):
@staticmethod
def _run_subcloud_install(
context, subcloud, install_command, log_file, payload):
- LOG.info("Preparing remote install of %s" % subcloud.name)
+ software_version = str(payload['software_version'])
+ LOG.info("Preparing remote install of %s, version: %s",
+ subcloud.name, software_version)
db_api.subcloud_update(
context, subcloud.id,
deploy_status=consts.DEPLOY_STATE_PRE_INSTALL,
- software_version=str(payload['software_version']))
+ software_version=software_version)
try:
install = SubcloudInstall(context, subcloud.name)
install.prep(consts.ANSIBLE_OVERRIDES_PATH, payload)
@@ -1399,7 +1401,7 @@ class SubcloudManager(manager.Manager):
context, subcloud.id,
deploy_status=consts.DEPLOY_STATE_PRE_INSTALL_FAILED)
LOG.error(str(e))
- install.cleanup()
+ install.cleanup(software_version)
return False
# Run the remote install playbook
@@ -1419,9 +1421,9 @@ class SubcloudManager(manager.Manager):
context, subcloud.id,
deploy_status=consts.DEPLOY_STATE_INSTALL_FAILED,
error_description=msg[0:consts.ERROR_DESCRIPTION_LENGTH])
- install.cleanup()
+ install.cleanup(software_version)
return False
- install.cleanup()
+ install.cleanup(software_version)
LOG.info("Successfully installed %s" % subcloud.name)
return True
diff --git a/distributedcloud/dcmanager/orchestrator/prestage_orch_thread.py b/distributedcloud/dcmanager/orchestrator/prestage_orch_thread.py
index ed033baa2..33fc8f783 100644
--- a/distributedcloud/dcmanager/orchestrator/prestage_orch_thread.py
+++ b/distributedcloud/dcmanager/orchestrator/prestage_orch_thread.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2022 Wind River Systems, Inc.
+# Copyright (c) 2022-2023 Wind River Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -30,8 +30,6 @@ class PrestageOrchThread(OrchThread):
STATE_OPERATORS = {
consts.STRATEGY_STATE_PRESTAGE_PRE_CHECK:
states.PrestagePreCheckState,
- consts.STRATEGY_STATE_PRESTAGE_PREPARE:
- states.PrestagePrepareState,
consts.STRATEGY_STATE_PRESTAGE_PACKAGES:
states.PrestagePackagesState,
consts.STRATEGY_STATE_PRESTAGE_IMAGES:
diff --git a/distributedcloud/dcmanager/orchestrator/states/prestage/states.py b/distributedcloud/dcmanager/orchestrator/states/prestage/states.py
index c3ee8ed2f..d484efff1 100644
--- a/distributedcloud/dcmanager/orchestrator/states/prestage/states.py
+++ b/distributedcloud/dcmanager/orchestrator/states/prestage/states.py
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2022 Wind River Systems, Inc.
+# Copyright (c) 2022-2023 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
@@ -51,7 +51,7 @@ class PrestagePreCheckState(PrestageState):
def __init__(self, region_name):
super(PrestagePreCheckState, self).__init__(
- next_state=consts.STRATEGY_STATE_PRESTAGE_PREPARE,
+ next_state=consts.STRATEGY_STATE_PRESTAGE_PACKAGES,
region_name=region_name)
@utils.synchronized('prestage-update-extra-args', external=True)
@@ -85,18 +85,14 @@ class PrestagePreCheckState(PrestageState):
'sysadmin_password': extra_args['sysadmin_password'],
'force': extra_args['force']
}
+ if extra_args.get(consts.PRESTAGE_SOFTWARE_VERSION):
+ payload.update({consts.PRESTAGE_REQUEST_RELEASE:
+ extra_args.get(consts.PRESTAGE_SOFTWARE_VERSION)})
try:
oam_floating_ip = prestage.validate_prestage(
strategy_step.subcloud, payload)
-
self._update_oam_floating_ip(strategy_step, oam_floating_ip)
- if strategy_step.stage == 1:
- # Note: this cleanup happens for every subcloud, but they are all
- # processed before moving on to the next strategy step
- # TODO(kmacleod) although this is a quick check, it is
- # synchronized, so we may want to figure out a better
- # way to only run this once
- prestage.cleanup_failed_preparation()
+
prestage.prestage_start(self.context, strategy_step.subcloud.id)
except exceptions.PrestagePreCheckFailedException as ex:
@@ -112,26 +108,6 @@ class PrestagePreCheckState(PrestageState):
self.info_log(strategy_step, "Pre-check pass")
-class PrestagePrepareState(PrestageState):
- """Perform prepare operation"""
-
- def __init__(self, region_name):
- super(PrestagePrepareState, self).__init__(
- next_state=consts.STRATEGY_STATE_PRESTAGE_PACKAGES,
- region_name=region_name)
-
- def _do_state_action(self, strategy_step):
- extra_args = utils.get_sw_update_strategy_extra_args(self.context)
- payload = {
- 'sysadmin_password': extra_args['sysadmin_password'],
- 'oam_floating_ip':
- extra_args['oam_floating_ip_dict'][strategy_step.subcloud.name],
- 'force': extra_args['force']
- }
- prestage.prestage_prepare(self.context, strategy_step.subcloud, payload)
- self.info_log(strategy_step, "Prepare finished")
-
-
class PrestagePackagesState(PrestageState):
"""Perform prestage packages operation"""
@@ -148,6 +124,9 @@ class PrestagePackagesState(PrestageState):
extra_args['oam_floating_ip_dict'][strategy_step.subcloud.name],
'force': extra_args['force']
}
+ if extra_args.get(consts.PRESTAGE_SOFTWARE_VERSION):
+ payload.update({consts.PRESTAGE_REQUEST_RELEASE:
+ extra_args.get(consts.PRESTAGE_SOFTWARE_VERSION)})
prestage.prestage_packages(self.context,
strategy_step.subcloud, payload)
self.info_log(strategy_step, "Packages finished")
@@ -169,6 +148,9 @@ class PrestageImagesState(PrestageState):
extra_args['oam_floating_ip_dict'][strategy_step.subcloud.name],
'force': extra_args['force']
}
+ if extra_args.get(consts.PRESTAGE_SOFTWARE_VERSION):
+ payload.update({consts.PRESTAGE_REQUEST_RELEASE:
+ extra_args.get(consts.PRESTAGE_SOFTWARE_VERSION)})
prestage.prestage_images(self.context, strategy_step.subcloud, payload)
self.info_log(strategy_step, "Images finished")
prestage.prestage_complete(self.context, strategy_step.subcloud.id)
diff --git a/distributedcloud/dcmanager/orchestrator/sw_update_manager.py b/distributedcloud/dcmanager/orchestrator/sw_update_manager.py
index cd35d9dde..32d64e56b 100644
--- a/distributedcloud/dcmanager/orchestrator/sw_update_manager.py
+++ b/distributedcloud/dcmanager/orchestrator/sw_update_manager.py
@@ -20,6 +20,8 @@ import threading
from oslo_log import log as logging
+from tsconfig.tsconfig import SW_VERSION
+
from dccommon import consts as dccommon_consts
from dcmanager.audit import rpcapi as dcmanager_audit_rpc_client
from dcmanager.common import consts
@@ -307,6 +309,12 @@ class SwUpdateManager(manager.Manager):
else:
force = False
+ installed_loads = []
+ software_version = None
+ if payload.get(consts.PRESTAGE_REQUEST_RELEASE):
+ software_version = payload.get(consts.PRESTAGE_REQUEST_RELEASE)
+ installed_loads = utils.get_systemcontroller_installed_loads()
+
# Has the user specified a specific subcloud?
# todo(abailey): refactor this code to use classes
cloud_name = payload.get('cloud_name')
@@ -375,7 +383,8 @@ class SwUpdateManager(manager.Manager):
try:
prestage.global_prestage_validate(payload)
prestage_global_validated = True
- prestage.initial_subcloud_validate(subcloud)
+ prestage.initial_subcloud_validate(
+ subcloud, installed_loads, software_version)
except exceptions.PrestagePreCheckFailedException as ex:
raise exceptions.BadRequest(resource='strategy',
msg=str(ex))
@@ -410,7 +419,9 @@ class SwUpdateManager(manager.Manager):
extra_args = {
consts.EXTRA_ARGS_SYSADMIN_PASSWORD:
payload.get(consts.EXTRA_ARGS_SYSADMIN_PASSWORD),
- consts.EXTRA_ARGS_FORCE: force
+ consts.EXTRA_ARGS_FORCE: force,
+ consts.PRESTAGE_SOFTWARE_VERSION:
+ software_version if software_version else SW_VERSION
}
elif strategy_type == consts.SW_UPDATE_TYPE_PATCH:
upload_only_str = payload.get(consts.EXTRA_ARGS_UPLOAD_ONLY)
@@ -496,7 +507,8 @@ class SwUpdateManager(manager.Manager):
if subcloud.name not in subclouds_processed:
# Do initial validation for subcloud
try:
- prestage.initial_subcloud_validate(subcloud)
+ prestage.initial_subcloud_validate(
+ subcloud, installed_loads, software_version)
except exceptions.PrestagePreCheckFailedException:
LOG.warn("Excluding subcloud from prestage strategy: %s",
subcloud.name)
diff --git a/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_subclouds.py b/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_subclouds.py
index 825a605ef..f16499e8f 100644
--- a/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_subclouds.py
+++ b/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_subclouds.py
@@ -1879,6 +1879,38 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest):
mock.ANY)
self.assertEqual(response.status_int, 200)
+ @mock.patch.object(cutils, 'get_systemcontroller_installed_loads')
+ @mock.patch.object(rpc_client, 'ManagerClient')
+ @mock.patch.object(prestage, '_get_system_controller_upgrades')
+ @mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload')
+ def test_prestage_subcloud_invalid_release(self, mock_get_prestage_payload,
+ mock_controller_upgrade,
+ mock_rpc_client,
+ mock_installed_loads):
+ subcloud = fake_subcloud.create_fake_subcloud(self.ctx)
+ subcloud = db_api.subcloud_update(
+ self.ctx, subcloud.id,
+ availability_status=dccommon_consts.AVAILABILITY_ONLINE,
+ management_state=dccommon_consts.MANAGEMENT_MANAGED)
+
+ fake_release = '21.12'
+ mock_installed_loads.return_value = ['22.12']
+
+ fake_password = (base64.b64encode('testpass'.encode("utf-8"))). \
+ decode('ascii')
+ data = {'sysadmin_password': fake_password,
+ 'force': False,
+ 'release': fake_release}
+ mock_controller_upgrade.return_value = list()
+
+ mock_rpc_client().prestage_subcloud.return_value = True
+ mock_get_prestage_payload.return_value = data
+
+ six.assertRaisesRegex(self, webtest.app.AppError, "400 *",
+ self.app.patch_json, FAKE_URL + '/' +
+ str(subcloud.id) + '/prestage',
+ headers=FAKE_HEADERS, params=data)
+
@mock.patch.object(rpc_client, 'ManagerClient')
@mock.patch.object(subclouds.SubcloudsController, '_get_prestage_payload')
@mock.patch.object(prestage, '_get_system_controller_upgrades')
@@ -1925,6 +1957,7 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest):
str(subcloud.id) + '/prestage',
headers=FAKE_HEADERS, params=data)
+ @mock.patch.object(cutils, 'get_systemcontroller_installed_loads')
@mock.patch.object(rpc_client, 'ManagerClient')
@mock.patch.object(prestage, '_get_system_controller_upgrades')
@mock.patch.object(prestage, '_get_prestage_subcloud_info')
@@ -1932,13 +1965,18 @@ class TestSubcloudAPIOther(testroot.DCManagerApiTest):
def test_prestage_subcloud_duplex(self, mock_get_prestage_payload,
mock_prestage_subcloud_info,
mock_controller_upgrade,
- mock_rpc_client):
+ mock_rpc_client,
+ mock_installed_loads):
subcloud = fake_subcloud.create_fake_subcloud(self.ctx)
subcloud = db_api.subcloud_update(
self.ctx, subcloud.id, availability_status=dccommon_consts.AVAILABILITY_ONLINE,
management_state=dccommon_consts.MANAGEMENT_MANAGED)
- fake_password = (base64.b64encode('testpass'.encode("utf-8"))).decode('ascii')
+ fake_release = '21.12'
+ mock_installed_loads.return_value = [fake_release]
+
+ fake_password = (base64.b64encode('testpass'.encode("utf-8"))).\
+ decode('ascii')
data = {'sysadmin_password': fake_password,
'force': False}
mock_controller_upgrade.return_value = list()
diff --git a/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py b/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py
index 38725f5ab..d8c888091 100644
--- a/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py
+++ b/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py
@@ -11,7 +11,7 @@
# License for the specific language governing permissions and limitations
# under the License.
#
-
+import base64
import copy
import datetime
@@ -40,7 +40,6 @@ from dcmanager.tests.unit.common import fake_subcloud
from dcmanager.tests import utils
from tsconfig.tsconfig import SW_VERSION
-LAST_SW_VERSION_IN_CENTOS = "22.06"
FAKE_PREVIOUS_SW_VERSION = '21.12'
@@ -264,11 +263,15 @@ FAKE_SUBCLOUD_PRESTAGE_PAYLOAD = {
"sysadmin_password": "testpasswd",
}
+FAKE_PRESTAGE_RELEASE = '22.12'
+FAKE_SUBCLOUD_SW_VERSION = '21.12'
+FAKE_PASSWORD = (base64.b64encode('testpass'.encode("utf-8"))).decode('ascii')
FAKE_PRESTAGE_PAYLOAD = {
"subcloud_name": "subcloud1",
"oam_floating_ip": "10.10.10.12",
- "sysadmin_password": 'testpassword',
- "force": False
+ "sysadmin_password": FAKE_PASSWORD,
+ "force": False,
+ "release": FAKE_PRESTAGE_RELEASE
}
FAKE_MGMT_IF_UUIDS = [
@@ -1437,7 +1440,8 @@ class TestSubcloudManager(base.DCManagerTestCase):
FAKE_PREVIOUS_SW_VERSION),
'-i', '/var/opt/dc/ansible/subcloud1_inventory.yml',
'--limit', 'subcloud1', '-e',
- "override_files_dir='/var/opt/dc/ansible' region_name=subcloud1"
+ "override_files_dir='/var/opt/dc/ansible' region_name=subcloud1",
+ '-e', "install_release_version=%s" % FAKE_PREVIOUS_SW_VERSION
]
)
@@ -1750,7 +1754,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
mock_run_playbook.assert_called_once()
mock_is_healthy.assert_called_once()
- # Verify that subcloud has the correct deploy status consts.PRESTAGE_STATE_PREPARE
+ # Verify that subcloud has the correct backup status
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
self.assertEqual(consts.BACKUP_STATE_PRE_BACKUP,
updated_subcloud.backup_status)
@@ -1777,7 +1781,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
mock_parallel_group_operation.assert_called_once()
- # Verify that subcloud has the correct deploy status consts.PRESTAGE_STATE_PREPARE
+ # Verify that subcloud has the correct backup status
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
self.assertEqual(consts.BACKUP_STATE_VALIDATE_FAILED,
updated_subcloud.backup_status)
@@ -1804,7 +1808,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
mock_parallel_group_operation.assert_called_once()
- # Verify that subcloud has the correct deploy status consts.PRESTAGE_STATE_PREPARE
+ # Verify that subcloud has the correct backup status
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
self.assertEqual(consts.BACKUP_STATE_VALIDATE_FAILED,
updated_subcloud.backup_status)
@@ -1831,7 +1835,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
mock_parallel_group_operation.assert_called_once()
- # Verify that subcloud has the correct deploy status consts.PRESTAGE_STATE_PREPARE
+ # Verify that subcloud has the correct backup status
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
self.assertEqual(consts.BACKUP_STATE_VALIDATE_FAILED,
updated_subcloud.backup_status)
@@ -1859,7 +1863,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
mock_parallel_group_operation.assert_called_once()
- # Verify that subcloud has the correct deploy status consts.PRESTAGE_STATE_PREPARE
+ # Verify that subcloud has the correct backup status
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
self.assertEqual(consts.BACKUP_STATE_UNKNOWN,
updated_subcloud.backup_status)
@@ -1887,7 +1891,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
mock_parallel_group_operation.assert_called_once()
- # Verify that subcloud has the correct deploy status consts.PRESTAGE_STATE_PREPARE
+ # Verify that subcloud has the correct backup status
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
self.assertEqual(consts.BACKUP_STATE_UNKNOWN,
updated_subcloud.backup_status)
@@ -2013,25 +2017,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
updated_subcloud.backup_status)
@mock.patch.object(threading.Thread, 'start')
- def test_prestage_subcloud_prepare(self, mock_thread_start):
-
- values = copy.copy(FAKE_PRESTAGE_PAYLOAD)
- subcloud = self.create_subcloud_static(
- self.ctx,
- name='subcloud1',
- deploy_status=consts.DEPLOY_STATE_NONE)
-
- sm = subcloud_manager.SubcloudManager()
- sm.prestage_subcloud(self.ctx, payload=values)
- mock_thread_start.assert_called_once()
-
- # Verify that subcloud has the correct deploy status consts.PRESTAGE_STATE_PREPARE
- updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
- self.assertEqual(consts.PRESTAGE_STATE_PREPARE,
- updated_subcloud.deploy_status)
-
- @mock.patch.object(threading.Thread, 'start')
- def test_prestage_subcloud_prepare_no_subcloud(self, mock_thread_start):
+ def test_prestage_no_subcloud(self, mock_thread_start):
values = copy.copy(FAKE_PRESTAGE_PAYLOAD)
values['subcloud_name'] = 'randomname'
@@ -2048,113 +2034,72 @@ class TestSubcloudManager(base.DCManagerTestCase):
@mock.patch.object(os_path, 'exists')
@mock.patch.object(cutils, 'get_filename_by_prefix')
@mock.patch.object(prestage, '_run_ansible')
- def test_prestage_upgrade_pass(self, mock_run_ansible,
- mock_get_filename_by_prefix,
- mock_file_exists):
+ def test_prestage_remote_pass(self, mock_run_ansible,
+ mock_get_filename_by_prefix,
+ mock_file_exists):
values = copy.copy(FAKE_PRESTAGE_PAYLOAD)
subcloud = self.create_subcloud_static(self.ctx,
name='subcloud1',
deploy_status=consts.DEPLOY_STATE_NONE,
- software_version='18.02')
+ software_version=FAKE_SUBCLOUD_SW_VERSION)
prestage._prestage_standalone_thread(self.ctx, subcloud, payload=values)
mock_run_ansible.return_value = None
- mock_get_filename_by_prefix.return_value = None
- mock_file_exists.return_value = False
+ mock_get_filename_by_prefix.return_value = 'prestage_images_list.txt'
+ mock_file_exists.return_value = True
# Verify that subcloud has the correct deploy status
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
self.assertEqual(consts.PRESTAGE_STATE_COMPLETE,
updated_subcloud.deploy_status)
- @mock.patch.object(os_path, 'exists')
+ # Verify both the prestage package and image ansible playbooks were called
+ self.assertEqual(mock_run_ansible.call_count, 2)
+ # Verify the "image_list_file" was passed to the prestage image playbook
+ # for the remote prestage
+ self.assertTrue(
+ 'image_list_file' in mock_run_ansible.call_args_list[1].args[1][5])
+ # Verify the prestage request release was passed to the playbooks
+ self.assertTrue(
+ FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[0].args[1][5])
+ self.assertTrue(
+ FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[1].args[1][5])
+
@mock.patch.object(prestage, '_run_ansible')
- def test_prestage_ansible_failed(self, mock_run_ansible,
- mock_file_exists):
+ def test_prestage_local_pass(self, mock_run_ansible):
values = copy.copy(FAKE_PRESTAGE_PAYLOAD)
subcloud = self.create_subcloud_static(self.ctx,
name='subcloud1',
deploy_status=consts.DEPLOY_STATE_NONE,
- software_version='18.02')
-
- mock_run_ansible.side_effect = FakeException('Test')
- mock_file_exists.return_value = False
- mock_open = mock.mock_open(read_data='test')
- with mock.patch('six.moves.builtins.open', mock_open):
-
- e = self.assertRaises(FakeException,
- prestage._sync_run_prestage_prepare_packages,
- context=self.ctx, subcloud=subcloud, payload=values)
-
- self.assertEqual('Test', str(e))
-
- @mock.patch.object(os_path, 'exists')
- @mock.patch.object(cutils, 'get_filename_by_prefix')
- @mock.patch.object(prestage, '_run_ansible')
- def test_prestage_reinstall_pass(self, mock_run_ansible,
- mock_get_filename_by_prefix,
- mock_file_exists):
-
- values = copy.copy(FAKE_PRESTAGE_PAYLOAD)
- subcloud = self.create_subcloud_static(self.ctx,
- name='subcloud1',
- deploy_status=consts.DEPLOY_STATE_NONE,
- software_version=SW_VERSION)
+ software_version=FAKE_PRESTAGE_RELEASE)
prestage._prestage_standalone_thread(self.ctx, subcloud, payload=values)
mock_run_ansible.return_value = None
- mock_get_filename_by_prefix.return_value = None
- mock_file_exists.return_value = False
# Verify that subcloud has the correct deploy status
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
self.assertEqual(consts.PRESTAGE_STATE_COMPLETE,
updated_subcloud.deploy_status)
- @mock.patch.object(prestage, 'prestage_complete')
+ # Verify both the prestage package and image ansible playbooks were called
+ self.assertEqual(mock_run_ansible.call_count, 2)
+ # Verify the prestage request release was passed to the playbooks
+ self.assertTrue(
+ FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[0].args[1][5])
+ self.assertTrue(
+ FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[1].args[1][5])
+
@mock.patch.object(prestage, 'prestage_images')
@mock.patch.object(prestage, 'prestage_packages')
@mock.patch.object(cutils, 'delete_subcloud_inventory')
@mock.patch.object(prestage, '_run_ansible')
- def test_prestage_subcloud_prestage_prepare_centos(self,
- mock_run_ansible,
- mock_delete_subcloud_inventory,
- mock_prestage_packages,
- mock_prestage_images,
- mock_prestage_complete):
-
- values = copy.copy(FAKE_PRESTAGE_PAYLOAD)
- subcloud = self.create_subcloud_static(self.ctx,
- name='subcloud1',
- deploy_status=consts.DEPLOY_STATE_NONE)
- current_sw_version = prestage.SW_VERSION
- prestage.SW_VERSION = LAST_SW_VERSION_IN_CENTOS
- prestage._prestage_standalone_thread(self.ctx, subcloud, payload=values)
- mock_run_ansible.return_value = None
- mock_prestage_packages.assert_called_once_with(self.ctx, subcloud, values)
- mock_prestage_images.assert_called_once_with(self.ctx, subcloud, values)
- mock_prestage_complete.assert_called_once_with(self.ctx, subcloud.id)
- mock_delete_subcloud_inventory.return_value = None
-
- # Verify that subcloud has the correct deploy status
- updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
- prestage.SW_VERSION = current_sw_version
- self.assertEqual(consts.PRESTAGE_STATE_PREPARE,
- updated_subcloud.deploy_status)
-
- @mock.patch.object(prestage, 'prestage_complete')
- @mock.patch.object(prestage, 'prestage_images')
- @mock.patch.object(prestage, 'prestage_packages')
- @mock.patch.object(cutils, 'delete_subcloud_inventory')
- @mock.patch.object(prestage, '_run_ansible')
- def test_prestage_subcloud_prestage_prepare_debian(self,
- mock_run_ansible,
- mock_delete_subcloud_inventory,
- mock_prestage_packages,
- mock_prestage_images,
- mock_prestage_complete):
+ def test_prestage_subcloud_complete(self,
+ mock_run_ansible,
+ mock_delete_subcloud_inventory,
+ mock_prestage_packages,
+ mock_prestage_images):
values = copy.copy(FAKE_PRESTAGE_PAYLOAD)
subcloud = self.create_subcloud_static(self.ctx,
@@ -2164,12 +2109,11 @@ class TestSubcloudManager(base.DCManagerTestCase):
mock_run_ansible.return_value = None
mock_prestage_packages.assert_called_once_with(self.ctx, subcloud, values)
mock_prestage_images.assert_called_once_with(self.ctx, subcloud, values)
- mock_prestage_complete.assert_called_once_with(self.ctx, subcloud.id)
mock_delete_subcloud_inventory.return_value = None
- # Verify that subcloud has the correct deploy status
+ # Verify that subcloud has the "prestage-complete" deploy status
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
- self.assertEqual(consts.DEPLOY_STATE_NONE,
+ self.assertEqual(consts.PRESTAGE_STATE_COMPLETE,
updated_subcloud.deploy_status)
def test_get_cached_regionone_data(self):
diff --git a/distributedcloud/dcmanager/tests/unit/orchestrator/states/prestage/test_states.py b/distributedcloud/dcmanager/tests/unit/orchestrator/states/prestage/test_states.py
index e46d21336..b19443414 100644
--- a/distributedcloud/dcmanager/tests/unit/orchestrator/states/prestage/test_states.py
+++ b/distributedcloud/dcmanager/tests/unit/orchestrator/states/prestage/test_states.py
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2022 Wind River Systems, Inc.
+# Copyright (c) 2022-2023 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
@@ -17,7 +17,6 @@ from dcmanager.common.consts import STRATEGY_STATE_FAILED
from dcmanager.common.consts import STRATEGY_STATE_PRESTAGE_IMAGES
from dcmanager.common.consts import STRATEGY_STATE_PRESTAGE_PACKAGES
from dcmanager.common.consts import STRATEGY_STATE_PRESTAGE_PRE_CHECK
-from dcmanager.common.consts import STRATEGY_STATE_PRESTAGE_PREPARE
from dcmanager.db.sqlalchemy import api as db_api
@@ -58,27 +57,6 @@ class TestPrestagePreCheckState(TestPrestage):
self.strategy_step = \
self.setup_strategy_step(self.subcloud.id, STRATEGY_STATE_PRESTAGE_PRE_CHECK)
- def test_prestage_prepare(self):
- next_state = STRATEGY_STATE_PRESTAGE_PREPARE
- # Update the subcloud to have deploy state as "complete"
- db_api.subcloud_update(self.ctx,
- self.subcloud.id,
- deploy_status=DEPLOY_STATE_DONE)
-
- extra_args = {"sysadmin_password": FAKE_PASSWORD,
- "force": False,
- 'oam_floating_ip': OAM_FLOATING_IP}
- self.strategy = fake_strategy.create_fake_strategy(
- self.ctx,
- self.DEFAULT_STRATEGY_TYPE,
- extra_args=extra_args)
-
- # invoke the strategy state operation on the orch thread
- self.worker.perform_state_action(self.strategy_step)
-
- # Verify the transition to the expected next state
- self.assert_step_updated(self.strategy_step.subcloud_id, next_state)
-
def test_prestage_prepare_no_extra_args(self):
next_state = STRATEGY_STATE_FAILED
# Update the subcloud to have deploy state as "complete"
@@ -154,73 +132,6 @@ class TestPrestagePreCheckState(TestPrestage):
self.assertTrue('test' in str(new_strategy_step.details))
-class TestPrestagePrepareState(TestPrestage):
-
- def setUp(self):
- super(TestPrestagePrepareState, self).setUp()
-
- # Add the subcloud being processed by this unit test
- # The subcloud is online, managed with deploy_state 'installed'
- self.subcloud = self.setup_subcloud()
-
- p = mock.patch('dcmanager.common.prestage.prestage_prepare')
- self.mock_prestage_prepare = p.start()
- self.addCleanup(p.stop)
-
- # Add the strategy_step state being processed by this unit test
- self.strategy_step = \
- self.setup_strategy_step(self.subcloud.id, STRATEGY_STATE_PRESTAGE_PREPARE)
-
- def test_prestage_prestage_prepare(self):
-
- next_state = STRATEGY_STATE_PRESTAGE_PACKAGES
- # Update the subcloud to have deploy state as "complete"
- db_api.subcloud_update(self.ctx,
- self.subcloud.id,
- deploy_status=DEPLOY_STATE_DONE)
-
- oam_floating_ip_dict = {
- self.subcloud.name: OAM_FLOATING_IP
- }
- extra_args = {"sysadmin_password": FAKE_PASSWORD,
- "force": False,
- "oam_floating_ip_dict": oam_floating_ip_dict}
- self.strategy = fake_strategy.create_fake_strategy(
- self.ctx,
- self.DEFAULT_STRATEGY_TYPE,
- extra_args=extra_args)
-
- # invoke the strategy state operation on the orch thread
- self.worker.perform_state_action(self.strategy_step)
-
- # Verify the transition to the expected next state
- self.assert_step_updated(self.strategy_step.subcloud_id, next_state)
-
- def test_prestage_prestage_prepare_no_password(self):
-
- next_state = STRATEGY_STATE_FAILED
- # Update the subcloud to have deploy state as "complete"
- db_api.subcloud_update(self.ctx,
- self.subcloud.id,
- deploy_status=DEPLOY_STATE_DONE)
-
- oam_floating_ip_dict = {
- self.subcloud.name: OAM_FLOATING_IP
- }
- extra_args = {"force": False,
- "oam_floating_ip_dict": oam_floating_ip_dict}
- self.strategy = fake_strategy.create_fake_strategy(
- self.ctx,
- self.DEFAULT_STRATEGY_TYPE,
- extra_args=extra_args)
-
- # invoke the strategy state operation on the orch thread
- self.worker.perform_state_action(self.strategy_step)
-
- # Verify the transition to the expected next state
- self.assert_step_updated(self.strategy_step.subcloud_id, next_state)
-
-
class TestPrestagePackageState(TestPrestage):
def setUp(self):
diff --git a/distributedcloud/dcmanager/tests/unit/orchestrator/test_sw_update_manager.py b/distributedcloud/dcmanager/tests/unit/orchestrator/test_sw_update_manager.py
index eebc5acda..23079d6b3 100644
--- a/distributedcloud/dcmanager/tests/unit/orchestrator/test_sw_update_manager.py
+++ b/distributedcloud/dcmanager/tests/unit/orchestrator/test_sw_update_manager.py
@@ -23,6 +23,7 @@ from dcmanager.common import consts
from dcmanager.common import context
from dcmanager.common import exceptions
from dcmanager.common import prestage
+from dcmanager.common import utils as cutils
from dcmanager.db.sqlalchemy import api as db_api
from dcmanager.orchestrator import sw_update_manager
@@ -639,13 +640,15 @@ class TestSwUpdateManager(base.DCManagerTestCase):
self.assertEqual(subcloud_ids, subcloud_id_processed)
self.assertEqual(stage, stage_processed)
+ @mock.patch.object(cutils, 'get_systemcontroller_installed_loads')
@mock.patch.object(prestage, 'initial_subcloud_validate')
@mock.patch.object(prestage, '_get_system_controller_upgrades')
@mock.patch.object(sw_update_manager, 'PatchOrchThread')
def test_create_sw_prestage_strategy_parallel(self,
mock_patch_orch_thread,
mock_controller_upgrade,
- mock_initial_subcloud_validate):
+ mock_initial_subcloud_validate,
+ mock_installed_loads):
# Create fake subclouds and respective status
# Subcloud1 will be prestaged
@@ -679,6 +682,9 @@ class TestSwUpdateManager(base.DCManagerTestCase):
data = copy.copy(FAKE_SW_PRESTAGE_DATA)
fake_password = (base64.b64encode('testpass'.encode("utf-8"))).decode('ascii')
data['sysadmin_password'] = fake_password
+ fake_release = '21.12'
+ data[consts.PRESTAGE_REQUEST_RELEASE] = fake_release
+ mock_installed_loads.return_value = [fake_release]
um = sw_update_manager.SwUpdateManager()
strategy_dict = um.create_sw_update_strategy(self.ctxt, payload=data)
@@ -690,6 +696,9 @@ class TestSwUpdateManager(base.DCManagerTestCase):
self.assertEqual(strategy_dict['max-parallel-subclouds'], 2)
self.assertEqual(strategy_dict['subcloud-apply-type'],
consts.SUBCLOUD_APPLY_TYPE_PARALLEL)
+ self.assertEqual(fake_release,
+ strategy_dict['extra-args'].get(
+ consts.PRESTAGE_SOFTWARE_VERSION))
# Verify the strategy step list
subcloud_ids = [1, 3, 5, 6, 7]