Synchronize install values with secondary subclouds
This commit ensures that the install values of secondary subclouds on the
non-primary site are synchronized with those on the primary site. This
synchronization enables users to perform operations requiring the
'data_install' field without having to execute 'dcmanager subcloud update
--install-values <install_values.yaml> <secondary_subcloud_ref>' beforehand.

Validation of install values is skipped on the secondary side when the
request comes from a peer system. This is because the user may not intend
to perform operations requiring the 'data_install' field. The values are
already validated on the primary site. Without skipping validation, the
sync operation could fail if the non-primary site does not have the load
already imported.

Test Plan:
1. PASS: After the initial association sync, verify that secondary
   subclouds created on the non-primary site have the 'data_install'
   field synchronized with subclouds on the primary site.
2. PASS: After the initial sync is completed, update the install values
   on the primary site and re-sync. Verify that the new install values
   were synchronized between peers.
3. PASS: Run the previous test when the non-primary site does not have
   the load imported and verify that the sync still works.
4. PASS: Delete the 'data_install' field from the secondary subcloud on
   the non-primary site and then run 'dcmanager peer-group-association
   sync' on the primary site. Verify that the secondary subcloud
   'data_install' field is updated with the correct value.
5. PASS: On the non-primary site, use 'dcmanager subcloud update' to
   update the install values, verifying that the command still works as
   expected.
6. PASS: Repeat the previous test, but this time using an install values
   file without required fields, and verify that the operation fails
   during the install values validation.

Closes-Bug: 2049651
Change-Id: I4dbaaa16e40f6a214bbb93f9e48f614c10de7d42
Signed-off-by: Gustavo Herzmann <gustavo.herzmann@windriver.com>
This commit is contained in:
parent
4438b8fd55
commit
5fcf4aee3c
|
@ -231,7 +231,8 @@ class DcmanagerClient(base.DriverBase):
|
|||
fields.update(data)
|
||||
enc = MultipartEncoder(fields=fields)
|
||||
headers = {"X-Auth-Token": self.token,
|
||||
"Content-Type": enc.content_type}
|
||||
"Content-Type": enc.content_type,
|
||||
"User-Agent": consts.DCMANAGER_V1_HTTP_AGENT}
|
||||
response = requests.post(url, headers=headers, data=enc,
|
||||
timeout=self.timeout)
|
||||
|
||||
|
|
|
@ -754,7 +754,15 @@ class SubcloudsController(object):
|
|||
peer_group_id = pgrp.id
|
||||
|
||||
if consts.INSTALL_VALUES in payload:
|
||||
psd_common.validate_install_values(payload, subcloud)
|
||||
# install_values of secondary subclouds are validated on
|
||||
# peer site
|
||||
if utils.subcloud_is_secondary_state(subcloud.deploy_status) \
|
||||
and utils.is_req_from_another_dc(request):
|
||||
LOG.debug("Skipping install_values validation for subcloud "
|
||||
f"{subcloud.name}. Subcloud is secondary and "
|
||||
"request is from a peer site.")
|
||||
else:
|
||||
psd_common.validate_install_values(payload, subcloud)
|
||||
payload['data_install'] = json.dumps(payload[consts.INSTALL_VALUES])
|
||||
|
||||
try:
|
||||
|
|
|
@ -1004,7 +1004,14 @@ def pre_deploy_create(payload: dict, context: RequestContext,
|
|||
|
||||
validate_subcloud_config(context, payload)
|
||||
|
||||
validate_install_values(payload)
|
||||
# install_values of secondary subclouds are validated on peer site
|
||||
if consts.DEPLOY_STATE_SECONDARY in payload and \
|
||||
utils.is_req_from_another_dc(request):
|
||||
LOG.debug("Skipping install_values validation for subcloud "
|
||||
f"{payload['name']}. Subcloud is secondary and "
|
||||
"request is from a peer site.")
|
||||
else:
|
||||
validate_install_values(payload)
|
||||
|
||||
validate_k8s_version(payload)
|
||||
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
#
|
||||
|
||||
import base64
|
||||
from contextlib import nullcontext
|
||||
import functools
|
||||
import json
|
||||
import tempfile
|
||||
|
@ -25,10 +26,12 @@ from dcmanager.common.i18n import _
|
|||
from dcmanager.common import manager
|
||||
from dcmanager.common import utils
|
||||
from dcmanager.db import api as db_api
|
||||
from dcmanager.db.sqlalchemy import models
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
TEMP_BOOTSTRAP_PREFIX = 'peer_subcloud_bootstrap_yaml'
|
||||
TEMP_INSTALL_PREFIX = 'peer_subcloud_install_yaml'
|
||||
MAX_PARALLEL_SUBCLOUD_SYNC = 10
|
||||
MAX_PARALLEL_SUBCLOUD_DELETE = 10
|
||||
VERIFY_SUBCLOUD_SYNC_VALID = 'valid'
|
||||
|
@ -168,12 +171,16 @@ class SystemPeerManager(manager.Manager):
|
|||
|
||||
return failed_subclouds, error_msg
|
||||
|
||||
def _add_or_update_subcloud(self, dc_client, peer_controller_gateway_ip,
|
||||
dc_peer_pg_id, subcloud):
|
||||
def _add_or_update_subcloud(self, dc_client: DcmanagerClient,
|
||||
peer_controller_gateway_ip: str,
|
||||
dc_peer_pg_id: int,
|
||||
subcloud: models.Subcloud):
|
||||
"""Add or update subcloud on peer site in parallel."""
|
||||
with tempfile.NamedTemporaryFile(prefix=TEMP_BOOTSTRAP_PREFIX,
|
||||
suffix=".yaml",
|
||||
mode='w') as temp_file:
|
||||
with tempfile.NamedTemporaryFile(
|
||||
prefix=TEMP_BOOTSTRAP_PREFIX, suffix=".yaml", mode="w"
|
||||
) as temp_bootstrap_file, tempfile.NamedTemporaryFile(
|
||||
prefix=TEMP_INSTALL_PREFIX, suffix=".yaml", mode="w"
|
||||
) if subcloud.data_install else nullcontext() as temp_install_file:
|
||||
subcloud_name = subcloud.get('name')
|
||||
region_name = subcloud.get('region_name')
|
||||
rehome_data = json.loads(subcloud.rehome_data)
|
||||
|
@ -182,22 +189,29 @@ class SystemPeerManager(manager.Manager):
|
|||
subcloud_payload['systemcontroller_gateway_address'] = \
|
||||
peer_controller_gateway_ip
|
||||
|
||||
yaml.dump(subcloud_payload, temp_file)
|
||||
yaml.dump(subcloud_payload, temp_bootstrap_file)
|
||||
|
||||
files = {"bootstrap_values": temp_file.name}
|
||||
files = {consts.BOOTSTRAP_VALUES: temp_bootstrap_file.name}
|
||||
data = {
|
||||
"bootstrap-address": subcloud_payload['bootstrap-address'],
|
||||
consts.BOOTSTRAP_ADDRESS: subcloud_payload[
|
||||
consts.BOOTSTRAP_ADDRESS],
|
||||
"region_name": subcloud.region_name,
|
||||
"location": subcloud.location,
|
||||
"description": subcloud.description
|
||||
}
|
||||
|
||||
if temp_install_file:
|
||||
data_install = json.loads(subcloud.data_install)
|
||||
yaml.dump(data_install, temp_install_file)
|
||||
files[consts.INSTALL_VALUES] = temp_install_file.name
|
||||
|
||||
try:
|
||||
# Sync subcloud information to peer site
|
||||
peer_subcloud = self.get_peer_subcloud(dc_client, subcloud_name)
|
||||
if peer_subcloud:
|
||||
dc_peer_subcloud = dc_client.update_subcloud(subcloud_name,
|
||||
files, data)
|
||||
dc_peer_subcloud = dc_client.update_subcloud(region_name,
|
||||
files, data,
|
||||
is_region_name=True)
|
||||
LOG.info(f"Updated Subcloud {dc_peer_subcloud.get('name')} "
|
||||
"(region_name: "
|
||||
f"{dc_peer_subcloud.get('region-name')}) on peer "
|
||||
|
|
|
@ -218,19 +218,22 @@ class TestSystemPeerManager(base.DCManagerTestCase):
|
|||
}
|
||||
}
|
||||
# Create local dc subcloud1 mock data in database
|
||||
data_install = json.dumps(fake_subcloud.FAKE_SUBCLOUD_INSTALL_VALUES)
|
||||
self.create_subcloud_with_pg_static(
|
||||
self.ctx,
|
||||
peer_group_id=peer_group.id,
|
||||
rehome_data=json.dumps(rehome_data),
|
||||
name='subcloud1',
|
||||
region_name='subcloud1')
|
||||
region_name='subcloud1',
|
||||
data_install=data_install)
|
||||
# Create local dc subcloud2 mock data in database
|
||||
self.create_subcloud_with_pg_static(
|
||||
self.ctx,
|
||||
peer_group_id=peer_group.id,
|
||||
rehome_data=json.dumps(rehome_data),
|
||||
name='subcloud2',
|
||||
region_name='subcloud2')
|
||||
region_name='subcloud2',
|
||||
data_install=None)
|
||||
peer_subcloud1 = FAKE_SITE1_SUBCLOUD1_DATA
|
||||
peer_subcloud2 = FAKE_SITE1_SUBCLOUD2_DATA
|
||||
peer_subcloud3 = FAKE_SITE1_SUBCLOUD3_DATA
|
||||
|
@ -256,7 +259,7 @@ class TestSystemPeerManager(base.DCManagerTestCase):
|
|||
mock.call(peer_subcloud3.get('name'))
|
||||
])
|
||||
mock_dc_client().update_subcloud.assert_has_calls([
|
||||
mock.call('subcloud1', mock.ANY, mock.ANY),
|
||||
mock.call('subcloud1', mock.ANY, mock.ANY, is_region_name=True),
|
||||
mock.call(FAKE_SITE1_SUBCLOUD1_REGION_NAME, files=None,
|
||||
data={'peer_group': str(FAKE_SITE1_PEER_GROUP_ID)},
|
||||
is_region_name=True),
|
||||
|
|
Loading…
Reference in New Issue