diff --git a/distributedcloud-client/pylint.rc b/distributedcloud-client/.pylintrc similarity index 61% rename from distributedcloud-client/pylint.rc rename to distributedcloud-client/.pylintrc index a258977..60b7b79 100644 --- a/distributedcloud-client/pylint.rc +++ b/distributedcloud-client/.pylintrc @@ -1,6 +1,6 @@ [MASTER] # Specify a configuration file. -rcfile=pylint.rc +rcfile=.pylintrc # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may @@ -11,7 +11,7 @@ extension-pkg-whitelist=greenlet #init-hook= # Add files or directories to the blacklist. They should be base names, not paths. -ignore=tests +ignore= # Pickle collected data for later comparisons. persistent=yes @@ -20,136 +20,29 @@ persistent=yes # usually to register additional checkers. load-plugins= -# TODO: Bo remove the following once we no longer use python2 -# Remove option files-output, bad-functions, max-branches -# Remove E1606, E1607, E1608, E1610, E1611, E1641, W0403, W1112, W1641 -# Remove enable section since python3 checker will then by default [MESSAGES CONTROL] -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time. -# -# Python3 checker: -# -# E1601: print-statement -# E1602: parameter-unpacking -# E1603: unpacking-in-except -# E1604: old-raise-syntax -# E1605: backtick -# E1606: long-suffix -# E1607: old-ne-operator -# E1608: old-octal-literal -# E1609: import-star-module-level -# E1610: non-ascii-bytes-literal -# E1611: invalid-unicode-literal -# W1601: apply-builtin -# W1602: basestring-builtin -# W1603: buffer-builtin -# W1604: cmp-builtin -# W1605: coerce-builtin -# W1606: execfile-builtin -# W1607: file-builtin -# W1608: long-builtin -# W1609: raw_input-builtin -# W1610: reduce-builtin -# W1611: standarderror-builtin -# W1612: unicode-builtin -# W1613: xrange-builtin -# W1614: coerce-method -# W1615: delslice-method -# W1616: getslice-method -# W1617: setslice-method -# W1618: no-absolute-import -# W1619: old-division -# W1620: dict-iter-method -# W1621: dict-view-method -# W1622: next-method-called -# W1623: metaclass-assignment -# W1624: indexing-exception -# W1625: raising-string -# W1626: reload-builtin -# W1627: oct-method -# W1628: hex-method -# W1629: nonzero-method -# W1630: cmp-method -# W1632: input-builtin -# W1633: round-builtin -# W1634: intern-builtin -# W1635: unichr-builtin -# W1636: map-builtin-not-iterating -# W1637: zip-builtin-not-iterating -# W1638: range-builtin-not-iterating -# W1639: filter-builtin-not-iterating -# W1640: using-cmp-argument -# W1641: eq-without-hash -# W1642: div-method -# W1643: idiv-method -# W1644: rdiv-method -# W1645: exception-message-attribute -# W1646: invalid-str-codec -# W1647: sys-max-int -# W1648: bad-python3-import -# W1649: deprecated-string-function -# W1650: deprecated-str-translate-call -# W1651: deprecated-itertools-function -# W1652: deprecated-types-field -# W1653: next-method-defined -# W1654: dict-items-not-iterating -# W1655: dict-keys-not-iterating -# W1656: dict-values-not-iterating -# W1657: deprecated-operator-function -# W1658: deprecated-urllib-function -# W1659: xreadlines-attribute -# W1660: deprecated-sys-function -# W1661: exception-escape -# W1662: comprehension-escape -enable=E1601,E1602,E1603,E1604,E1605,E1606,E1607,E1608,E1609,E1610,E1611,W1601, - 
W1602,W1603,W1604,W1605,W1606,W1607,W1608,W1609,W1610,W1611,W1612,W1613, - W1614,W1615,W1616,W1617,W1618,W1619,W1620,W1621,W1622,W1623,W1624,W1625, - W1626,W1627,W1628,W1629,W1630,W1632,W1633,W1634,W1635,W1636,W1637,W1638, - W1639,W1640,W1641,W1642,W1643,W1644,W1645,W1646,W1647,W1648,W1649,W1650, - W1651,W1652,W1653,W1654,W1655,W1656,W1657,W1658,W1659,W1660,W1661,W1662 - # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). -# http://pylint.pycqa.org/en/latest/technical_reference/features.html -# +# https://pylint.readthedocs.io/en/latest/user_guide/output.html#source-code-analysis-section # R detect Refactor for a "good practice" metric violation -# C detect Convention for coding standard violation -# E1102: not-callable -# W0107: unnecessary-pass -# W0212: protected-access -# W0403: relative-import (typically caused by six) -# W0603: global-statement -# W0613: unused-argument -# W0622: redefined-builtin -# W0703: broad-except -# W1113: keyword-arg-before-vararg -# W1505: deprecated-method -# W1618: no-absolute-import -# -# Following alarms are suppressed because python2 does not support the new pylint -# suggested syntax change. Need to unsuppress once we move away from python2 -# W0707: raise-missing-from -# W1514: unspecified-encoding -# -disable=C,R, - E1102,W0107,W0212,W0403,W0612,W0613,W0603, - W0622,W0703,W0707,W1112,W1505,W1514,W1618 +# W detect Warning for stylistic problems, or minor programming issues +disable=R, + broad-except, + invalid-name, + missing-class-docstring, + missing-function-docstring, + missing-module-docstring, + protected-access, + too-many-lines, [REPORTS] # Set the output format. Available formats are text, parseable, colorized, msvs # (visual studio) and html output-format=text -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". -files-output=no - # Tells whether to display a full report or only the messages reports=no @@ -205,9 +98,6 @@ generated-members=REQUEST,acl_users,aq_parent [BASIC] -# List of builtins function names that should not be used, separated by a comma -bad-functions=map,filter,apply,input - # Regular expression which should only match correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ @@ -297,7 +187,7 @@ max-locals=15 max-returns=6 # Maximum number of branch for function / method body -max-branchs=12 +max-branches=12 # Maximum number of statements in function / method body max-statements=50 diff --git a/distributedcloud-client/dcmanagerclient/api/base.py b/distributedcloud-client/dcmanagerclient/api/base.py index 7d1396a..fd84988 100644 --- a/distributedcloud-client/dcmanagerclient/api/base.py +++ b/distributedcloud-client/dcmanagerclient/api/base.py @@ -14,60 +14,82 @@ # limitations under the License. 
# -from bs4 import BeautifulSoup import json +from bs4 import BeautifulSoup + from dcmanagerclient import exceptions class Resource(object): # This will be overridden by the actual resource - resource_name = 'Something' + resource_name = "Something" class Subcloud(Resource): - resource_name = 'subclouds' + resource_name = "subclouds" _PAYLOAD_NAME_MAP = { - 'id': 'subcloud_id', - 'name': 'name', - 'description': 'description', - 'location': 'location', - 'software-version': 'software_version', - 'management-state': 'management_state', - 'availability-status': 'availability_status', - 'deploy-status': 'deploy_status', - 'error-description': 'error_description', - 'management-subnet': 'management_subnet', - 'management-start-ip': 'management_start_ip', - 'management-end-ip': 'management_end_ip', - 'management-gateway-ip': 'management_gateway_ip', - 'systemcontroller-gateway-ip': 'systemcontroller_gateway_ip', - 'created-at': 'created_at', - 'updated-at': 'updated_at', - 'group_id': 'group_id', - 'peer_group_id': 'peer_group_id', - 'rehome_data': 'rehome_data', - 'sync_status': 'sync_status', - 'endpoint_sync_status': 'endpoint_sync_status', - 'backup-status': 'backup_status', - 'backup-datetime': 'backup_datetime', - 'prestage-software-version': 'prestage_software_version', - 'prestage-status': 'prestage_status', - 'prestage-versions': 'prestage_versions', - 'region-name': 'region_name' - } + "id": "subcloud_id", + "name": "name", + "description": "description", + "location": "location", + "software-version": "software_version", + "management-state": "management_state", + "availability-status": "availability_status", + "deploy-status": "deploy_status", + "error-description": "error_description", + "management-subnet": "management_subnet", + "management-start-ip": "management_start_ip", + "management-end-ip": "management_end_ip", + "management-gateway-ip": "management_gateway_ip", + "systemcontroller-gateway-ip": "systemcontroller_gateway_ip", + "created-at": "created_at", + "updated-at": "updated_at", + "group_id": "group_id", + "peer_group_id": "peer_group_id", + "rehome_data": "rehome_data", + "sync_status": "sync_status", + "endpoint_sync_status": "endpoint_sync_status", + "backup-status": "backup_status", + "backup-datetime": "backup_datetime", + "prestage-software-version": "prestage_software_version", + "prestage-status": "prestage_status", + "prestage-versions": "prestage_versions", + "region-name": "region_name", + } - def __init__(self, manager, subcloud_id, name, description, location, - software_version, management_state, availability_status, - deploy_status, management_subnet, management_start_ip, - management_end_ip, management_gateway_ip, - systemcontroller_gateway_ip, created_at, updated_at, - group_id, sync_status="unknown", endpoint_sync_status=None, - backup_status=None, backup_datetime=None, - error_description=None, prestage_software_version=None, - peer_group_id=None, rehome_data=None, region_name=None, - prestage_status=None, prestage_versions=None): + def __init__( + self, + manager, + subcloud_id, + name, + description, + location, + software_version, + management_state, + availability_status, + deploy_status, + management_subnet, + management_start_ip, + management_end_ip, + management_gateway_ip, + systemcontroller_gateway_ip, + created_at, + updated_at, + group_id, + sync_status="unknown", + endpoint_sync_status=None, + backup_status=None, + backup_datetime=None, + error_description=None, + prestage_software_version=None, + peer_group_id=None, + rehome_data=None, + 
region_name=None, + prestage_status=None, + prestage_versions=None, + ): if endpoint_sync_status is None: endpoint_sync_status = {} self.manager = manager @@ -104,7 +126,7 @@ class Subcloud(Resource): @classmethod def from_payload(cls, manager, payload): """Returns a class instance based on a single payload.""" - parameters = {'manager': manager} + parameters = {"manager": manager} # Converts payload parameter name to match the class attributes for payload_param, value in payload.items(): @@ -138,12 +160,12 @@ class ResourceManager(object): for resource_data in json_object: resource.append( self.resource_class( # pylint: disable=not-callable - self, - resource_data, - json_object[resource_data])) + self, resource_data, json_object[resource_data] + ) + ) return resource - def _list(self, url, response_key=None): + def _list(self, url, _response_key=None): resp = self.http_client.get(url) if resp.status_code != 200: self._raise_api_exception(resp) @@ -173,14 +195,16 @@ class ResourceManager(object): json_objects = [json_response_key[item] for item in json_response_key] resource = [] for json_object in json_objects: - data = json_object.get('usage') + data = json_object.get("usage") for values in data: resource.append( self.resource_class( # pylint: disable=not-callable self, values, - json_object['limits'][values], - json_object['usage'][values])) + json_object["limits"][values], + json_object["usage"][values], + ) + ) return resource def _delete(self, url): @@ -190,25 +214,26 @@ class ResourceManager(object): def _raise_api_exception(self, resp): error_html = resp.content - soup = BeautifulSoup(error_html, 'html.parser') + soup = BeautifulSoup(error_html, "html.parser") # Get the raw html with get_text, strip out the blank lines on # front and back, then get rid of the first line of error code # so that we are left with just the meaningful error text. try: - line_list = soup.body.get_text().lstrip().rstrip().split('\n')[1:] + line_list = soup.body.get_text().lstrip().rstrip().split("\n")[1:] error_msg = line_list[0].lstrip().rstrip() for line in line_list[1:]: - error_msg += ' ' + line.lstrip().rstrip() + error_msg += " " + line.lstrip().rstrip() except Exception: error_msg = resp.content - raise exceptions.APIException(error_code=resp.status_code, - error_message=error_msg) + raise exceptions.APIException( + error_code=resp.status_code, error_message=error_msg + ) def get_json(response): """Get JSON representation of response.""" - json_field_or_function = getattr(response, 'json', None) + json_field_or_function = getattr(response, "json", None) if callable(json_field_or_function): return response.json() else: diff --git a/distributedcloud-client/dcmanagerclient/api/httpclient.py b/distributedcloud-client/dcmanagerclient/api/httpclient.py index f84aa19..056d9f6 100644 --- a/distributedcloud-client/dcmanagerclient/api/httpclient.py +++ b/distributedcloud-client/dcmanagerclient/api/httpclient.py @@ -1,7 +1,7 @@ # Copyright 2013 - Mirantis, Inc. # Copyright 2016 - StackStorm, Inc. # Copyright 2016 - Ericsson AB. -# Copyright (c) 2017-2021 Wind River Systems, Inc. +# Copyright (c) 2017-2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -17,91 +17,100 @@
 #
 
 import copy
+import logging
 import os
 
+import osprofiler.web
 import requests
 
-import logging
-
-import osprofiler.web
-
+CONTENT_TYPE = "content-type"
 LOG = logging.getLogger(__name__)
 
 
 def log_request(func):
     def decorator(self, *args, **kwargs):
         resp = func(self, *args, **kwargs)
-        LOG.debug("HTTP %s %s %d %s",
-                  resp.request.method, resp.url, resp.status_code, resp.text)
+        LOG.debug(
+            "HTTP %s %s %d %s",
+            resp.request.method,
+            resp.url,
+            resp.status_code,
+            resp.text,
+        )
         return resp
 
     return decorator
 
 
 class HTTPClient(object):
-    def __init__(self, base_url, token=None, project_id=None, user_id=None,
-                 cacert=None, insecure=False):
+    def __init__(
+        self,
+        base_url,
+        token=None,
+        project_id=None,
+        user_id=None,
+        cacert=None,
+        insecure=False,
+    ):
         self.base_url = base_url
         self.token = token
         self.project_id = project_id
         self.user_id = user_id
         self.ssl_options = {}
 
-        if self.base_url.startswith('https'):
+        if self.base_url.startswith("https"):
             if cacert and not os.path.exists(cacert):
-                raise ValueError('Unable to locate cacert file '
-                                 'at %s.' % cacert)
+                raise ValueError(f"Unable to locate cacert file at {cacert}.")
 
             if cacert and insecure:
-                LOG.warning('Client is set to not verify even though '
-                            'cacert is provided.')
+                LOG.warning(
+                    "Client is set to not verify even though cacert is provided."
+                )
 
             if insecure:
-                self.ssl_options['verify'] = False
+                self.ssl_options["verify"] = False
             else:
-                self.ssl_options['verify'] = True if not cacert else cacert
+                self.ssl_options["verify"] = True if not cacert else cacert
 
     @log_request
     def get(self, url, headers=None):
-        options = self._get_request_options('get', headers)
+        options = self._get_request_options("get", headers)
 
         return requests.get(self.base_url + url, **options)
 
     @log_request
     def post(self, url, body, headers=None):
-        options = self._get_request_options('post', headers)
+        options = self._get_request_options("post", headers)
 
         return requests.post(self.base_url + url, body, **options)
 
     @log_request
     def put(self, url, body, headers=None):
-        options = self._get_request_options('put', headers)
+        options = self._get_request_options("put", headers)
 
         return requests.put(self.base_url + url, body, **options)
 
     @log_request
     def patch(self, url, body, headers=None):
-        options = self._get_request_options('patch', headers)
+        options = self._get_request_options("patch", headers)
 
         return requests.patch(self.base_url + url, body, **options)
 
     @log_request
     def delete(self, url, headers=None):
-        options = self._get_request_options('delete', headers)
+        options = self._get_request_options("delete", headers)
 
         return requests.delete(self.base_url + url, **options)
 
     def _get_request_options(self, method, headers):
         headers = self._update_headers(headers)
 
-        CONTENT_TYPE = 'content-type'
-
-        if method in ['post', 'put', 'patch'] and CONTENT_TYPE not in headers:
-            content_type = headers.get(CONTENT_TYPE, 'application/json')
+        if method in ["post", "put", "patch"] and CONTENT_TYPE not in headers:
+            content_type = headers.get(CONTENT_TYPE, "application/json")
             headers[CONTENT_TYPE] = content_type
 
         options = copy.deepcopy(self.ssl_options)
-        options['headers'] = headers
+        options["headers"] = headers
 
         return options
 
@@ -109,17 +118,17 @@ class HTTPClient(object):
         if not headers:
             headers = {}
 
-        token = headers.get('x-auth-token', self.token)
+        token = headers.get("x-auth-token", self.token)
         if token:
-            headers['x-auth-token'] = token
+            headers["x-auth-token"] = token
 
-        project_id = headers.get('X-Project-Id', self.project_id)
+        project_id = 
headers.get("X-Project-Id", self.project_id) if project_id: - headers['X-Project-Id'] = project_id + headers["X-Project-Id"] = project_id - user_id = headers.get('X-User-Id', self.user_id) + user_id = headers.get("X-User-Id", self.user_id) if user_id: - headers['X-User-Id'] = user_id + headers["X-User-Id"] = user_id # Add headers for osprofiler. headers.update(osprofiler.web.get_trace_id_headers()) diff --git a/distributedcloud-client/dcmanagerclient/api/v1/client.py b/distributedcloud-client/dcmanagerclient/api/v1/client.py index 0c219b1..2e6279a 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/client.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/client.py @@ -15,11 +15,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import six import keystoneauth1.identity.generic as auth_plugin -from keystoneauth1 import session as ks_session import osprofiler.profiler +import six +from keystoneauth1 import session as ks_session from dcmanagerclient.api import httpclient from dcmanagerclient.api.v1 import alarm_manager as am @@ -48,40 +48,53 @@ _DEFAULT_DCMANAGER_URL = "http://localhost:8119/v1.0" class Client(object): """Class where the communication from KB to Keystone happens.""" - def __init__(self, dcmanager_url=None, username=None, api_key=None, - project_name=None, auth_url=None, project_id=None, - endpoint_type='publicURL', service_type='dcmanager', - auth_token=None, user_id=None, cacert=None, insecure=False, - profile=None, auth_type='keystone', client_id=None, - client_secret=None, session=None, **kwargs): + def __init__( + self, + dcmanager_url=None, + username=None, + api_key=None, + project_name=None, + auth_url=None, + project_id=None, + endpoint_type="publicURL", + service_type="dcmanager", + auth_token=None, + user_id=None, + cacert=None, + insecure=False, + profile=None, + auth_type="keystone", + _client_id=None, + _client_secret=None, + session=None, + **kwargs, + ): """DC Manager communicates with Keystone to fetch necessary values.""" if dcmanager_url and not isinstance(dcmanager_url, six.string_types): - raise RuntimeError('DC Manager url should be a string.') + raise RuntimeError("DC Manager url should be a string.") if auth_url or session: - if auth_type == 'keystone': - (dcmanager_url, auth_token, project_id, user_id) = ( - authenticate( - dcmanager_url, - username, - api_key, - project_name, - auth_url, - project_id, - endpoint_type, - service_type, - auth_token, - user_id, - session, - cacert, - insecure, - **kwargs - ) + if auth_type == "keystone": + (dcmanager_url, auth_token, project_id, user_id) = authenticate( + dcmanager_url, + username, + api_key, + project_name, + auth_url, + project_id, + endpoint_type, + service_type, + auth_token, + user_id, + session, + cacert, + insecure, + **kwargs, ) else: raise RuntimeError( - 'Invalid authentication type [value=%s, valid_values=%s]' - % (auth_type, 'keystone') + "Invalid authentication type " + f"[value={auth_type}, valid_values=keystone]" ) if not dcmanager_url: @@ -96,52 +109,66 @@ class Client(object): project_id, user_id, cacert=cacert, - insecure=insecure + insecure=insecure, ) # Create all managers self.subcloud_manager = sm.subcloud_manager(self.http_client) - self.subcloud_group_manager = \ - gm.subcloud_group_manager(self.http_client, self.subcloud_manager) - self.subcloud_peer_group_manager = \ - pm.subcloud_peer_group_manager(self.http_client, - self.subcloud_manager) - self.peer_group_association_manager = \ - 
pgam.peer_group_association_manager(self.http_client) - self.subcloud_backup_manager = sbm.subcloud_backup_manager( - self.http_client) - self.subcloud_deploy_manager = sdm.subcloud_deploy_manager( - self.http_client) + self.subcloud_group_manager = gm.subcloud_group_manager( + self.http_client, self.subcloud_manager + ) + self.subcloud_peer_group_manager = pm.subcloud_peer_group_manager( + self.http_client, self.subcloud_manager + ) + self.peer_group_association_manager = pgam.peer_group_association_manager( + self.http_client + ) + self.subcloud_backup_manager = sbm.subcloud_backup_manager(self.http_client) + self.subcloud_deploy_manager = sdm.subcloud_deploy_manager(self.http_client) self.system_peer_manager = sp.system_peer_manager( - self.http_client, self.subcloud_peer_group_manager) + self.http_client, self.subcloud_peer_group_manager + ) self.alarm_manager = am.alarm_manager(self.http_client) self.fw_update_manager = fum.fw_update_manager(self.http_client) - self.kube_rootca_update_manager = \ - krum.kube_rootca_update_manager(self.http_client) + self.kube_rootca_update_manager = krum.kube_rootca_update_manager( + self.http_client + ) self.kube_upgrade_manager = kupm.kube_upgrade_manager(self.http_client) self.sw_deploy_manager = swdm.SwDeployManager(self.http_client) self.sw_patch_manager = spm.sw_patch_manager(self.http_client) self.sw_prestage_manager = spr.sw_prestage_manager(self.http_client) - self.sw_update_options_manager = \ - suom.sw_update_options_manager(self.http_client) + self.sw_update_options_manager = suom.sw_update_options_manager( + self.http_client + ) self.sw_upgrade_manager = supm.sw_upgrade_manager(self.http_client) - self.strategy_step_manager = \ - ssm.strategy_step_manager(self.http_client) + self.strategy_step_manager = ssm.strategy_step_manager(self.http_client) self.sw_strategy_manager = sstm.sw_strategy_manager(self.http_client) - self.phased_subcloud_deploy_manager = \ - psdm.phased_subcloud_deploy_manager(self.http_client) + self.phased_subcloud_deploy_manager = psdm.phased_subcloud_deploy_manager( + self.http_client + ) -def authenticate(dcmanager_url=None, username=None, - api_key=None, project_name=None, auth_url=None, - project_id=None, endpoint_type='publicURL', - service_type='dcmanager', auth_token=None, user_id=None, - session=None, cacert=None, insecure=False, **kwargs): +def authenticate( + dcmanager_url=None, + username=None, + api_key=None, + project_name=None, + auth_url=None, + project_id=None, + endpoint_type="publicURL", + service_type="dcmanager", + auth_token=None, + user_id=None, + session=None, + cacert=None, + insecure=False, + **kwargs, +): """Get token, project_id, user_id and Endpoint.""" - user_domain_name = kwargs.get('user_domain_name') - user_domain_id = kwargs.get('user_domain_id') - project_domain_name = kwargs.get('project_domain_name') - project_domain_id = kwargs.get('project_domain_id') + user_domain_name = kwargs.get("user_domain_name") + user_domain_id = kwargs.get("user_domain_id") + project_domain_name = kwargs.get("project_domain_name") + project_domain_id = kwargs.get("project_domain_id") if session is None: if auth_token: @@ -153,7 +180,8 @@ def authenticate(dcmanager_url=None, username=None, project_domain_name=project_domain_name, project_domain_id=project_domain_id, cacert=cacert, - insecure=insecure) + insecure=insecure, + ) elif api_key and (username or user_id): auth = auth_plugin.Password( @@ -166,11 +194,14 @@ def authenticate(dcmanager_url=None, username=None, user_domain_name=user_domain_name, 
user_domain_id,
                 project_domain_name=project_domain_name,
-                project_domain_id=project_domain_id)
+                project_domain_id=project_domain_id,
+            )
         else:
-            raise RuntimeError('You must either provide a valid token or'
-                               'a password (api_key) and a user.')
+            raise RuntimeError(
+                "You must either provide a valid token or "
+                "a password (api_key) and a user."
+            )
 
         if auth:
             session = ks_session.Session(auth=auth)
@@ -180,7 +211,7 @@ def authenticate(dcmanager_url=None, username=None,
         user_id = session.get_user_id()
     if not dcmanager_url:
         dcmanager_url = session.get_endpoint(
-            service_type=service_type,
-            interface=endpoint_type)
+            service_type=service_type, interface=endpoint_type
+        )
 
     return dcmanager_url, token, project_id, user_id
diff --git a/distributedcloud-client/dcmanagerclient/api/v1/peer_group_association_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/peer_group_association_manager.py
index b33b0f3..7454e4d 100644
--- a/distributedcloud-client/dcmanagerclient/api/v1/peer_group_association_manager.py
+++ b/distributedcloud-client/dcmanagerclient/api/v1/peer_group_association_manager.py
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2023 Wind River Systems, Inc.
+# Copyright (c) 2023-2024 Wind River Systems, Inc.
 #
 # SPDX-License-Identifier: Apache-2.0
 #
@@ -9,25 +9,27 @@ import json
 from dcmanagerclient.api import base
 from dcmanagerclient.api.base import get_json
 
-OPTION_NOT_APPLICABLE = 'Not applicable'
-SYNC_STATUS_INVALID = 'invalid'
-BASE_URL = '/peer-group-associations/'
+OPTION_NOT_APPLICABLE = "Not applicable"
+SYNC_STATUS_INVALID = "invalid"
+BASE_URL = "/peer-group-associations/"
 
 
 class PeerGroupAssociation(base.Resource):
-    resource_name = 'peer_group_association'
+    resource_name = "peer_group_association"
 
-    def __init__(self,
-                 manager,
-                 association_id,
-                 peer_group_id,
-                 system_peer_id,
-                 peer_group_priority,
-                 association_type,
-                 sync_status,
-                 sync_message,
-                 created_at,
-                 updated_at):
+    def __init__(
+        self,
+        manager,
+        association_id,
+        peer_group_id,
+        system_peer_id,
+        peer_group_priority,
+        association_type,
+        sync_status,
+        sync_message,
+        created_at,
+        updated_at,
+    ):
         self.manager = manager
         self.association_id = association_id
         self.peer_group_id = peer_group_id
@@ -44,19 +46,23 @@ class peer_group_association_manager(base.ResourceManager):
     resource_class = PeerGroupAssociation
 
     def _json_to_resource(self, json_object):
-        sync_message = None if 'sync-message' not in json_object else \
-            json_object['sync-message']
+        sync_message = (
+            None
+            if "sync-message" not in json_object
+            else json_object["sync-message"]
+        )
         return self.resource_class(
             self,
-            association_id=json_object['id'],
-            peer_group_id=json_object['peer-group-id'],
-            system_peer_id=json_object['system-peer-id'],
-            peer_group_priority=json_object['peer-group-priority'],
-            association_type=json_object['association-type'],
-            sync_status=json_object['sync-status'],
+            association_id=json_object["id"],
+            peer_group_id=json_object["peer-group-id"],
+            system_peer_id=json_object["system-peer-id"],
+            peer_group_priority=json_object["peer-group-priority"],
+            association_type=json_object["association-type"],
+            sync_status=json_object["sync-status"],
             sync_message=sync_message,
-            created_at=json_object['created-at'],
-            updated_at=json_object['updated-at'])
+            created_at=json_object["created-at"],
+            updated_at=json_object["updated-at"],
+        )
 
     def _peer_group_association_detail(self, url):
         resp = self.http_client.get(url)
@@ -71,7 +77,7 @@ class peer_group_association_manager(base.ResourceManager):
         if 
resp.status_code != 200: self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['peer_group_associations'] + json_objects = json_response_key["peer_group_associations"] resource = list() for json_object in json_objects: resource.append(self._json_to_resource(json_object)) @@ -117,14 +123,14 @@ class peer_group_association_manager(base.ResourceManager): return self._peer_group_association_detail(url) def sync_peer_group_association(self, associate_ref): - url = BASE_URL + '%s/sync' % associate_ref + url = BASE_URL + f"{associate_ref}/sync" return self.peer_group_association_sync(url) - def delete_peer_group_association(self, id): - url = BASE_URL + id + def delete_peer_group_association(self, peer_id): + url = BASE_URL + peer_id return self._delete(url) - def update_peer_group_association(self, id, **kwargs): + def update_peer_group_association(self, peer_id, **kwargs): data = kwargs - url = BASE_URL + id + url = BASE_URL + peer_id return self.peer_group_association_update(url, data) diff --git a/distributedcloud-client/dcmanagerclient/api/v1/phased_subcloud_deploy_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/phased_subcloud_deploy_manager.py index ad96148..7d3bbfd 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/phased_subcloud_deploy_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/phased_subcloud_deploy_manager.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Wind River Systems, Inc. +# Copyright (c) 2023-2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -9,7 +9,7 @@ from requests_toolbelt import MultipartEncoder from dcmanagerclient.api import base from dcmanagerclient.api.base import get_json -BASE_URL = '/phased-subcloud-deploy/' +BASE_URL = "/phased-subcloud-deploy/" class phased_subcloud_deploy_manager(base.ResourceManager): @@ -21,16 +21,23 @@ class phased_subcloud_deploy_manager(base.ResourceManager): def _request_method(self, method, url, body, headers): method = method.lower() if method not in ("post", "patch", "put", "get", "delete"): - raise ValueError("Invalid request method: %s" % method) + raise ValueError(f"Invalid request method: {method}") return getattr(self.http_client, method)(url, body, headers) - def _deploy_operation(self, url, body, data, method='post'): + def _deploy_operation(self, url, body, data, method="post"): fields = dict() for k, v in body.items(): - fields.update({k: (v, open(v, 'rb'),)}) + fields.update( + { + k: ( + v, + open(v, "rb"), + ) + } + ) fields.update(data) enc = MultipartEncoder(fields=fields) - headers = {'content-type': enc.content_type} + headers = {"content-type": enc.content_type} resp = self._request_method(method, url, enc, headers) if resp.status_code != 200: self._raise_api_exception(resp) @@ -39,42 +46,42 @@ class phased_subcloud_deploy_manager(base.ResourceManager): return resource def subcloud_deploy_create(self, **kwargs): - data = kwargs.get('data') - files = kwargs.get('files') + data = kwargs.get("data") + files = kwargs.get("files") return self._deploy_operation(BASE_URL, files, data) def subcloud_deploy_install(self, subcloud_ref, **kwargs): - data = kwargs.get('data') - files = kwargs.get('files') - url = BASE_URL + "%s/install" % subcloud_ref - return self._deploy_operation(url, files, data, method='patch') + data = kwargs.get("data") + files = kwargs.get("files") + url = BASE_URL + f"{subcloud_ref}/install" + return self._deploy_operation(url, files, data, method="patch") def subcloud_deploy_bootstrap(self, 
subcloud_ref, **kwargs): - data = kwargs.get('data') - files = kwargs.get('files') - url = BASE_URL + "%s/bootstrap" % subcloud_ref - return self._deploy_operation(url, files, data, method='patch') + data = kwargs.get("data") + files = kwargs.get("files") + url = BASE_URL + f"{subcloud_ref}/bootstrap" + return self._deploy_operation(url, files, data, method="patch") def subcloud_deploy_config(self, subcloud_ref, **kwargs): - data = kwargs.get('data') - files = kwargs.get('files') - url = BASE_URL + "%s/configure" % subcloud_ref - return self._deploy_operation(url, files, data, method='patch') + data = kwargs.get("data") + files = kwargs.get("files") + url = BASE_URL + f"{subcloud_ref}/configure" + return self._deploy_operation(url, files, data, method="patch") def subcloud_deploy_complete(self, subcloud_ref): - url = BASE_URL + "%s/complete" % subcloud_ref - return self._deploy_operation(url, {}, {}, method='patch') + url = BASE_URL + f"{subcloud_ref}/complete" + return self._deploy_operation(url, {}, {}, method="patch") def subcloud_deploy_abort(self, subcloud_ref, **kwargs): # Currently it's not passed neither data or files to abort, # so we pass an empty dict to use the _deploy_operation function - data = kwargs.get('data', {}) - files = kwargs.get('files', {}) - url = BASE_URL + "%s/abort" % subcloud_ref - return self._deploy_operation(url, files, data, method='patch') + data = kwargs.get("data", {}) + files = kwargs.get("files", {}) + url = BASE_URL + f"{subcloud_ref}/abort" + return self._deploy_operation(url, files, data, method="patch") def subcloud_deploy_resume(self, subcloud_ref, **kwargs): - data = kwargs.get('data') - files = kwargs.get('files') - url = BASE_URL + "%s/resume" % subcloud_ref - return self._deploy_operation(url, files, data, method='patch') + data = kwargs.get("data") + files = kwargs.get("files") + url = BASE_URL + f"{subcloud_ref}/resume" + return self._deploy_operation(url, files, data, method="patch") diff --git a/distributedcloud-client/dcmanagerclient/api/v1/strategy_step_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/strategy_step_manager.py index 4e41677..d20b628 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/strategy_step_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/strategy_step_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2017-2021 Wind River Systems, Inc. +# Copyright (c) 2017-2021, 2024 Wind River Systems, Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,10 +19,20 @@ from dcmanagerclient.api.base import get_json class StrategyStep(base.Resource): - resource_name = 'strategy_step' + resource_name = "strategy_step" - def __init__(self, manager, cloud, stage, state, details, - started_at, finished_at, created_at, updated_at): + def __init__( + self, + manager, + cloud, + stage, + state, + details, + started_at, + finished_at, + created_at, + updated_at, + ): self.manager = manager self.cloud = cloud self.stage = stage @@ -39,27 +49,28 @@ class strategy_step_manager(base.ResourceManager): def __init__(self, http_client): super(strategy_step_manager, self).__init__(http_client) self.resource_class = StrategyStep - self.steps_url = '/sw-update-strategy/steps' - self.response_key = 'strategy-steps' + self.steps_url = "/sw-update-strategy/steps" + self.response_key = "strategy-steps" def list_strategy_steps(self): return self._strategy_step_list(self.steps_url) def strategy_step_detail(self, cloud_name): - url = '{}/{}'.format(self.steps_url, cloud_name) + url = f"{self.steps_url}/{cloud_name}" return self._strategy_step_detail(url) def build_from_json(self, json_object): return self.resource_class( self, - cloud=json_object['cloud'], - stage=json_object['stage'], - state=json_object['state'], - details=json_object['details'], - started_at=json_object['started-at'], - finished_at=json_object['finished-at'], - created_at=json_object['created-at'], - updated_at=json_object['updated-at']) + cloud=json_object["cloud"], + stage=json_object["stage"], + state=json_object["state"], + details=json_object["details"], + started_at=json_object["started-at"], + finished_at=json_object["finished-at"], + created_at=json_object["created-at"], + updated_at=json_object["updated-at"], + ) def _strategy_step_list(self, url): resp = self.http_client.get(url) diff --git a/distributedcloud-client/dcmanagerclient/api/v1/subcloud_backup_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/subcloud_backup_manager.py index 7d561ac..d797be0 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/subcloud_backup_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/subcloud_backup_manager.py @@ -1,6 +1,5 @@ - # -# Copyright (c) 2022 Wind River Systems, Inc. +# Copyright (c) 2022, 2024 Wind River Systems, Inc. 
# # SPDX-License-Identifier: Apache-2.0 # @@ -25,20 +24,26 @@ class subcloud_backup_manager(base.ResourceManager): fields = dict() if files: for k, v in files.items(): - fields.update({k: (v, open(v, 'rb'),)}) + fields.update( + { + k: ( + v, + open(v, "rb"), + ) + } + ) fields.update(data) enc = MultipartEncoder(fields=fields) - headers = {'content-type': enc.content_type} + headers = {"content-type": enc.content_type} resp = self.http_client.post(url, enc, headers=headers) if resp.status_code != 200: self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['subclouds'] + json_objects = json_response_key["subclouds"] resource = [] for json_object in json_objects: - resource.append( - self.json_to_resource(json_object)) + resource.append(self.json_to_resource(json_object)) return resource def subcloud_backup_delete(self, url, data): @@ -46,7 +51,7 @@ class subcloud_backup_manager(base.ResourceManager): fields = dict() fields.update(data) enc = MultipartEncoder(fields=fields) - headers = {'content-type': enc.content_type} + headers = {"content-type": enc.content_type} resp = self.http_client.patch(url, enc, headers=headers) if resp.status_code not in {204, 207}: @@ -60,10 +65,17 @@ class subcloud_backup_manager(base.ResourceManager): fields = dict() if files: for k, v in files.items(): - fields.update({k: (v, open(v, 'rb'),)}) + fields.update( + { + k: ( + v, + open(v, "rb"), + ) + } + ) fields.update(data) enc = MultipartEncoder(fields=fields) - headers = {'content-type': enc.content_type} + headers = {"content-type": enc.content_type} resp = self.http_client.patch(url, enc, headers=headers) @@ -71,25 +83,25 @@ class subcloud_backup_manager(base.ResourceManager): self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['subclouds'] + json_objects = json_response_key["subclouds"] resource = [] for json_object in json_objects: resource.append(self.json_to_resource(json_object)) return resource def backup_subcloud_create(self, **kwargs): - files = kwargs.get('files') - data = kwargs.get('data') - url = '/subcloud-backup/' + files = kwargs.get("files") + data = kwargs.get("data") + url = "/subcloud-backup/" return self.subcloud_backup_create(url, files, data) def backup_subcloud_delete(self, release_version, **kwargs): - data = kwargs.get('data') - url = '/subcloud-backup/delete/%s' % release_version + data = kwargs.get("data") + url = f"/subcloud-backup/delete/{release_version}" return self.subcloud_backup_delete(url, data) def backup_subcloud_restore(self, **kwargs): - files = kwargs.get('files') - data = kwargs.get('data') - url = '/subcloud-backup/restore' + files = kwargs.get("files") + data = kwargs.get("data") + url = "/subcloud-backup/restore" return self.subcloud_backup_restore(url, files, data) diff --git a/distributedcloud-client/dcmanagerclient/api/v1/subcloud_deploy_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/subcloud_deploy_manager.py index 16514e2..8bdd160 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/subcloud_deploy_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/subcloud_deploy_manager.py @@ -1,4 +1,4 @@ -# Copyright (c) 2020-2023 Wind River Systems, Inc. +# Copyright (c) 2020-2024 Wind River Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +14,10 @@ # limitations under the License. 
# +from requests_toolbelt import MultipartEncoder + from dcmanagerclient.api import base from dcmanagerclient.api.base import get_json -from requests_toolbelt import MultipartEncoder class SubcloudDeploy(base.Resource): diff --git a/distributedcloud-client/dcmanagerclient/api/v1/subcloud_group_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/subcloud_group_manager.py index a2721c9..0e4173f 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/subcloud_group_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/subcloud_group_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2020-2021 Wind River Systems, Inc. +# Copyright (c) 2020-2021, 2024 Wind River Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -22,17 +22,19 @@ from dcmanagerclient.api.base import get_json class SubcloudGroup(base.Resource): - resource_name = 'subcloud_group' + resource_name = "subcloud_group" - def __init__(self, - manager, - group_id, - name, - description, - update_apply_type, - max_parallel_subclouds, - created_at, - updated_at): + def __init__( + self, + manager, + group_id, + name, + description, + update_apply_type, + max_parallel_subclouds, + created_at, + updated_at, + ): self.manager = manager self.group_id = group_id self.name = name @@ -53,13 +55,14 @@ class subcloud_group_manager(base.ResourceManager): def _json_to_resource(self, json_object): return self.resource_class( self, - group_id=json_object['id'], - name=json_object['name'], - description=json_object['description'], - update_apply_type=json_object['update_apply_type'], - max_parallel_subclouds=json_object['max_parallel_subclouds'], - created_at=json_object['created-at'], - updated_at=json_object['updated-at']) + group_id=json_object["id"], + name=json_object["name"], + description=json_object["description"], + update_apply_type=json_object["update_apply_type"], + max_parallel_subclouds=json_object["max_parallel_subclouds"], + created_at=json_object["created-at"], + updated_at=json_object["updated-at"], + ) def subcloud_group_create(self, url, data): data = json.dumps(data) @@ -86,7 +89,7 @@ class subcloud_group_manager(base.ResourceManager): if resp.status_code != 200: self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['subcloud_groups'] + json_objects = json_response_key["subcloud_groups"] resource = [] for json_object in json_objects: resource.append(self._json_to_resource(json_object)) @@ -106,35 +109,34 @@ class subcloud_group_manager(base.ResourceManager): if resp.status_code != 200: self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['subclouds'] + json_objects = json_response_key["subclouds"] resource = [] for json_object in json_objects: - resource.append( - self.subcloud_manager.json_to_resource(json_object)) + resource.append(self.subcloud_manager.json_to_resource(json_object)) return resource def add_subcloud_group(self, **kwargs): data = kwargs - url = '/subcloud-groups/' + url = "/subcloud-groups/" return self.subcloud_group_create(url, data) def list_subcloud_groups(self): - url = '/subcloud-groups/' + url = "/subcloud-groups/" return self.subcloud_group_list(url) def subcloud_group_list_subclouds(self, subcloud_group_ref): - url = '/subcloud-groups/%s/subclouds' % subcloud_group_ref + url = f"/subcloud-groups/{subcloud_group_ref}/subclouds" return self._list_subclouds_for_subcloud_group(url) def 
subcloud_group_detail(self, subcloud_group_ref): - url = '/subcloud-groups/%s' % subcloud_group_ref + url = f"/subcloud-groups/{subcloud_group_ref}" return self._subcloud_group_detail(url) def delete_subcloud_group(self, subcloud_group_ref): - url = '/subcloud-groups/%s' % subcloud_group_ref + url = f"/subcloud-groups/{subcloud_group_ref}" return self._delete(url) def update_subcloud_group(self, subcloud_group_ref, **kwargs): data = kwargs - url = '/subcloud-groups/%s' % subcloud_group_ref + url = f"/subcloud-groups/{subcloud_group_ref}" return self.subcloud_group_update(url, data) diff --git a/distributedcloud-client/dcmanagerclient/api/v1/subcloud_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/subcloud_manager.py index 4a23dfd..4713a25 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/subcloud_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/subcloud_manager.py @@ -32,10 +32,17 @@ class subcloud_manager(base.ResourceManager): def subcloud_create(self, url, body, data): fields = dict() for k, v in body.items(): - fields.update({k: (v, open(v, 'rb'),)}) + fields.update( + { + k: ( + v, + open(v, "rb"), + ) + } + ) fields.update(data) enc = MultipartEncoder(fields=fields) - headers = {'content-type': enc.content_type} + headers = {"content-type": enc.content_type} resp = self.http_client.post(url, enc, headers=headers) if resp.status_code != 200: self._raise_api_exception(resp) @@ -48,10 +55,17 @@ class subcloud_manager(base.ResourceManager): fields = dict() if body is not None: for k, v in body.items(): - fields.update({k: (v, open(v, 'rb'),)}) + fields.update( + { + k: ( + v, + open(v, "rb"), + ) + } + ) fields.update(data) enc = MultipartEncoder(fields=fields) - headers = {'content-type': enc.content_type} + headers = {"content-type": enc.content_type} resp = self.http_client.patch(url, enc, headers=headers) if resp.status_code != 200: self._raise_api_exception(resp) @@ -63,10 +77,17 @@ class subcloud_manager(base.ResourceManager): def subcloud_redeploy(self, url, body, data): fields = dict() for k, v in body.items(): - fields.update({k: (v, open(v, 'rb'),)}) + fields.update( + { + k: ( + v, + open(v, "rb"), + ) + } + ) fields.update(data) enc = MultipartEncoder(fields=fields) - headers = {'content-type': enc.content_type} + headers = {"content-type": enc.content_type} resp = self.http_client.patch(url, enc, headers=headers) if resp.status_code != 200: self._raise_api_exception(resp) @@ -83,9 +104,10 @@ class subcloud_manager(base.ResourceManager): json_object = get_json(resp) resource = list() resource.append(self.json_to_resource(json_object)) - if json_object.get('prestage_software_version'): - resource[0].prestage_software_version = \ - json_object['prestage_software_version'] + if json_object.get("prestage_software_version"): + resource[0].prestage_software_version = json_object[ + "prestage_software_version" + ] return resource def subcloud_list(self, url): @@ -93,7 +115,7 @@ class subcloud_manager(base.ResourceManager): if resp.status_code != 200: self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['subclouds'] + json_objects = json_response_key["subclouds"] resource = self.resource_class.from_payloads(self, json_objects) return resource @@ -105,46 +127,47 @@ class subcloud_manager(base.ResourceManager): subcloud = self.resource_class.from_payload(self, json_object) resource = [subcloud] if detail is not None: - resource[0].oam_floating_ip = json_object['oam_floating_ip'] - 
resource[0].deploy_config_sync_status = \ - json_object['deploy_config_sync_status'] + resource[0].oam_floating_ip = json_object["oam_floating_ip"] + resource[0].deploy_config_sync_status = json_object[ + "deploy_config_sync_status" + ] return resource def add_subcloud(self, **kwargs): - data = kwargs.get('data') - files = kwargs.get('files') - url = '/subclouds/' + data = kwargs.get("data") + files = kwargs.get("files") + url = "/subclouds/" return self.subcloud_create(url, files, data) def list_subclouds(self): - url = '/subclouds/' + url = "/subclouds/" return self.subcloud_list(url) def subcloud_additional_details(self, subcloud_ref): - url = '/subclouds/%s/detail' % subcloud_ref + url = f"/subclouds/{subcloud_ref}/detail" return self._subcloud_detail(url, True) def subcloud_detail(self, subcloud_ref): - url = '/subclouds/%s' % subcloud_ref + url = f"/subclouds/{subcloud_ref}" return self._subcloud_detail(url) def delete_subcloud(self, subcloud_ref): - url = '/subclouds/%s' % subcloud_ref + url = f"/subclouds/{subcloud_ref}" return self._delete(url) def prestage_subcloud(self, subcloud_ref, **kwargs): - data = kwargs.get('data') - url = '/subclouds/%s/prestage' % subcloud_ref + data = kwargs.get("data") + url = f"/subclouds/{subcloud_ref}/prestage" return self._subcloud_prestage(url, data) def update_subcloud(self, subcloud_ref, **kwargs): - files = kwargs.get('files') - data = kwargs.get('data') - url = '/subclouds/%s' % subcloud_ref + files = kwargs.get("files") + data = kwargs.get("data") + url = f"/subclouds/{subcloud_ref}" return self.subcloud_update(url, files, data) def redeploy_subcloud(self, subcloud_ref, **kwargs): - files = kwargs.get('files') - data = kwargs.get('data') - url = '/subclouds/%s/redeploy' % subcloud_ref + files = kwargs.get("files") + data = kwargs.get("data") + url = f"/subclouds/{subcloud_ref}/redeploy" return self.subcloud_redeploy(url, files, data) diff --git a/distributedcloud-client/dcmanagerclient/api/v1/subcloud_peer_group_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/subcloud_peer_group_manager.py index 07d0dfe..5c72966 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/subcloud_peer_group_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/subcloud_peer_group_manager.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023 Wind River Systems, Inc. +# Copyright (c) 2023-2024 Wind River Systems, Inc. 
# # SPDX-License-Identifier: Apache-2.0 # @@ -8,23 +8,25 @@ import json from dcmanagerclient.api import base from dcmanagerclient.api.base import get_json -BASE_URL = '/subcloud-peer-groups/' +BASE_URL = "/subcloud-peer-groups/" class SubcloudPeerGroup(base.Resource): - resource_name = 'subcloud_peer_group' + resource_name = "subcloud_peer_group" - def __init__(self, - manager, - peer_group_id, - peer_group_name, - group_priority, - group_state, - system_leader_id, - system_leader_name, - max_subcloud_rehoming, - created_at, - updated_at): + def __init__( + self, + manager, + peer_group_id, + peer_group_name, + group_priority, + group_state, + system_leader_id, + system_leader_name, + max_subcloud_rehoming, + created_at, + updated_at, + ): self.manager = manager self.id = peer_group_id self.peer_group_name = peer_group_name @@ -48,15 +50,16 @@ class subcloud_peer_group_manager(base.ResourceManager): def json_to_resource(self, json_object): return self.resource_class( self, - peer_group_id=json_object['id'], - peer_group_name=json_object['peer_group_name'], - group_priority=json_object['group_priority'], - group_state=json_object['group_state'], - system_leader_id=json_object['system_leader_id'], - system_leader_name=json_object['system_leader_name'], - max_subcloud_rehoming=json_object['max_subcloud_rehoming'], - created_at=json_object['created-at'], - updated_at=json_object['updated-at']) + peer_group_id=json_object["id"], + peer_group_name=json_object["peer_group_name"], + group_priority=json_object["group_priority"], + group_state=json_object["group_state"], + system_leader_id=json_object["system_leader_id"], + system_leader_name=json_object["system_leader_name"], + max_subcloud_rehoming=json_object["max_subcloud_rehoming"], + created_at=json_object["created-at"], + updated_at=json_object["updated-at"], + ) def _subcloud_peer_group_detail(self, url): resp = self.http_client.get(url) @@ -91,7 +94,7 @@ class subcloud_peer_group_manager(base.ResourceManager): if resp.status_code != 200: self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['subcloud_peer_groups'] + json_objects = json_response_key["subcloud_peer_groups"] resource = list() for json_object in json_objects: resource.append(self.json_to_resource(json_object)) @@ -112,11 +115,10 @@ class subcloud_peer_group_manager(base.ResourceManager): if resp.status_code != 200: self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['subclouds'] + json_objects = json_response_key["subclouds"] resource = list() for json_object in json_objects: - resource.append( - self.subcloud_manager.json_to_resource(json_object)) + resource.append(self.subcloud_manager.json_to_resource(json_object)) return resource def subcloud_peer_group_migrate(self, url, data): @@ -125,11 +127,10 @@ class subcloud_peer_group_manager(base.ResourceManager): if resp.status_code != 200: self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['subclouds'] + json_objects = json_response_key["subclouds"] resource = list() for json_object in json_objects: - resource.append( - self.subcloud_manager.json_to_resource(json_object)) + resource.append(self.subcloud_manager.json_to_resource(json_object)) return resource def add_subcloud_peer_group(self, **kwargs): @@ -156,13 +157,13 @@ class subcloud_peer_group_manager(base.ResourceManager): def migrate_subcloud_peer_group(self, subcloud_peer_group_ref, **kwargs): data = kwargs - url = BASE_URL + 
'%s/migrate' % subcloud_peer_group_ref + url = BASE_URL + f"{subcloud_peer_group_ref}/migrate" return self.subcloud_peer_group_migrate(url, data) def subcloud_peer_group_list_subclouds(self, subcloud_peer_group_ref): - url = BASE_URL + '%s/subclouds' % subcloud_peer_group_ref + url = BASE_URL + f"{subcloud_peer_group_ref}/subclouds" return self._list_subclouds_for_subcloud_peer_group(url) def subcloud_peer_group_status(self, subcloud_peer_group_ref): - url = BASE_URL + '%s/status' % subcloud_peer_group_ref + url = BASE_URL + f"{subcloud_peer_group_ref}/status" return self._subcloud_peer_group_status(url) diff --git a/distributedcloud-client/dcmanagerclient/api/v1/sw_strategy_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/sw_strategy_manager.py index 18de7aa..11ef960 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/sw_strategy_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/sw_strategy_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2021-2023 Wind River Systems, Inc. +# Copyright (c) 2021-2024 Wind River Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,15 +18,13 @@ from dcmanagerclient.api.v1.sw_update_manager import sw_update_manager class sw_strategy_manager(sw_update_manager): - def __init__(self, http_client, url='sw-update-strategy'): - super(sw_strategy_manager, self).__init__( - http_client, - update_type=None) + def __init__(self, http_client, url="sw-update-strategy"): + super(sw_strategy_manager, self).__init__(http_client, update_type=None) # Removing strategy type from base class parameters - self.get_url = '/{url}'.format(url=url) - self.delete_url = '/{url}'.format(url=url) - self.actions_url = '/{url}/actions'.format(url=url) + self.get_url = f"/{url}" + self.delete_url = f"/{url}" + self.actions_url = f"/{url}/actions" def extract_extra_args(self, json_object): # Since this generic strategy manager can interact with any strategy diff --git a/distributedcloud-client/dcmanagerclient/api/v1/sw_update_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/sw_update_manager.py index b4e3f8d..325aee8 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/sw_update_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/sw_update_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2017-2023 Wind River Systems, Inc. +# Copyright (c) 2017-2024 Wind River Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -22,18 +22,20 @@ from dcmanagerclient.api.base import get_json class SwUpdateStrategy(base.Resource): - resource_name = 'sw_update_strategy' + resource_name = "sw_update_strategy" - def __init__(self, - manager, - strategy_type, - subcloud_apply_type, - max_parallel_subclouds, - stop_on_failure, - state, - created_at, - updated_at, - extra_args=None): + def __init__( + self, + manager, + strategy_type, + subcloud_apply_type, + max_parallel_subclouds, + stop_on_failure, + state, + created_at, + updated_at, + extra_args=None, + ): self.manager = manager self.strategy_type = strategy_type self.subcloud_apply_type = subcloud_apply_type @@ -48,30 +50,30 @@ class SwUpdateStrategy(base.Resource): class sw_update_manager(base.ResourceManager): """sw_update_manager - sw_update_manager is an abstract class that is used by subclasses to - manage API actions for specific update strategy types such as software - patches and firmware updates. 
+ sw_update_manager is an abstract class that is used by subclasses to + manage API actions for specific update strategy types such as software + patches and firmware updates. """ - def __init__(self, http_client, - update_type, - resource_class=SwUpdateStrategy, - url='sw-update-strategy', - extra_args=None): + def __init__( + self, + http_client, + update_type, + resource_class=SwUpdateStrategy, + url="sw-update-strategy", + extra_args=None, + ): super(sw_update_manager, self).__init__(http_client) self.resource_class = resource_class self.update_type = update_type # create_url is typically // - self.create_url = '/{}/'.format(url) + self.create_url = f"/{url}/" # get_url is typically / - self.get_url = '/{url}?type={update_type}'.format( - url=url, update_type=self.update_type) + self.get_url = f"/{url}?type={update_type}" # delete_url is typically / (same as get) - self.delete_url = '/{url}?type={update_type}'.format( - url=url, update_type=self.update_type) + self.delete_url = f"/{url}?type={update_type}" # actions_url is typically //actions - self.actions_url = '/{url}/actions?type={update_type}'.format( - url=url, update_type=self.update_type) + self.actions_url = f"/{url}/actions?type={update_type}" if extra_args is None: self.extra_args = [] @@ -81,7 +83,7 @@ class sw_update_manager(base.ResourceManager): def create_sw_update_strategy(self, **kwargs): data = kwargs if self.update_type is not None: - data.update({'type': self.update_type}) + data.update({"type": self.update_type}) return self._sw_update_create(self.create_url, data) def update_sw_strategy_detail(self): @@ -91,11 +93,11 @@ class sw_update_manager(base.ResourceManager): return self._sw_update_delete(self.delete_url) def apply_sw_update_strategy(self): - data = {'action': 'apply'} + data = {"action": "apply"} return self._sw_update_action(self.actions_url, data) def abort_sw_update_strategy(self): - data = {'action': 'abort'} + data = {"action": "abort"} return self._sw_update_action(self.actions_url, data) def extract_extra_args(self, json_object): @@ -115,14 +117,15 @@ class sw_update_manager(base.ResourceManager): def _build_from_json(self, json_object): return self.resource_class( self, - strategy_type=json_object['type'], - subcloud_apply_type=json_object['subcloud-apply-type'], - max_parallel_subclouds=json_object['max-parallel-subclouds'], - stop_on_failure=json_object['stop-on-failure'], - state=json_object['state'], - created_at=json_object['created-at'], - updated_at=json_object['updated-at'], - extra_args=self.extract_extra_args(json_object)) + strategy_type=json_object["type"], + subcloud_apply_type=json_object["subcloud-apply-type"], + max_parallel_subclouds=json_object["max-parallel-subclouds"], + stop_on_failure=json_object["stop-on-failure"], + state=json_object["state"], + created_at=json_object["created-at"], + updated_at=json_object["updated-at"], + extra_args=self.extract_extra_args(json_object), + ) def _sw_update_create(self, url, data): data = json.dumps(data) diff --git a/distributedcloud-client/dcmanagerclient/api/v1/sw_update_options_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/sw_update_options_manager.py index c3ff1c9..dbbaf75 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/sw_update_options_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/sw_update_options_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2017, 2019, 2021 Wind River Systems, Inc. +# Copyright (c) 2017, 2019, 2021, 2024 Wind River Systems, Inc. 
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,12 +24,20 @@ DEFAULT_REGION_NAME = "RegionOne" class SwUpdateOptions(base.Resource): - resource_name = 'sw_update_options' + resource_name = "sw_update_options" - def __init__(self, manager, cloud, storage_apply_type, worker_apply_type, - max_parallel_workers, alarm_restriction_type, - default_instance_action, - created_at, updated_at): + def __init__( + self, + manager, + cloud, + storage_apply_type, + worker_apply_type, + max_parallel_workers, + alarm_restriction_type, + default_instance_action, + created_at, + updated_at, + ): self.manager = manager self.cloud = cloud self.storage_apply_type = storage_apply_type @@ -47,27 +55,27 @@ class sw_update_options_manager(base.ResourceManager): def sw_update_options_update(self, subcloud_ref, **kwargs): data = kwargs if subcloud_ref: - url = '/sw-update-options/%s' % subcloud_ref + url = f"/sw-update-options/{subcloud_ref}" else: - url = '/sw-update-options/%s' % DEFAULT_REGION_NAME + url = f"/sw-update-options/{DEFAULT_REGION_NAME}" return self._sw_update_options_update(url, data) def sw_update_options_list(self): - url = '/sw-update-options' + url = "/sw-update-options" return self._sw_update_options_list(url) def sw_update_options_detail(self, subcloud_ref): if subcloud_ref: - url = '/sw-update-options/%s' % subcloud_ref + url = f"/sw-update-options/{subcloud_ref}" else: - url = '/sw-update-options/%s' % DEFAULT_REGION_NAME + url = f"/sw-update-options/{DEFAULT_REGION_NAME}" return self._sw_update_options_detail(url) def sw_update_options_delete(self, subcloud_ref): if subcloud_ref: - url = '/sw-update-options/%s' % subcloud_ref + url = f"/sw-update-options/{subcloud_ref}" else: - url = '/sw-update-options/%s' % DEFAULT_REGION_NAME + url = f"/sw-update-options/{DEFAULT_REGION_NAME}" return self._sw_update_options_delete(url) def _sw_update_options_detail(self, url): @@ -79,14 +87,16 @@ class sw_update_options_manager(base.ResourceManager): resource.append( self.resource_class( self, - cloud=json_object['name'], - storage_apply_type=json_object['storage-apply-type'], - worker_apply_type=json_object['worker-apply-type'], - max_parallel_workers=json_object['max-parallel-workers'], - alarm_restriction_type=json_object['alarm-restriction-type'], - default_instance_action=json_object['default-instance-action'], - created_at=json_object['created-at'], - updated_at=json_object['updated-at'])) + cloud=json_object["name"], + storage_apply_type=json_object["storage-apply-type"], + worker_apply_type=json_object["worker-apply-type"], + max_parallel_workers=json_object["max-parallel-workers"], + alarm_restriction_type=json_object["alarm-restriction-type"], + default_instance_action=json_object["default-instance-action"], + created_at=json_object["created-at"], + updated_at=json_object["updated-at"], + ) + ) return resource def _sw_update_options_list(self, url): @@ -94,22 +104,22 @@ class sw_update_options_manager(base.ResourceManager): if resp.status_code != 200: self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['sw-update-options'] + json_objects = json_response_key["sw-update-options"] resource = [] for json_object in json_objects: resource.append( self.resource_class( self, - cloud=json_object['name'], - storage_apply_type=json_object['storage-apply-type'], - worker_apply_type=json_object['worker-apply-type'], - max_parallel_workers=json_object['max-parallel-workers'], - 
alarm_restriction_type=json_object[ - 'alarm-restriction-type'], - default_instance_action=json_object[ - 'default-instance-action'], - created_at=json_object['created-at'], - updated_at=json_object['updated-at'])) + cloud=json_object["name"], + storage_apply_type=json_object["storage-apply-type"], + worker_apply_type=json_object["worker-apply-type"], + max_parallel_workers=json_object["max-parallel-workers"], + alarm_restriction_type=json_object["alarm-restriction-type"], + default_instance_action=json_object["default-instance-action"], + created_at=json_object["created-at"], + updated_at=json_object["updated-at"], + ) + ) return resource def _sw_update_options_delete(self, url): @@ -127,12 +137,14 @@ class sw_update_options_manager(base.ResourceManager): resource.append( self.resource_class( self, - cloud=json_object['name'], - storage_apply_type=json_object['storage-apply-type'], - worker_apply_type=json_object['worker-apply-type'], - max_parallel_workers=json_object['max-parallel-workers'], - alarm_restriction_type=json_object['alarm-restriction-type'], - default_instance_action=json_object['default-instance-action'], - created_at=json_object['created-at'], - updated_at=json_object['updated-at'])) + cloud=json_object["name"], + storage_apply_type=json_object["storage-apply-type"], + worker_apply_type=json_object["worker-apply-type"], + max_parallel_workers=json_object["max-parallel-workers"], + alarm_restriction_type=json_object["alarm-restriction-type"], + default_instance_action=json_object["default-instance-action"], + created_at=json_object["created-at"], + updated_at=json_object["updated-at"], + ) + ) return resource diff --git a/distributedcloud-client/dcmanagerclient/api/v1/sw_upgrade_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/sw_upgrade_manager.py index 1ef2939..54379c6 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/sw_upgrade_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/sw_upgrade_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2020-2021 Wind River Systems, Inc. +# Copyright (c) 2020-2021, 2024 Wind River Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,12 +16,12 @@ # from dcmanagerclient.api.v1.sw_update_manager import sw_update_manager -SW_UPDATE_TYPE_UPGRADE = 'upgrade' +SW_UPDATE_TYPE_UPGRADE = "upgrade" class sw_upgrade_manager(sw_update_manager): def __init__(self, http_client): super(sw_upgrade_manager, self).__init__( - http_client, - update_type=SW_UPDATE_TYPE_UPGRADE) + http_client, update_type=SW_UPDATE_TYPE_UPGRADE + ) diff --git a/distributedcloud-client/dcmanagerclient/api/v1/system_peer_manager.py b/distributedcloud-client/dcmanagerclient/api/v1/system_peer_manager.py index 8900d55..d375a3d 100644 --- a/distributedcloud-client/dcmanagerclient/api/v1/system_peer_manager.py +++ b/distributedcloud-client/dcmanagerclient/api/v1/system_peer_manager.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Wind River Systems, Inc. +# Copyright (c) 2023-2024 Wind River Systems, Inc. 
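# Illustrative sketch (not part of the patch): the sw-update-options helpers
# above all share one URL rule: target a specific subcloud when a reference
# is given, otherwise fall back to the RegionOne defaults. Standalone form,
# assuming the same DEFAULT_REGION_NAME constant:
DEFAULT_REGION_NAME = "RegionOne"


def sw_update_options_url(subcloud_ref=None):
    target = subcloud_ref if subcloud_ref else DEFAULT_REGION_NAME
    return f"/sw-update-options/{target}"


# sw_update_options_url("subcloud1") -> "/sw-update-options/subcloud1"
# sw_update_options_url()            -> "/sw-update-options/RegionOne"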
# # SPDX-License-Identifier: Apache-2.0 # @@ -9,28 +9,30 @@ import json from dcmanagerclient.api import base from dcmanagerclient.api.base import get_json -BASE_URL = '/system-peers/' +BASE_URL = "/system-peers/" class SystemPeer(base.Resource): - resource_name = 'system_peer' + resource_name = "system_peer" - def __init__(self, - manager, - peer_id, - peer_uuid, - peer_name, - manager_endpoint, - manager_username, - peer_controller_gateway_address, - administrative_state, - heartbeat_interval, - heartbeat_failure_threshold, - heartbeat_failure_policy, - heartbeat_maintenance_timeout, - availability_state, - created_at, - updated_at): + def __init__( + self, + manager, + peer_id, + peer_uuid, + peer_name, + manager_endpoint, + manager_username, + peer_controller_gateway_address, + administrative_state, + heartbeat_interval, + heartbeat_failure_threshold, + heartbeat_failure_policy, + heartbeat_maintenance_timeout, + availability_state, + created_at, + updated_at, + ): self.manager = manager self.peer_id = peer_id self.peer_uuid = peer_uuid @@ -58,23 +60,25 @@ class system_peer_manager(base.ResourceManager): def _json_to_resource(self, json_object): return self.resource_class( self, - peer_id=json_object['id'], - peer_uuid=json_object['peer-uuid'], - peer_name=json_object['peer-name'], - manager_endpoint=json_object['manager-endpoint'], - manager_username=json_object['manager-username'], + peer_id=json_object["id"], + peer_uuid=json_object["peer-uuid"], + peer_name=json_object["peer-name"], + manager_endpoint=json_object["manager-endpoint"], + manager_username=json_object["manager-username"], peer_controller_gateway_address=json_object[ - 'peer-controller-gateway-address'], - administrative_state=json_object['administrative-state'], - heartbeat_interval=json_object['heartbeat-interval'], - heartbeat_failure_threshold=json_object[ - 'heartbeat-failure-threshold'], - heartbeat_failure_policy=json_object['heartbeat-failure-policy'], + "peer-controller-gateway-address" + ], + administrative_state=json_object["administrative-state"], + heartbeat_interval=json_object["heartbeat-interval"], + heartbeat_failure_threshold=json_object["heartbeat-failure-threshold"], + heartbeat_failure_policy=json_object["heartbeat-failure-policy"], heartbeat_maintenance_timeout=json_object[ - 'heartbeat-maintenance-timeout'], - availability_state=json_object['availability-state'], - created_at=json_object['created-at'], - updated_at=json_object['updated-at']) + "heartbeat-maintenance-timeout" + ], + availability_state=json_object["availability-state"], + created_at=json_object["created-at"], + updated_at=json_object["updated-at"], + ) def system_peer_create(self, url, data): data = json.dumps(data) @@ -101,7 +105,7 @@ class system_peer_manager(base.ResourceManager): if resp.status_code != 200: self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['system_peers'] + json_objects = json_response_key["system_peers"] resource = list() for json_object in json_objects: resource.append(self._json_to_resource(json_object)) @@ -121,11 +125,12 @@ class system_peer_manager(base.ResourceManager): if resp.status_code != 200: self._raise_api_exception(resp) json_response_key = get_json(resp) - json_objects = json_response_key['subcloud_peer_groups'] + json_objects = json_response_key["subcloud_peer_groups"] resource = list() for json_object in json_objects: resource.append( - self.subcloud_peer_group_manager.json_to_resource(json_object)) + 
self.subcloud_peer_group_manager.json_to_resource(json_object) + ) return resource def add_system_peer(self, **kwargs): @@ -151,5 +156,5 @@ class system_peer_manager(base.ResourceManager): return self.system_peer_update(url, data) def system_peer_list_peer_groups(self, system_peer_ref): - url = BASE_URL + '%s/subcloud-peer-groups' % system_peer_ref + url = f"{BASE_URL}{system_peer_ref}/subcloud-peer-groups" return self._list_peer_groups_for_system_peer(url) diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/alarm_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/alarm_manager.py index 5ceca5f..d59b721 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/alarm_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/alarm_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2017-2021 Wind River Systems, Inc. +# Copyright (c) 2017-2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,28 +17,28 @@ from dcmanagerclient.commands.v1 import base -def format(alarms=None): +def basic_format(alarms=None): columns = ( - 'NAME', - 'CRITICAL_ALARMS', - 'MAJOR_ALARMS', - 'MINOR_ALARMS', - 'WARNINGS', - 'STATUS' + "NAME", + "CRITICAL_ALARMS", + "MAJOR_ALARMS", + "MINOR_ALARMS", + "WARNINGS", + "STATUS", ) if alarms: data = ( - alarms.name if alarms.name else '-', - alarms.critical if int(alarms.critical) >= 0 else '-', - alarms.major if int(alarms.major) >= 0 else '-', - alarms.minor if int(alarms.minor) >= 0 else '-', - alarms.warnings if int(alarms.warnings) >= 0 else '-', - alarms.status + alarms.name if alarms.name else "-", + alarms.critical if int(alarms.critical) >= 0 else "-", + alarms.major if int(alarms.major) >= 0 else "-", + alarms.minor if int(alarms.minor) >= 0 else "-", + alarms.warnings if int(alarms.warnings) >= 0 else "-", + alarms.status, ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -47,7 +47,7 @@ class ListAlarmSummary(base.DCManagerLister): """List alarm summaries of subclouds.""" def _get_format_function(self): - return format + return basic_format def get_parser(self, prog_name): parser = super(ListAlarmSummary, self).get_parser(prog_name) diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/fw_update_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/fw_update_manager.py index a5c3bb6..f27c868 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/fw_update_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/fw_update_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2020-2021 Wind River Systems, Inc. +# Copyright (c) 2020-2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -24,31 +24,31 @@ class FwUpdateManagerMixin(object): return dcmanager_client.fw_update_manager -class CreateFwUpdateStrategy(FwUpdateManagerMixin, - sw_update_manager.CreateSwUpdateStrategy): +class CreateFwUpdateStrategy( + FwUpdateManagerMixin, sw_update_manager.CreateSwUpdateStrategy +): """Create a firmware update strategy.""" - pass -class ShowFwUpdateStrategy(FwUpdateManagerMixin, - sw_update_manager.ShowSwUpdateStrategy): +class ShowFwUpdateStrategy( + FwUpdateManagerMixin, sw_update_manager.ShowSwUpdateStrategy +): """Show the details of a firmware update strategy for a subcloud.""" - pass -class DeleteFwUpdateStrategy(FwUpdateManagerMixin, - sw_update_manager.DeleteSwUpdateStrategy): +class DeleteFwUpdateStrategy( + FwUpdateManagerMixin, sw_update_manager.DeleteSwUpdateStrategy +): """Delete firmware update strategy from the database.""" - pass -class ApplyFwUpdateStrategy(FwUpdateManagerMixin, - sw_update_manager.ApplySwUpdateStrategy): +class ApplyFwUpdateStrategy( + FwUpdateManagerMixin, sw_update_manager.ApplySwUpdateStrategy +): """Apply a firmware update strategy.""" - pass -class AbortFwUpdateStrategy(FwUpdateManagerMixin, - sw_update_manager.AbortSwUpdateStrategy): +class AbortFwUpdateStrategy( + FwUpdateManagerMixin, sw_update_manager.AbortSwUpdateStrategy +): """Abort a firmware update strategy.""" - pass diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/kube_rootca_update_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/kube_rootca_update_manager.py index e8d8086..e91298f 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/kube_rootca_update_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/kube_rootca_update_manager.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2021 Wind River Systems, Inc. +# Copyright (c) 2021, 2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -16,69 +16,66 @@ class KubeRootcaUpdateManagerMixin(object): return dcmanager_client.kube_rootca_update_manager -class CreateKubeRootcaUpdateStrategy(KubeRootcaUpdateManagerMixin, - sw_update_manager.CreateSwUpdateStrategy): +class CreateKubeRootcaUpdateStrategy( + KubeRootcaUpdateManagerMixin, sw_update_manager.CreateSwUpdateStrategy +): """Create a kube rootca update strategy. - This strategy supports: expiry-date, subject and cert-file + This strategy supports: expiry-date, subject and cert-file """ def get_parser(self, prog_name): - parser = super(CreateKubeRootcaUpdateStrategy, - self).get_parser(prog_name) + parser = super(CreateKubeRootcaUpdateStrategy, self).get_parser(prog_name) parser.add_argument( - '--subject', + "--subject", required=False, - help='A subject for a generated certificate.' + help="A subject for a generated certificate.", ) parser.add_argument( - '--expiry-date', + "--expiry-date", required=False, - help='Expiry date for a generated certificate.' + help="Expiry date for a generated certificate.", ) parser.add_argument( - '--cert-file', - required=False, - help='Path to a certificate to upload.' + "--cert-file", required=False, help="Path to a certificate to upload." 
) return parser def process_custom_params(self, parsed_args, kwargs_dict): """Updates kwargs dictionary from parsed_args for kube rootca update""" if parsed_args.subject: - kwargs_dict['subject'] = parsed_args.subject + kwargs_dict["subject"] = parsed_args.subject # Note the "-" vs "_" when dealing with parsed_args if parsed_args.expiry_date: - kwargs_dict['expiry-date'] = parsed_args.expiry_date + kwargs_dict["expiry-date"] = parsed_args.expiry_date if parsed_args.cert_file: # Need an absolute path for the cert-file - kwargs_dict['cert-file'] = os.path.abspath(parsed_args.cert_file) + kwargs_dict["cert-file"] = os.path.abspath(parsed_args.cert_file) # override validate_force_params defined in CreateSwUpdateStrategy def validate_force_params(self, parsed_args): """Disable validating the force option. Allows multiple subclouds.""" - pass -class ShowKubeRootcaUpdateStrategy(KubeRootcaUpdateManagerMixin, - sw_update_manager.ShowSwUpdateStrategy): +class ShowKubeRootcaUpdateStrategy( + KubeRootcaUpdateManagerMixin, sw_update_manager.ShowSwUpdateStrategy +): """Show the details of a kube rootca update strategy for a subcloud.""" - pass -class DeleteKubeRootcaUpdateStrategy(KubeRootcaUpdateManagerMixin, - sw_update_manager.DeleteSwUpdateStrategy): +class DeleteKubeRootcaUpdateStrategy( + KubeRootcaUpdateManagerMixin, sw_update_manager.DeleteSwUpdateStrategy +): """Delete kube rootca update strategy from the database.""" - pass -class ApplyKubeRootcaUpdateStrategy(KubeRootcaUpdateManagerMixin, - sw_update_manager.ApplySwUpdateStrategy): +class ApplyKubeRootcaUpdateStrategy( + KubeRootcaUpdateManagerMixin, sw_update_manager.ApplySwUpdateStrategy +): """Apply a kube rootca update strategy.""" - pass -class AbortKubeRootcaUpdateStrategy(KubeRootcaUpdateManagerMixin, - sw_update_manager.AbortSwUpdateStrategy): +class AbortKubeRootcaUpdateStrategy( + KubeRootcaUpdateManagerMixin, sw_update_manager.AbortSwUpdateStrategy +): """Abort a kube rootca update strategy.""" - pass diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/kube_upgrade_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/kube_upgrade_manager.py index 446663a..a5e2cc4 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/kube_upgrade_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/kube_upgrade_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2020-2021 Wind River Systems, Inc. +# Copyright (c) 2020-2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -24,17 +24,17 @@ class KubeUpgradeManagerMixin(object): return dcmanager_client.kube_upgrade_manager -class CreateKubeUpgradeStrategy(KubeUpgradeManagerMixin, - sw_update_manager.CreateSwUpdateStrategy): +class CreateKubeUpgradeStrategy( + KubeUpgradeManagerMixin, sw_update_manager.CreateSwUpdateStrategy +): """Create a kubernetes upgrade strategy.""" def get_parser(self, prog_name): - parser = super(CreateKubeUpgradeStrategy, - self).get_parser(prog_name) + parser = super(CreateKubeUpgradeStrategy, self).get_parser(prog_name) parser.add_argument( - '--to-version', + "--to-version", required=False, - help='Specify a version other than the system controller version.' 
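# Illustrative sketch (not part of the patch): process_custom_params above
# maps argparse attribute names (underscores) onto the hyphenated keys the
# dcmanager API expects and resolves cert-file to an absolute path. A
# standalone version of that mapping, exercised with a hypothetical
# parsed_args namespace:
import os
from argparse import Namespace


def kube_rootca_kwargs(parsed_args):
    kwargs = {}
    if parsed_args.subject:
        kwargs["subject"] = parsed_args.subject
    if parsed_args.expiry_date:      # CLI flag --expiry-date
        kwargs["expiry-date"] = parsed_args.expiry_date
    if parsed_args.cert_file:        # CLI flag --cert-file
        kwargs["cert-file"] = os.path.abspath(parsed_args.cert_file)
    return kwargs


print(kube_rootca_kwargs(
    Namespace(subject=None, expiry_date="2025-01-01", cert_file=None)))
# {'expiry-date': '2025-01-01'}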
+ help="Specify a version other than the system controller version.", ) return parser @@ -42,33 +42,32 @@ class CreateKubeUpgradeStrategy(KubeUpgradeManagerMixin, """Updates kwargs dictionary from parsed_args for kube upgrade""" # Note the "-" vs "_" when dealing with parsed_args if parsed_args.to_version: - kwargs_dict['to-version'] = parsed_args.to_version + kwargs_dict["to-version"] = parsed_args.to_version # override validate_force_params defined in CreateSwUpdateStrategy def validate_force_params(self, parsed_args): """Disable validating the force option. Allows multiple subclouds.""" - pass -class ShowKubeUpgradeStrategy(KubeUpgradeManagerMixin, - sw_update_manager.ShowSwUpdateStrategy): +class ShowKubeUpgradeStrategy( + KubeUpgradeManagerMixin, sw_update_manager.ShowSwUpdateStrategy +): """Show the details of a kubernetes upgrade strategy for a subcloud.""" - pass -class DeleteKubeUpgradeStrategy(KubeUpgradeManagerMixin, - sw_update_manager.DeleteSwUpdateStrategy): +class DeleteKubeUpgradeStrategy( + KubeUpgradeManagerMixin, sw_update_manager.DeleteSwUpdateStrategy +): """Delete kubernetes upgrade strategy from the database.""" - pass -class ApplyKubeUpgradeStrategy(KubeUpgradeManagerMixin, - sw_update_manager.ApplySwUpdateStrategy): +class ApplyKubeUpgradeStrategy( + KubeUpgradeManagerMixin, sw_update_manager.ApplySwUpdateStrategy +): """Apply a kubernetes upgrade strategy.""" - pass -class AbortKubeUpgradeStrategy(KubeUpgradeManagerMixin, - sw_update_manager.AbortSwUpdateStrategy): +class AbortKubeUpgradeStrategy( + KubeUpgradeManagerMixin, sw_update_manager.AbortSwUpdateStrategy +): """Abort a kubernetes upgrade strategy.""" - pass diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/peer_group_association_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/peer_group_association_manager.py index 151baa6..d8f0bdf 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/peer_group_association_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/peer_group_association_manager.py @@ -1,23 +1,23 @@ # -# Copyright (c) 2023 Wind River Systems, Inc. +# Copyright (c) 2023-2024 Wind River Systems, Inc. 
# # SPDX-License-Identifier: Apache-2.0 # from osc_lib.command import command -from dcmanagerclient.commands.v1 import base from dcmanagerclient import exceptions +from dcmanagerclient.commands.v1 import base def association_format(peer_group_association=None): columns = ( - 'id', - 'peer_group_id', - 'system_peer_id', - 'type', - 'sync_status', - 'peer_group_priority' + "id", + "peer_group_id", + "system_peer_id", + "type", + "sync_status", + "peer_group_priority", ) if peer_group_association: @@ -27,11 +27,11 @@ def association_format(peer_group_association=None): peer_group_association.system_peer_id, peer_group_association.association_type, peer_group_association.sync_status, - peer_group_association.peer_group_priority + peer_group_association.peer_group_priority, ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -40,15 +40,15 @@ def detail_association_format(peer_group_association=None): # Include all the fields in detail_association_format # plus some additional fields columns = ( - 'id', - 'peer_group_id', - 'system_peer_id', - 'association_type', - 'sync_status', - 'peer_group_priority', - 'sync_message', - 'created_at', - 'updated_at', + "id", + "peer_group_id", + "system_peer_id", + "association_type", + "sync_status", + "peer_group_priority", + "sync_message", + "created_at", + "updated_at", ) if peer_group_association: @@ -61,10 +61,10 @@ def detail_association_format(peer_group_association=None): peer_group_association.peer_group_priority, peer_group_association.sync_message, peer_group_association.created_at, - peer_group_association.updated_at + peer_group_association.updated_at, ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -79,32 +79,27 @@ class AddPeerGroupAssociation(base.DCManagerShowOne): parser = super(AddPeerGroupAssociation, self).get_parser(prog_name) parser.add_argument( - '--peer-group-id', - required=True, - help='Subcloud peer group ID.' + "--peer-group-id", required=True, help="Subcloud peer group ID." ) parser.add_argument( - '--system-peer-id', - required=True, - help='System Peer ID.' + "--system-peer-id", required=True, help="System Peer ID." ) parser.add_argument( - '--peer-group-priority', + "--peer-group-priority", required=True, type=int, - help='Priority of this peer group.' 
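# Illustrative sketch (not part of the patch): the *_format helpers in this
# patch all follow the same contract consumed by the dcmanager list/show
# commands: return a (columns, data) pair, where data carries the resource's
# values when one is given and a single row of empty strings otherwise. A
# minimal formatter in that style, with hypothetical column names:
def example_format(resource=None):
    columns = ("id", "name", "sync_status")
    if resource:
        data = (resource.id, resource.name, resource.sync_status)
    else:
        data = (tuple("" for _ in range(len(columns))),)
    return columns, data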
+ help="Priority of this peer group.", ) return parser def _get_resources(self, parsed_args): - dcmanager_client = self.app.client_manager.\ - peer_group_association_manager + dcmanager_client = self.app.client_manager.peer_group_association_manager kwargs = { - 'peer_group_id': parsed_args.peer_group_id, - 'system_peer_id': parsed_args.system_peer_id, - 'peer_group_priority': parsed_args.peer_group_priority + "peer_group_id": parsed_args.peer_group_id, + "system_peer_id": parsed_args.system_peer_id, + "peer_group_priority": parsed_args.peer_group_priority, } return dcmanager_client.peer_group_association_manager.\ add_peer_group_association(**kwargs) @@ -121,8 +116,7 @@ class ListPeerGroupAssociation(base.DCManagerLister): return parser def _get_resources(self, parsed_args): - dcmanager_client = self.app.client_manager.\ - peer_group_association_manager + dcmanager_client = self.app.client_manager.peer_group_association_manager return dcmanager_client.peer_group_association_manager.\ list_peer_group_associations() @@ -137,16 +131,14 @@ class ShowPeerGroupAssociation(base.DCManagerShowOne): parser = super(ShowPeerGroupAssociation, self).get_parser(prog_name) parser.add_argument( - 'id', - help='ID of the peer group association to view the details.' + "id", help="ID of the peer group association to view the details." ) return parser def _get_resources(self, parsed_args): association_ref = parsed_args.id - dcmanager_client = self.app.client_manager.\ - peer_group_association_manager + dcmanager_client = self.app.client_manager.peer_group_association_manager return dcmanager_client.peer_group_association_manager.\ peer_group_association_detail(association_ref) @@ -160,17 +152,13 @@ class SyncPeerGroupAssociation(base.DCManagerShowOne): def get_parser(self, prog_name): parser = super(SyncPeerGroupAssociation, self).get_parser(prog_name) - parser.add_argument( - 'id', - help='ID of the peer group association to sync.' - ) + parser.add_argument("id", help="ID of the peer group association to sync.") return parser def _get_resources(self, parsed_args): association_ref = parsed_args.id - dcmanager_client = self.app.client_manager.\ - peer_group_association_manager + dcmanager_client = self.app.client_manager.peer_group_association_manager return dcmanager_client.peer_group_association_manager.\ sync_peer_group_association(association_ref) @@ -181,22 +169,17 @@ class DeletePeerGroupAssociation(command.Command): def get_parser(self, prog_name): parser = super(DeletePeerGroupAssociation, self).get_parser(prog_name) - parser.add_argument( - 'id', - help='ID of the peer group association to delete.' - ) + parser.add_argument("id", help="ID of the peer group association to delete.") return parser def take_action(self, parsed_args): - dcmanager_client = self.app.client_manager.\ - peer_group_association_manager + dcmanager_client = self.app.client_manager.peer_group_association_manager try: dcmanager_client.peer_group_association_manager.\ delete_peer_group_association(parsed_args.id) except Exception as e: print(e) - msg = "Unable to delete peer group association %s" % ( - parsed_args.id) + msg = f"Unable to delete peer group association {parsed_args.id}" raise exceptions.DCManagerClientException(msg) @@ -209,29 +192,24 @@ class UpdatePeerGroupAssociation(base.DCManagerShowOne): def get_parser(self, prog_name): parser = super(UpdatePeerGroupAssociation, self).get_parser(prog_name) - parser.add_argument( - 'id', - help='ID of the peer group association to update.' 
- ) + parser.add_argument("id", help="ID of the peer group association to update.") parser.add_argument( - '--peer-group-priority', + "--peer-group-priority", required=True, type=int, - help='Priority of the subcloud peer group in this association.' + help="Priority of the subcloud peer group in this association.", ) return parser def _get_resources(self, parsed_args): - dcmanager_client = self.app.client_manager.\ - peer_group_association_manager + dcmanager_client = self.app.client_manager.peer_group_association_manager - kwargs = {'peer_group_priority': parsed_args.peer_group_priority} + kwargs = {"peer_group_priority": parsed_args.peer_group_priority} try: return dcmanager_client.peer_group_association_manager.\ update_peer_group_association(parsed_args.id, **kwargs) except Exception as e: print(e) - msg = "Unable to update peer group association %s" % ( - parsed_args.id) + msg = f"Unable to update peer group association {parsed_args.id}" raise exceptions.DCManagerClientException(msg) diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/phased_subcloud_deploy_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/phased_subcloud_deploy_manager.py index 4da924a..47a5938 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/phased_subcloud_deploy_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/phased_subcloud_deploy_manager.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Wind River Systems, Inc. +# Copyright (c) 2023-2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -7,9 +7,8 @@ import base64 import os +from dcmanagerclient import exceptions, utils from dcmanagerclient.commands.v1 import base -from dcmanagerclient import exceptions -from dcmanagerclient import utils class AbortPhasedSubcloudDeploy(base.DCManagerShowOne): @@ -22,8 +21,8 @@ class AbortPhasedSubcloudDeploy(base.DCManagerShowOne): parser = super(AbortPhasedSubcloudDeploy, self).get_parser(prog_name) parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to abort the on-going deployment.' + "subcloud", + help="Name or ID of the subcloud to abort the on-going deployment.", ) return parser @@ -34,11 +33,10 @@ class AbortPhasedSubcloudDeploy(base.DCManagerShowOne): phased_subcloud_deploy_manager.phased_subcloud_deploy_manager try: - return dcmanager_client.subcloud_deploy_abort( - subcloud_ref=subcloud_ref) + return dcmanager_client.subcloud_deploy_abort(subcloud_ref=subcloud_ref) except Exception as e: print(e) - error_msg = "Unable to abort subcloud deploy %s" % (subcloud_ref) + error_msg = f"Unable to abort subcloud deploy {subcloud_ref}" raise exceptions.DCManagerClientException(error_msg) @@ -52,118 +50,122 @@ class PhasedSubcloudDeployResume(base.DCManagerShowOne): parser = super().get_parser(prog_name) parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to resume deployment.' + "subcloud", help="Name or ID of the subcloud to resume deployment." ) parser.add_argument( - '--bootstrap-address', + "--bootstrap-address", required=False, - help='IP address for initial subcloud controller.' + help="IP address for initial subcloud controller.", ) parser.add_argument( - '--bootstrap-values', + "--bootstrap-values", required=False, - help='YAML file containing parameters required for the bootstrap ' - 'of the subcloud.' 
+ help="YAML file containing parameters required for the bootstrap " + "of the subcloud.", ) parser.add_argument( - '--deploy-config', + "--deploy-config", required=False, - help='YAML file containing parameters required for the initial ' - 'configuration and unlock of the subcloud.' + help="YAML file containing parameters required for the initial " + "configuration and unlock of the subcloud.", ) parser.add_argument( - '--install-values', + "--install-values", required=False, - help='YAML file containing parameters required for the remote ' - 'install of the subcloud.' + help="YAML file containing parameters required for the remote " + "install of the subcloud.", ) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='sysadmin password of the subcloud to be configured, ' - 'if not provided you will be prompted.' + help="sysadmin password of the subcloud to be configured, " + "if not provided you will be prompted.", ) parser.add_argument( - '--bmc-password', + "--bmc-password", required=False, - help='bmc password of the subcloud to be configured, ' - 'if not provided you will be prompted. This parameter is only' - ' valid if the --install-values are specified.' + help="bmc password of the subcloud to be configured, " + "if not provided you will be prompted. This parameter is only" + " valid if the --install-values are specified.", ) parser.add_argument( - '--release', + "--release", required=False, - help='software release used to install, bootstrap and/or deploy ' - 'the subcloud with. If not specified, the current software ' - 'release of the system controller will be used.' + help="software release used to install, bootstrap and/or deploy " + "the subcloud with. If not specified, the current software " + "release of the system controller will be used.", ) return parser def _get_resources(self, parsed_args): subcloud_ref = parsed_args.subcloud - dcmanager_client = self.app.client_manager.\ - phased_subcloud_deploy_manager.phased_subcloud_deploy_manager + dcmanager_client = self.app.client_manager.phased_subcloud_deploy_manager.\ + phased_subcloud_deploy_manager files = dict() data = dict() if parsed_args.bootstrap_address: - data['bootstrap-address'] = parsed_args.bootstrap_address + data["bootstrap-address"] = parsed_args.bootstrap_address # Get the bootstrap values yaml file if parsed_args.bootstrap_values: if not os.path.isfile(parsed_args.bootstrap_values): - error_msg = "bootstrap-values does not exist: %s" % \ - parsed_args.bootstrap_values + error_msg = ( + "bootstrap-values does not exist: " + f"{parsed_args.bootstrap_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['bootstrap_values'] = parsed_args.bootstrap_values + files["bootstrap_values"] = parsed_args.bootstrap_values # Get the install values yaml file if parsed_args.install_values: if not os.path.isfile(parsed_args.install_values): - error_msg = "install-values does not exist: %s" % \ - parsed_args.install_values + error_msg = ( + f"install-values does not exist: {parsed_args.install_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['install_values'] = parsed_args.install_values + files["install_values"] = parsed_args.install_values # Get the deploy config yaml file if parsed_args.deploy_config: if not os.path.isfile(parsed_args.deploy_config): - error_msg = "deploy-config does not exist: %s" % \ - parsed_args.deploy_config + error_msg = ( + f"deploy-config does not exist: {parsed_args.deploy_config}" + ) raise 
exceptions.DCManagerClientException(error_msg) - files['deploy_config'] = parsed_args.deploy_config + files["deploy_config"] = parsed_args.deploy_config # Prompt the user for the subcloud's password if it isn't provided if parsed_args.sysadmin_password: - data['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ) else: password = utils.prompt_for_password() - data["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode(password.encode("utf-8")) if parsed_args.install_values: if parsed_args.bmc_password: - data['bmc_password'] = base64.b64encode( - parsed_args.bmc_password.encode("utf-8")) - else: - password = utils.prompt_for_password('bmc') data["bmc_password"] = base64.b64encode( - password.encode("utf-8")) + parsed_args.bmc_password.encode("utf-8") + ) + else: + password = utils.prompt_for_password("bmc") + data["bmc_password"] = base64.b64encode(password.encode("utf-8")) if parsed_args.release: - data['release'] = parsed_args.release + data["release"] = parsed_args.release return dcmanager_client.subcloud_deploy_resume( - subcloud_ref=subcloud_ref, files=files, data=data) + subcloud_ref=subcloud_ref, files=files, data=data + ) class CreatePhasedSubcloudDeploy(base.DCManagerShowOne): @@ -176,102 +178,102 @@ class CreatePhasedSubcloudDeploy(base.DCManagerShowOne): parser = super().get_parser(prog_name) parser.add_argument( - '--bootstrap-address', + "--bootstrap-address", required=True, - help='IP address for initial subcloud controller.' + help="IP address for initial subcloud controller.", ) parser.add_argument( - '--bootstrap-values', + "--bootstrap-values", required=True, - help='YAML file containing parameters required for the bootstrap ' - 'of the subcloud.' + help="YAML file containing parameters required for the bootstrap " + "of the subcloud.", ) parser.add_argument( - '--deploy-config', + "--deploy-config", required=False, - help='YAML file containing parameters required for the initial ' - 'configuration and unlock of the subcloud.' + help="YAML file containing parameters required for the initial " + "configuration and unlock of the subcloud.", ) parser.add_argument( - '--install-values', + "--install-values", required=False, - help='YAML file containing parameters required for the remote ' - 'install of the subcloud.' + help="YAML file containing parameters required for the remote " + "install of the subcloud.", ) parser.add_argument( - '--bmc-password', + "--bmc-password", required=False, - help='bmc password of the subcloud to be configured, ' - 'if not provided you will be prompted. This parameter is only' - ' valid if the --install-values are specified.' + help="bmc password of the subcloud to be configured, " + "if not provided you will be prompted. This parameter is only" + " valid if the --install-values are specified.", ) parser.add_argument( - '--group', - required=False, - help='Name or ID of subcloud group.' + "--group", required=False, help="Name or ID of subcloud group." ) parser.add_argument( - '--release', + "--release", required=False, - help='software release used to install, bootstrap and/or deploy ' - 'the subcloud with. If not specified, the current software ' - 'release of the system controller will be used.' + help="software release used to install, bootstrap and/or deploy " + "the subcloud with. 
If not specified, the current software " + "release of the system controller will be used.", ) return parser def _get_resources(self, parsed_args): - dcmanager_client = self.app.client_manager.\ - phased_subcloud_deploy_manager.phased_subcloud_deploy_manager + dcmanager_client = self.app.client_manager.phased_subcloud_deploy_manager.\ + phased_subcloud_deploy_manager files = dict() data = dict() - data['bootstrap-address'] = parsed_args.bootstrap_address + data["bootstrap-address"] = parsed_args.bootstrap_address # Get the bootstrap values yaml file if not os.path.isfile(parsed_args.bootstrap_values): - error_msg = "bootstrap-values does not exist: %s" % \ - parsed_args.bootstrap_values + error_msg = ( + f"bootstrap-values does not exist: {parsed_args.bootstrap_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['bootstrap_values'] = parsed_args.bootstrap_values + files["bootstrap_values"] = parsed_args.bootstrap_values # Get the deploy config yaml file if parsed_args.deploy_config: if not os.path.isfile(parsed_args.deploy_config): - error_msg = "deploy-config does not exist: %s" % \ - parsed_args.deploy_config + error_msg = ( + f"deploy-config does not exist: {parsed_args.deploy_config}" + ) raise exceptions.DCManagerClientException(error_msg) - files['deploy_config'] = parsed_args.deploy_config + files["deploy_config"] = parsed_args.deploy_config # Get the install values yaml file if parsed_args.install_values: if not os.path.isfile(parsed_args.install_values): - error_msg = "install-values does not exist: %s" % \ - parsed_args.install_values + error_msg = ( + f"install-values does not exist: {parsed_args.install_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['install_values'] = parsed_args.install_values + files["install_values"] = parsed_args.install_values if parsed_args.bmc_password: - data['bmc_password'] = base64.b64encode( - parsed_args.bmc_password.encode("utf-8")) - else: - password = utils.prompt_for_password('bmc') data["bmc_password"] = base64.b64encode( - password.encode("utf-8")) + parsed_args.bmc_password.encode("utf-8") + ) + else: + password = utils.prompt_for_password("bmc") + data["bmc_password"] = base64.b64encode(password.encode("utf-8")) if parsed_args.group: - data['group_id'] = parsed_args.group + data["group_id"] = parsed_args.group if parsed_args.release: - data['release'] = parsed_args.release + data["release"] = parsed_args.release - return dcmanager_client.subcloud_deploy_create( - files=files, data=data) + return dcmanager_client.subcloud_deploy_create(files=files, data=data) class InstallPhasedSubcloudDeploy(base.DCManagerShowOne): @@ -284,80 +286,81 @@ class InstallPhasedSubcloudDeploy(base.DCManagerShowOne): parser = super(InstallPhasedSubcloudDeploy, self).get_parser(prog_name) parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to install.' + "subcloud", help="Name or ID of the subcloud to install." ) parser.add_argument( - '--install-values', + "--install-values", required=False, - help='YAML file containing parameters required for the remote ' - 'install of the subcloud.' + help="YAML file containing parameters required for the remote " + "install of the subcloud.", ) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='sysadmin password of the subcloud to be configured, ' - 'if not provided you will be prompted.' 
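# Illustrative sketch (not part of the patch): the phased-deploy commands in
# this patch validate optional YAML inputs the same way before calling the
# API: reject a path that is not a regular file, otherwise record it under a
# well-known key in the files dict. A standalone, hypothetical helper for
# that pattern:
import os

from dcmanagerclient import exceptions


def add_yaml_file(files, key, path, label):
    # e.g. add_yaml_file(files, "bootstrap_values",
    #                    parsed_args.bootstrap_values, "bootstrap-values")
    if path:
        if not os.path.isfile(path):
            raise exceptions.DCManagerClientException(
                f"{label} does not exist: {path}")
        files[key] = path
    return files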
+ help="sysadmin password of the subcloud to be configured, " + "if not provided you will be prompted.", ) parser.add_argument( - '--bmc-password', + "--bmc-password", required=False, - help='bmc password of the subcloud to be configured, ' - 'if not provided you will be prompted. This parameter is only' - ' valid if the --install-values are specified.' + help="bmc password of the subcloud to be configured, " + "if not provided you will be prompted. This parameter is only" + " valid if the --install-values are specified.", ) parser.add_argument( - '--release', + "--release", required=False, - help='software release used to install the subcloud with. ' - 'If not specified, the current software release ' - 'of the system controller will be used.' + help="software release used to install the subcloud with. " + "If not specified, the current software release " + "of the system controller will be used.", ) return parser def _get_resources(self, parsed_args): subcloud_ref = parsed_args.subcloud - dcmanager_client = self.app.client_manager.\ - phased_subcloud_deploy_manager.phased_subcloud_deploy_manager + dcmanager_client = self.app.client_manager.phased_subcloud_deploy_manager.\ + phased_subcloud_deploy_manager files = dict() data = dict() # Prompt the user for the subcloud's password if it isn't provided if parsed_args.sysadmin_password is not None: - data['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ) else: password = utils.prompt_for_password() - data["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode(password.encode("utf-8")) if parsed_args.install_values is not None: if not os.path.isfile(parsed_args.install_values): - error_msg = "install-values does not exist: %s" % \ - parsed_args.install_values + error_msg = ( + f"install-values does not exist: {parsed_args.install_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['install_values'] = parsed_args.install_values + files["install_values"] = parsed_args.install_values if parsed_args.bmc_password is not None: - data['bmc_password'] = base64.b64encode( - parsed_args.bmc_password.encode("utf-8")) - else: - password = utils.prompt_for_password('bmc') data["bmc_password"] = base64.b64encode( - password.encode("utf-8")) + parsed_args.bmc_password.encode("utf-8") + ) + else: + password = utils.prompt_for_password("bmc") + data["bmc_password"] = base64.b64encode(password.encode("utf-8")) if parsed_args.release is not None: - data['release'] = parsed_args.release + data["release"] = parsed_args.release try: return dcmanager_client.subcloud_deploy_install( - subcloud_ref=subcloud_ref, files=files, data=data) + subcloud_ref=subcloud_ref, files=files, data=data + ) except Exception as e: print(e) - error_msg = "Unable to install subcloud %s" % (subcloud_ref) + error_msg = f"Unable to install subcloud {subcloud_ref}" raise exceptions.DCManagerClientException(error_msg) @@ -371,62 +374,64 @@ class BootstrapPhasedSubcloudDeploy(base.DCManagerShowOne): parser = super().get_parser(prog_name) parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to bootstrap.' + "subcloud", help="Name or ID of the subcloud to bootstrap." ) parser.add_argument( - '--bootstrap-address', + "--bootstrap-address", required=False, - help='IP address for initial subcloud controller.' 
+ help="IP address for initial subcloud controller.", ) parser.add_argument( - '--bootstrap-values', + "--bootstrap-values", required=False, - help='YAML file containing parameters required for the bootstrap ' - 'of the subcloud.' + help="YAML file containing parameters required for the bootstrap " + "of the subcloud.", ) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='sysadmin password of the subcloud to be configured, ' - 'if not provided you will be prompted.' + help="sysadmin password of the subcloud to be configured, " + "if not provided you will be prompted.", ) return parser def _get_resources(self, parsed_args): - dcmanager_client = self.app.client_manager.\ - phased_subcloud_deploy_manager.phased_subcloud_deploy_manager + dcmanager_client = self.app.client_manager.phased_subcloud_deploy_manager.\ + phased_subcloud_deploy_manager files = dict() data = dict() if parsed_args.bootstrap_address: - data['bootstrap-address'] = parsed_args.bootstrap_address + data["bootstrap-address"] = parsed_args.bootstrap_address # Get the bootstrap values yaml file if parsed_args.bootstrap_values: if not os.path.isfile(parsed_args.bootstrap_values): - error_msg = "bootstrap-values does not exist: %s" % \ - parsed_args.bootstrap_values + error_msg = ( + "bootstrap-values does not exist: " + f"{parsed_args.bootstrap_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['bootstrap_values'] = parsed_args.bootstrap_values + files["bootstrap_values"] = parsed_args.bootstrap_values # Prompt the user for the subcloud's password if it isn't provided if parsed_args.sysadmin_password: - data['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ) else: password = utils.prompt_for_password() - data["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode(password.encode("utf-8")) subcloud_ref = parsed_args.subcloud return dcmanager_client.subcloud_deploy_bootstrap( - subcloud_ref, files=files, data=data) + subcloud_ref, files=files, data=data + ) class ConfigPhasedSubcloudDeploy(base.DCManagerShowOne): @@ -438,57 +443,56 @@ class ConfigPhasedSubcloudDeploy(base.DCManagerShowOne): def get_parser(self, prog_name): parser = super(ConfigPhasedSubcloudDeploy, self).get_parser(prog_name) + parser.add_argument("subcloud", help="Name or ID of the subcloud to update.") + parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to update.' + "--deploy-config", + required=False, + help="YAML file containing parameters required for the initial " + "configuration and unlock of the subcloud.", ) parser.add_argument( - '--deploy-config', + "--sysadmin-password", required=False, - help='YAML file containing parameters required for the initial ' - 'configuration and unlock of the subcloud.' - ) - - parser.add_argument( - '--sysadmin-password', - required=False, - help='sysadmin password of the subcloud to be configured, ' - 'if not provided you will be prompted.' 
+ help="sysadmin password of the subcloud to be configured, " + "if not provided you will be prompted.", ) return parser def _get_resources(self, parsed_args): subcloud_ref = parsed_args.subcloud - dcmanager_client = self.app.client_manager.\ - phased_subcloud_deploy_manager.phased_subcloud_deploy_manager + dcmanager_client = self.app.client_manager.phased_subcloud_deploy_manager.\ + phased_subcloud_deploy_manager files = dict() data = dict() # Get the deploy config yaml file if parsed_args.deploy_config is not None: if not os.path.isfile(parsed_args.deploy_config): - error_msg = "deploy-config file does not exist: %s" % \ - parsed_args.deploy_config + error_msg = ( + f"deploy-config file does not exist: {parsed_args.deploy_config}" + ) raise exceptions.DCManagerClientException(error_msg) - files['deploy_config'] = parsed_args.deploy_config + files["deploy_config"] = parsed_args.deploy_config # Prompt the user for the subcloud's password if it isn't provided if parsed_args.sysadmin_password is not None: - data['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ) else: password = utils.prompt_for_password() - data["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode(password.encode("utf-8")) try: return dcmanager_client.subcloud_deploy_config( - subcloud_ref=subcloud_ref, files=files, data=data) + subcloud_ref=subcloud_ref, files=files, data=data + ) except Exception as e: print(e) - error_msg = "Unable to configure subcloud %s" % (subcloud_ref) + error_msg = f"Unable to configure subcloud {subcloud_ref}" raise exceptions.DCManagerClientException(error_msg) @@ -502,21 +506,21 @@ class CompletePhasedSubcloudDeploy(base.DCManagerShowOne): parser = super().get_parser(prog_name) parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to complete the deployment.' + "subcloud", help="Name or ID of the subcloud to complete the deployment." ) return parser def _get_resources(self, parsed_args): subcloud_ref = parsed_args.subcloud - dcmanager_client = self.app.client_manager.\ - phased_subcloud_deploy_manager.phased_subcloud_deploy_manager + dcmanager_client = self.app.client_manager.phased_subcloud_deploy_manager.\ + phased_subcloud_deploy_manager try: return dcmanager_client.subcloud_deploy_complete(subcloud_ref) except Exception as e: print(e) - error_msg = "Unable to complete the deployment of subcloud %s" % ( - subcloud_ref) + error_msg = ( + f"Unable to complete the deployment of subcloud {subcloud_ref}" + ) raise exceptions.DCManagerClientException(error_msg) diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_backup_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_backup_manager.py index 0fe3a67..2f4442a 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_backup_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_backup_manager.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2022-2023 Wind River Systems, Inc. +# Copyright (c) 2022-2024 Wind River Systems, Inc. 
# # SPDX-License-Identifier: Apache-2.0 # @@ -9,31 +9,30 @@ import os from osc_lib.command import command +from dcmanagerclient import exceptions, utils from dcmanagerclient.commands.v1 import base -from dcmanagerclient import exceptions -from dcmanagerclient import utils def detail_format(subcloud=None): columns = ( - 'id', - 'name', - 'description', - 'location', - 'software_version', - 'management', - 'availability', - 'deploy_status', - 'management_subnet', - 'management_start_ip', - 'management_end_ip', - 'management_gateway_ip', - 'systemcontroller_gateway_ip', - 'group_id', - 'created_at', - 'updated_at', - 'backup_status', - 'backup_datetime', + "id", + "name", + "description", + "location", + "software_version", + "management", + "availability", + "deploy_status", + "management_subnet", + "management_start_ip", + "management_end_ip", + "management_gateway_ip", + "systemcontroller_gateway_ip", + "group_id", + "created_at", + "updated_at", + "backup_status", + "backup_datetime", ) if subcloud: @@ -59,17 +58,16 @@ def detail_format(subcloud=None): ) for _listitem, sync_status in enumerate(subcloud.endpoint_sync_status): - added_field = (sync_status['endpoint_type'] + - "_sync_status",) - added_value = (sync_status['sync_status'],) + added_field = (sync_status["endpoint_type"] + "_sync_status",) + added_value = (sync_status["sync_status"],) columns += tuple(added_field) data += tuple(added_value) if subcloud.oam_floating_ip != "unavailable": - columns += ('oam_floating_ip',) + columns += ("oam_floating_ip",) data += (subcloud.oam_floating_ip,) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -87,47 +85,47 @@ class CreateSubcloudBackup(base.DCManagerShow): parser = super(CreateSubcloudBackup, self).get_parser(prog_name) parser.add_argument( - '--local-only', + "--local-only", required=False, - action='store_true', - help='If included, backup files will be stored on the subcloud. ' - 'Otherwise, they will be transferred and stored in ' - 'dedicated location on the system controller.' + action="store_true", + help="If included, backup files will be stored on the subcloud. " + "Otherwise, they will be transferred and stored in " + "dedicated location on the system controller.", ) parser.add_argument( - '--registry-images', + "--registry-images", required=False, - action='store_true', - help='If included, container images backup file will also be ' - 'generated. This option can only be used with --local-only ' - 'option.' + action="store_true", + help="If included, container images backup file will also be " + "generated. This option can only be used with --local-only " + "option.", ) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='sysadmin password of the subcloud to create backup, ' - 'if not provided you will be prompted.' + help="sysadmin password of the subcloud to create backup, " + "if not provided you will be prompted.", ) parser.add_argument( - '--backup-values', + "--backup-values", required=False, - help='YAML file containing subcloud backup settings. ' - 'Can be either a local file path or a URL.' + help="YAML file containing subcloud backup settings. " + "Can be either a local file path or a URL.", ) parser.add_argument( - '--subcloud', + "--subcloud", required=False, - help='Name or ID of the subcloud to create backup.' 
+ help="Name or ID of the subcloud to create backup.", ) parser.add_argument( - '--group', + "--group", required=False, - help='Name or ID of the group to create backup.' + help="Name or ID of the group to create backup.", ) return parser @@ -138,56 +136,66 @@ class CreateSubcloudBackup(base.DCManagerShow): files = dict() if not parsed_args.subcloud and not parsed_args.group: - error_msg = ('Please provide the subcloud or subcloud group' - ' name or id.') + error_msg = ( + "Please provide the subcloud or subcloud group name or id." + ) raise exceptions.DCManagerClientException(error_msg) if parsed_args.subcloud and parsed_args.group: - error_msg = ('The command only applies to a single subcloud ' - 'or a subcloud group, not both.') + error_msg = ( + "The command only applies to a single subcloud " + "or a subcloud group, not both." + ) raise exceptions.DCManagerClientException(error_msg) if parsed_args.subcloud: - data['subcloud'] = parsed_args.subcloud + data["subcloud"] = parsed_args.subcloud if parsed_args.group: - data['group'] = parsed_args.group + data["group"] = parsed_args.group if not parsed_args.local_only and parsed_args.registry_images: - error_msg = ('Option --registry-images can not be used without ' - '--local-only option.') + error_msg = ( + "Option --registry-images can not be used without " + "--local-only option." + ) raise exceptions.DCManagerClientException(error_msg) if parsed_args.local_only: - data['local_only'] = 'true' + data["local_only"] = "true" else: - data['local_only'] = 'false' + data["local_only"] = "false" if parsed_args.registry_images: - data['registry_images'] = 'true' + data["registry_images"] = "true" else: - data['registry_images'] = 'false' + data["registry_images"] = "false" if parsed_args.sysadmin_password is not None: # The binary base64 encoded string (eg. b'dGVzdA==') is not JSON # serializable in Python3.x, so it has to be decoded to a JSON # serializable string (eg. 'dGVzdA=='). - data['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")).decode("utf-8") + data["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ).decode("utf-8") else: password = utils.prompt_for_password() data["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")).decode("utf-8") + password.encode("utf-8") + ).decode("utf-8") if parsed_args.backup_values: if not os.path.isfile(parsed_args.backup_values): - error_msg = "Backup-values file does not exist: %s" % \ - parsed_args.backup_values + error_msg = ( + f"Backup-values file does not exist: {parsed_args.backup_values}" + ) + raise exceptions.DCManagerClientException(error_msg) - files['backup_values'] = parsed_args.backup_values + files["backup_values"] = parsed_args.backup_values try: - return dcmanager_client.subcloud_backup_manager.\ - backup_subcloud_create(data=data, files=files) + return dcmanager_client.subcloud_backup_manager.backup_subcloud_create( + data=data, files=files + ) except Exception as e: print(e) @@ -205,36 +213,35 @@ class DeleteSubcloudBackup(command.Command): parser = super(DeleteSubcloudBackup, self).get_parser(prog_name) parser.add_argument( - 'release', - help='Release version that the user is trying to delete.' + "release", help="Release version that the user is trying to delete." ) parser.add_argument( - '--local-only', + "--local-only", required=False, - action='store_true', - help='If included, backup files will be deleted from the ' - 'subcloud. 
Otherwise, they will be deleted from the ' - 'centralized archive on the system controller.' + action="store_true", + help="If included, backup files will be deleted from the " + "subcloud. Otherwise, they will be deleted from the " + "centralized archive on the system controller.", ) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='sysadmin password of the subcloud to delete backup, ' - 'if not provided you will be prompted.' + help="sysadmin password of the subcloud to delete backup, " + "if not provided you will be prompted.", ) parser.add_argument( - '--subcloud', + "--subcloud", required=False, - help='Name or ID of the subcloud to delete backup.' + help="Name or ID of the subcloud to delete backup.", ) parser.add_argument( - '--group', + "--group", required=False, - help='Name or ID of the subcloud to delete backup.' + help="Name or ID of the subcloud group to delete backup.", ) return parser @@ -245,42 +252,46 @@ class DeleteSubcloudBackup(command.Command): subcloud_ref = parsed_args.subcloud data = dict() - data['release'] = parsed_args.release + data["release"] = parsed_args.release if not parsed_args.subcloud and not parsed_args.group: - error_msg = ('Please provide the subcloud or subcloud group' - ' name or id.') + error_msg = ( + "Please provide the subcloud or subcloud group name or id." + ) raise exceptions.DCManagerClientException(error_msg) if parsed_args.subcloud and parsed_args.group: - error_msg = ('This command only applies to a single subcloud ' - 'or a subcloud group, not both.') + error_msg = ( + "This command only applies to a single subcloud " + "or a subcloud group, not both." + ) raise exceptions.DCManagerClientException(error_msg) if parsed_args.subcloud: - data['subcloud'] = parsed_args.subcloud + data["subcloud"] = parsed_args.subcloud if parsed_args.group: - data['group'] = parsed_args.group + data["group"] = parsed_args.group if parsed_args.local_only: - data['local_only'] = 'true' + data["local_only"] = "true" else: - data['local_only'] = 'false' + data["local_only"] = "false" if parsed_args.sysadmin_password is not None: - data['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")).decode("utf-8") + data["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ).decode("utf-8") elif not parsed_args.sysadmin_password and parsed_args.local_only: password = utils.prompt_for_password() data["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")).decode("utf-8") + password.encode("utf-8") + ).decode("utf-8") try: - return dcmanager_client.subcloud_backup_manager.\ - backup_subcloud_delete(subcloud_ref=subcloud_ref, - release_version=release_version, - data=data) + return dcmanager_client.subcloud_backup_manager.backup_subcloud_delete( + subcloud_ref=subcloud_ref, release_version=release_version, data=data + ) except Exception as e: print(e) @@ -301,65 +312,64 @@ class RestoreSubcloudBackup(base.DCManagerShow): parser = super(RestoreSubcloudBackup, self).get_parser(prog_name) parser.add_argument( - '--with-install', + "--with-install", required=False, - action='store_true', - help='If included, the subcloud will be reinstalled prior to ' - 'being restored from backup data.'
+ action="store_true", + help="If included, the subcloud will be reinstalled prior to " + "being restored from backup data.", ) parser.add_argument( - '--release', + "--release", required=False, - help='Software release used to install, bootstrap and/or deploy ' - 'the subcloud with. If not specified, the current software ' - 'release of the system controller will be used.' + help="Software release used to install, bootstrap and/or deploy " + "the subcloud with. If not specified, the current software " + "release of the system controller will be used.", ) parser.add_argument( - '--local-only', + "--local-only", required=False, - action='store_true', - help='If included, the subcloud will be restored from backup data ' - 'previously saved on the subcloud. Otherwise, it will be ' - 'restored from backup data previously saved on the system ' - 'controller.' + action="store_true", + help="If included, the subcloud will be restored from backup data " + "previously saved on the subcloud. Otherwise, it will be " + "restored from backup data previously saved on the system " + "controller.", ) parser.add_argument( - '--registry-images', + "--registry-images", required=False, - action='store_true', - help='If included, user images will be restored post platform ' - 'restore. This option can only be used with --local-only ' - 'option.' - + action="store_true", + help="If included, user images will be restored post platform " + "restore. This option can only be used with --local-only " + "option.", ) parser.add_argument( - '--restore-values', + "--restore-values", required=False, - help='Reference to the restore playbook overrides yaml file, as ' - 'listed in the product documentation for the ansible restore.' + help="Reference to the restore playbook overrides yaml file, as " + "listed in the product documentation for the ansible restore.", ) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='sysadmin password of the subcloud to be restored, ' - 'if not provided you will be prompted.' + help="sysadmin password of the subcloud to be restored, " + "if not provided you will be prompted.", ) parser.add_argument( - '--subcloud', + "--subcloud", required=False, - help='Name or ID of the subcloud to restore.' + help="Name or ID of the subcloud to restore.", ) parser.add_argument( - '--group', + "--group", required=False, - help='Name or ID of the subcloud group to restore.' + help="Name or ID of the subcloud group to restore.", ) return parser @@ -371,67 +381,78 @@ class RestoreSubcloudBackup(base.DCManagerShow): files = dict() if not parsed_args.subcloud and not parsed_args.group: - error_msg = ('Please provide the subcloud or subcloud group' - ' name or id.') + error_msg = ( + "Please provide the subcloud or subcloud group name or id." + ) raise exceptions.DCManagerClientException(error_msg) if parsed_args.subcloud and parsed_args.group: - error_msg = ('The command only applies to a single subcloud ' - 'or a subcloud group, not both.') + error_msg = ( + "The command only applies to a single subcloud " + "or a subcloud group, not both." 
+ ) raise exceptions.DCManagerClientException(error_msg) if parsed_args.subcloud: - data['subcloud'] = parsed_args.subcloud + data["subcloud"] = parsed_args.subcloud if parsed_args.group: - data['group'] = parsed_args.group + data["group"] = parsed_args.group if not parsed_args.local_only and parsed_args.registry_images: - error_msg = ('Option --registry-images cannot be used without ' - '--local-only option.') + error_msg = ( + "Option --registry-images cannot be used without " + "--local-only option." + ) raise exceptions.DCManagerClientException(error_msg) if not parsed_args.with_install and parsed_args.release: - error_msg = ('Option --release cannot be used without ' - '--with-install option.') + error_msg = ( + "Option --release cannot be used without --with-install option." + ) raise exceptions.DCManagerClientException(error_msg) if parsed_args.with_install: - data['with_install'] = 'true' + data["with_install"] = "true" else: - data['with_install'] = 'false' + data["with_install"] = "false" if parsed_args.local_only: - data['local_only'] = 'true' + data["local_only"] = "true" else: - data['local_only'] = 'false' + data["local_only"] = "false" if parsed_args.registry_images: - data['registry_images'] = 'true' + data["registry_images"] = "true" else: - data['registry_images'] = 'false' + data["registry_images"] = "false" if parsed_args.release is not None: - data['release'] = parsed_args.release + data["release"] = parsed_args.release if parsed_args.sysadmin_password is not None: - data['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")).decode("utf-8") + data["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ).decode("utf-8") else: password = utils.prompt_for_password() data["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")).decode("utf-8") + password.encode("utf-8") + ).decode("utf-8") if parsed_args.restore_values: if not os.path.isfile(parsed_args.restore_values): - error_msg = "Restore_values file does not exist: %s" % \ - parsed_args.restore_values + error_msg = ( + "restore_values file does not exist: " + f"{parsed_args.restore_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['restore_values'] = parsed_args.restore_values + files["restore_values"] = parsed_args.restore_values try: - return dcmanager_client.subcloud_backup_manager.\ - backup_subcloud_restore(data=data, files=files) + return dcmanager_client.subcloud_backup_manager.backup_subcloud_restore( + data=data, files=files + ) except Exception as e: print(e) diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_deploy_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_deploy_manager.py index ea354d8..40cbd1b 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_deploy_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_deploy_manager.py @@ -1,4 +1,4 @@ -# Copyright (c) 2020-2023 Wind River Systems, Inc. +# Copyright (c) 2020-2024 Wind River Systems, Inc. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -15,9 +15,10 @@ import os +from osc_lib.command import command + from dcmanagerclient.commands.v1 import base from dcmanagerclient import exceptions -from osc_lib.command import command def _format(subcloud_deploy=None): diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_group_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_group_manager.py index df9337a..362d914 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_group_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_group_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2020-2021 Wind River Systems, Inc. +# Copyright (c) 2020-2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,18 +16,19 @@ from osc_lib.command import command -from dcmanagerclient.commands.v1 import base -from dcmanagerclient.commands.v1.subcloud_manager import detail_format -from dcmanagerclient.commands.v1.subcloud_manager \ - import update_fields_values from dcmanagerclient import exceptions +from dcmanagerclient.commands.v1 import base +from dcmanagerclient.commands.v1.subcloud_manager import ( + detail_format, + update_fields_values, +) def group_format(subcloud_group=None): columns = ( - 'id', - 'name', - 'description', + "id", + "name", + "description", ) if subcloud_group: @@ -38,7 +39,7 @@ def group_format(subcloud_group=None): ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -47,13 +48,13 @@ def detail_group_format(subcloud_group=None): # Include all the fields in group_format # plus some additional fields columns = ( - 'id', - 'name', - 'description', - 'update apply type', - 'max parallel subclouds', - 'created_at', - 'updated_at', + "id", + "name", + "description", + "update apply type", + "max parallel subclouds", + "created_at", + "updated_at", ) if subcloud_group: @@ -67,7 +68,7 @@ def detail_group_format(subcloud_group=None): subcloud_group.updated_at, ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -82,30 +83,28 @@ class AddSubcloudGroup(base.DCManagerShowOne): parser = super(AddSubcloudGroup, self).get_parser(prog_name) parser.add_argument( - '--name', - required=True, - help='Name for the new subcloud group.' + "--name", required=True, help="Name for the new subcloud group." ) parser.add_argument( - '--description', + "--description", required=False, - default='No description provided', - help='Description of new subcloud group.' + default="No description provided", + help="Description of new subcloud group.", ) parser.add_argument( - '--update_apply_type', + "--update_apply_type", required=False, - default='parallel', - help='apply type for the new subcloud group.' + default="parallel", + help="apply type for the new subcloud group.", ) parser.add_argument( - '--max_parallel_subclouds', + "--max_parallel_subclouds", required=False, default=2, - help='max parallel subclouds for the new subcloud group.' 
+ help="max parallel subclouds for the new subcloud group.", ) return parser @@ -114,19 +113,17 @@ class AddSubcloudGroup(base.DCManagerShowOne): kwargs = dict() if parsed_args.name is not None: - kwargs['name'] = parsed_args.name + kwargs["name"] = parsed_args.name if parsed_args.description is not None: - kwargs['description'] = parsed_args.description + kwargs["description"] = parsed_args.description if parsed_args.update_apply_type is not None: - kwargs['update_apply_type'] = parsed_args.update_apply_type + kwargs["update_apply_type"] = parsed_args.update_apply_type if parsed_args.max_parallel_subclouds is not None: - kwargs['max_parallel_subclouds'] = \ - parsed_args.max_parallel_subclouds - return dcmanager_client.subcloud_group_manager.add_subcloud_group( - **kwargs) + kwargs["max_parallel_subclouds"] = parsed_args.max_parallel_subclouds + return dcmanager_client.subcloud_group_manager.add_subcloud_group(**kwargs) class ListSubcloudGroup(base.DCManagerLister): @@ -153,16 +150,19 @@ class ListSubcloudGroupSubclouds(base.DCManagerLister): def get_parser(self, prog_name): parser = super(ListSubcloudGroupSubclouds, self).get_parser(prog_name) parser.add_argument( - 'group', - help='Name or ID of subcloud group to list associated subclouds.' + "group", + help="Name or ID of subcloud group to list associated subclouds.", ) return parser def _get_resources(self, parsed_args): subcloud_group_ref = parsed_args.group dcmanager_client = self.app.client_manager.subcloud_group_manager - result = dcmanager_client.subcloud_group_manager. \ - subcloud_group_list_subclouds(subcloud_group_ref) + result = ( + dcmanager_client.subcloud_group_manager.subcloud_group_list_subclouds( + subcloud_group_ref + ) + ) update_fields_values(result) return result @@ -177,8 +177,7 @@ class ShowSubcloudGroup(base.DCManagerShowOne): parser = super(ShowSubcloudGroup, self).get_parser(prog_name) parser.add_argument( - 'group', - help='Name or ID of subcloud group to view the details.' + "group", help="Name or ID of subcloud group to view the details." ) return parser @@ -186,8 +185,9 @@ class ShowSubcloudGroup(base.DCManagerShowOne): def _get_resources(self, parsed_args): subcloud_group_ref = parsed_args.group dcmanager_client = self.app.client_manager.subcloud_group_manager - return dcmanager_client.subcloud_group_manager.\ - subcloud_group_detail(subcloud_group_ref) + return dcmanager_client.subcloud_group_manager.subcloud_group_detail( + subcloud_group_ref + ) class DeleteSubcloudGroup(command.Command): @@ -197,8 +197,7 @@ class DeleteSubcloudGroup(command.Command): parser = super(DeleteSubcloudGroup, self).get_parser(prog_name) parser.add_argument( - 'group', - help='Name or ID of the subcloud group to delete.' + "group", help="Name or ID of the subcloud group to delete." 
) return parser @@ -206,11 +205,12 @@ class DeleteSubcloudGroup(command.Command): subcloud_group_ref = parsed_args.group dcmanager_client = self.app.client_manager.subcloud_group_manager try: - dcmanager_client.subcloud_group_manager.\ - delete_subcloud_group(subcloud_group_ref) + dcmanager_client.subcloud_group_manager.delete_subcloud_group( + subcloud_group_ref + ) except Exception as e: print(e) - msg = "Unable to delete subcloud group %s" % (subcloud_group_ref) + msg = f"Unable to delete subcloud group {subcloud_group_ref}" raise exceptions.DCManagerClientException(msg) @@ -224,33 +224,26 @@ class UpdateSubcloudGroup(base.DCManagerShowOne): parser = super(UpdateSubcloudGroup, self).get_parser(prog_name) parser.add_argument( - 'group', - help='Name or ID of the subcloud group to update.' + "group", help="Name or ID of the subcloud group to update." + ) + + parser.add_argument("--name", required=False, help="Name of subcloud group.") + + parser.add_argument( + "--description", required=False, help="Description of subcloud group." ) parser.add_argument( - '--name', + "--update_apply_type", required=False, - help='Name of subcloud group.' + help="Update apply type of subcloud group.", ) parser.add_argument( - '--description', - required=False, - help='Description of subcloud group.' - ) - - parser.add_argument( - '--update_apply_type', - required=False, - help='Update apply type of subcloud group.' - ) - - parser.add_argument( - '--max_parallel_subclouds', + "--max_parallel_subclouds", type=int, required=False, - help='max parallel subclouds of subcloud group.' + help="max parallel subclouds of subcloud group.", ) return parser @@ -260,23 +253,22 @@ class UpdateSubcloudGroup(base.DCManagerShowOne): dcmanager_client = self.app.client_manager.subcloud_group_manager kwargs = dict() if parsed_args.name: - kwargs['name'] = parsed_args.name + kwargs["name"] = parsed_args.name if parsed_args.description: - kwargs['description'] = parsed_args.description + kwargs["description"] = parsed_args.description if parsed_args.update_apply_type: - kwargs['update_apply_type'] = parsed_args.update_apply_type + kwargs["update_apply_type"] = parsed_args.update_apply_type if parsed_args.max_parallel_subclouds: - kwargs['max_parallel_subclouds'] = \ - parsed_args.max_parallel_subclouds + kwargs["max_parallel_subclouds"] = parsed_args.max_parallel_subclouds if len(kwargs) == 0: error_msg = "Nothing to update" raise exceptions.DCManagerClientException(error_msg) try: - return dcmanager_client. 
\ - subcloud_group_manager.update_subcloud_group( - subcloud_group_ref, **kwargs) + return dcmanager_client.subcloud_group_manager.update_subcloud_group( + subcloud_group_ref, **kwargs + ) except Exception as e: print(e) - msg = "Unable to update subcloud group %s" % (subcloud_group_ref) + msg = f"Unable to update subcloud group {subcloud_group_ref}" raise exceptions.DCManagerClientException(msg) diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_manager.py index 5c17bff..414504b 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_manager.py @@ -16,30 +16,26 @@ import base64 import os -import six +import six from osc_lib.command import command +from dcmanagerclient import exceptions, utils from dcmanagerclient.commands.v1 import base -from dcmanagerclient import exceptions -from dcmanagerclient import utils + +SET_FIELD_VALUE_DICT = {"region_name": None} -SET_FIELD_VALUE_DICT = { - "region_name": None -} - - -def format(subcloud=None): +def basic_format(subcloud=None): columns = ( - 'id', - 'name', - 'management', - 'availability', - 'deploy status', - 'sync', - 'backup status', - 'prestage status' + "id", + "name", + "management", + "availability", + "deploy status", + "sync", + "backup status", + "prestage status", ) if subcloud: @@ -51,38 +47,38 @@ def format(subcloud=None): subcloud.deploy_status, subcloud.sync_status, subcloud.backup_status, - subcloud.prestage_status + subcloud.prestage_status, ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data def basic_detail_format(subcloud=None): columns = ( - 'id', - 'name', - 'description', - 'location', - 'software_version', - 'management', - 'availability', - 'deploy_status', - 'management_subnet', - 'management_start_ip', - 'management_end_ip', - 'management_gateway_ip', - 'systemcontroller_gateway_ip', - 'group_id', - 'peer_group_id', - 'created_at', - 'updated_at', - 'backup_status', - 'backup_datetime', - 'prestage_status', - 'prestage_versions', + "id", + "name", + "description", + "location", + "software_version", + "management", + "availability", + "deploy_status", + "management_subnet", + "management_start_ip", + "management_end_ip", + "management_gateway_ip", + "systemcontroller_gateway_ip", + "group_id", + "peer_group_id", + "created_at", + "updated_at", + "backup_status", + "backup_datetime", + "prestage_status", + "prestage_versions", ) if subcloud: @@ -110,7 +106,7 @@ def basic_detail_format(subcloud=None): subcloud.prestage_versions, ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -119,18 +115,17 @@ def detail_format(subcloud=None): columns, data = basic_detail_format(subcloud) if subcloud: for _listitem, sync_status in enumerate(subcloud.endpoint_sync_status): - added_field = (sync_status['endpoint_type'] + - "_sync_status",) - added_value = (sync_status['sync_status'],) + added_field = (sync_status["endpoint_type"] + "_sync_status",) + added_value = (sync_status["sync_status"],) columns += tuple(added_field) data += tuple(added_value) if subcloud.oam_floating_ip != "unavailable": - columns += ('oam_floating_ip',) + columns += ("oam_floating_ip",) data += (subcloud.oam_floating_ip,) if subcloud.deploy_config_sync_status != "unknown": - columns += 
('deploy_config_sync_status',) + columns += ("deploy_config_sync_status",) data += (subcloud.deploy_config_sync_status,) return columns, data @@ -140,7 +135,7 @@ def detail_prestage_format(subcloud=None): columns, data = detail_format(subcloud) if subcloud and subcloud.prestage_software_version: - columns += ('prestage_software_version',) + columns += ("prestage_software_version",) data += (subcloud.prestage_software_version,) return columns, data @@ -149,7 +144,7 @@ def detail_prestage_format(subcloud=None): def detail_show_format(subcloud=None): columns, data = detail_format(subcloud) if subcloud: - columns += ('region_name',) + columns += ("region_name",) data += (subcloud.region_name,) return columns, data @@ -158,17 +153,23 @@ def detail_list_format(subcloud=None): columns, data = basic_detail_format(subcloud) # Find the index of 'deploy_status' in the tuple - deploy_status_index = columns.index('deploy_status') + deploy_status_index = columns.index("deploy_status") # Insert "sync" field after 'deploy_status' - columns = columns[:deploy_status_index + 1] + ("sync",) + \ - columns[deploy_status_index + 1:] + columns = ( + columns[: deploy_status_index + 1] + + ("sync",) + + columns[deploy_status_index + 1 :] + ) if subcloud: - data = data[:deploy_status_index + 1] + (subcloud.sync_status,) + \ - data[deploy_status_index + 1:] + data = ( + data[: deploy_status_index + 1] + + (subcloud.sync_status,) + + data[deploy_status_index + 1 :] + ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -178,14 +179,13 @@ def detail_list_format(subcloud=None): # The other commands do not required it, since the output should # not show that field def update_fields_values(result): - if len(result) == 0: return - for i in range(len(result)): + for item in result: for field, value in SET_FIELD_VALUE_DICT.items(): - if field in dir(result[i]): - setattr(result[i], field, value) + if field in dir(item): + setattr(item, field, value) class AddSubcloud(base.DCManagerShowOne): @@ -197,73 +197,67 @@ class AddSubcloud(base.DCManagerShowOne): def get_parser(self, prog_name): parser = super(AddSubcloud, self).get_parser(prog_name) - parser.add_argument( - '--name', - required=False, - help='Subcloud name' - ) + parser.add_argument("--name", required=False, help="Subcloud name") parser.add_argument( - '--bootstrap-address', + "--bootstrap-address", required=True, - help='IP address for initial subcloud controller.' + help="IP address for initial subcloud controller.", ) parser.add_argument( - '--bootstrap-values', + "--bootstrap-values", required=True, - help='YAML file containing parameters required for the bootstrap ' - 'of the subcloud.' + help="YAML file containing parameters required for the bootstrap " + "of the subcloud.", ) parser.add_argument( - '--deploy-config', + "--deploy-config", required=False, - help='YAML file containing parameters required for the initial ' - 'configuration and unlock of the subcloud.' + help="YAML file containing parameters required for the initial " + "configuration and unlock of the subcloud.", ) parser.add_argument( - '--install-values', + "--install-values", required=False, - help='YAML file containing parameters required for the ' - 'remote install of the subcloud.' 
+ help="YAML file containing parameters required for the " + "remote install of the subcloud.", ) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='sysadmin password of the subcloud to be configured, ' - 'if not provided you will be prompted.' + help="sysadmin password of the subcloud to be configured, " + "if not provided you will be prompted.", ) parser.add_argument( - '--bmc-password', + "--bmc-password", required=False, - help='bmc password of the subcloud to be configured, ' - 'if not provided you will be prompted. This parameter is only' - ' valid if the --install-values are specified.' + help="bmc password of the subcloud to be configured, " + "if not provided you will be prompted. This parameter is only" + " valid if the --install-values are specified.", ) parser.add_argument( - '--group', - required=False, - help='Name or ID of subcloud group.' + "--group", required=False, help="Name or ID of subcloud group." ) parser.add_argument( - '--migrate', + "--migrate", required=False, - action='store_true', - help='Migrate a subcloud from another distributed cloud.' + action="store_true", + help="Migrate a subcloud from another distributed cloud.", ) parser.add_argument( - '--release', + "--release", required=False, - help='software release used to install, bootstrap and/or deploy ' - 'the subcloud with. If not specified, the current software ' - 'release of the system controller will be used.' + help="software release used to install, bootstrap and/or deploy " + "the subcloud with. If not specified, the current software " + "release of the system controller will be used.", ) return parser @@ -272,22 +266,25 @@ class AddSubcloud(base.DCManagerShowOne): dcmanager_client = self.app.client_manager.subcloud_manager files = dict() data = dict() - data['bootstrap-address'] = parsed_args.bootstrap_address + data["bootstrap-address"] = parsed_args.bootstrap_address # Get the install values yaml file if parsed_args.install_values is not None: if not os.path.isfile(parsed_args.install_values): - error_msg = "install-values does not exist: %s" % \ - parsed_args.install_values + error_msg = ( + f"install-values does not exist: {parsed_args.install_values}" + ) + raise exceptions.DCManagerClientException(error_msg) - files['install_values'] = parsed_args.install_values + files["install_values"] = parsed_args.install_values # Get the bootstrap values yaml file if not os.path.isfile(parsed_args.bootstrap_values): - error_msg = "bootstrap-values does not exist: %s" % \ - parsed_args.bootstrap_values + error_msg = ( + f"bootstrap-values does not exist: {parsed_args.bootstrap_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['bootstrap_values'] = parsed_args.bootstrap_values + files["bootstrap_values"] = parsed_args.bootstrap_values # Get the deploy config yaml file if parsed_args.deploy_config is not None: @@ -296,48 +293,50 @@ class AddSubcloud(base.DCManagerShowOne): raise exceptions.DCManagerClientException(error_msg) if not os.path.isfile(parsed_args.deploy_config): - error_msg = "deploy-config does not exist: %s" % \ - parsed_args.deploy_config + error_msg = ( + f"deploy-config does not exist: {parsed_args.deploy_config}" + ) raise exceptions.DCManagerClientException(error_msg) - files['deploy_config'] = parsed_args.deploy_config + files["deploy_config"] = parsed_args.deploy_config # Prompt the user for the subcloud's password if it isn't provided if parsed_args.sysadmin_password is not None: - data['sysadmin_password'] = 
base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ) else: password = utils.prompt_for_password() - data["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode(password.encode("utf-8")) if parsed_args.install_values is not None: if parsed_args.bmc_password is not None: - data['bmc_password'] = base64.b64encode( - parsed_args.bmc_password.encode("utf-8")) - else: - password = utils.prompt_for_password('bmc') data["bmc_password"] = base64.b64encode( - password.encode("utf-8")) + parsed_args.bmc_password.encode("utf-8") + ) + else: + password = utils.prompt_for_password("bmc") + data["bmc_password"] = base64.b64encode(password.encode("utf-8")) if parsed_args.group is not None: - data['group_id'] = parsed_args.group + data["group_id"] = parsed_args.group if parsed_args.migrate: - data['migrate'] = 'true' + data["migrate"] = "true" if parsed_args.release is not None: - data['release'] = parsed_args.release + data["release"] = parsed_args.release if parsed_args.name is not None: if parsed_args.migrate: - data['name'] = parsed_args.name + data["name"] = parsed_args.name else: - error_msg = 'The --name option can only be used with \ - --migrate option.' + error_msg = "The --name option can only be used with \ + --migrate option." raise exceptions.DCManagerClientException(error_msg) - result = dcmanager_client.subcloud_manager.add_subcloud(files=files, - data=data) + result = dcmanager_client.subcloud_manager.add_subcloud( + files=files, data=data + ) update_fields_values(result) return result @@ -352,25 +351,27 @@ class ListSubcloud(base.DCManagerLister): self.show_basic_list = True def _validate_parsed_args(self, parsed_args): - self.show_basic_list = \ + self.show_basic_list = ( False if parsed_args.columns or parsed_args.detail else True + ) def _get_format_function(self): - return format if self.show_basic_list else detail_list_format + return basic_format if self.show_basic_list else detail_list_format def get_parser(self, prog_name): parser = super(ListSubcloud, self).get_parser(prog_name) parser.add_argument( - '--all', + "--all", required=False, - action='store_true', - help='List all subclouds include "secondary" state subclouds' + action="store_true", + help='List all subclouds include "secondary" state subclouds', ) parser.add_argument( - '-d', '--detail', + "-d", + "--detail", required=False, - action='store_true', - help="List all columns of the subclouds" + action="store_true", + help="List all columns of the subclouds", ) return parser @@ -383,8 +384,11 @@ class ListSubcloud(base.DCManagerLister): # state subclouds. if parsed_args.all: return subclouds - filtered_subclouds = [s for s in subclouds if s.deploy_status not in - ('secondary', 'secondary-failed')] + filtered_subclouds = [ + s + for s in subclouds + if s.deploy_status not in ("secondary", "secondary-failed") + ] return filtered_subclouds @@ -398,14 +402,14 @@ class ShowSubcloud(base.DCManagerShowOne): parser = super(ShowSubcloud, self).get_parser(prog_name) parser.add_argument( - 'subcloud', - help='Name or ID of subcloud to view the details.' + "subcloud", help="Name or ID of subcloud to view the details." 
) parser.add_argument( - '-d', '--detail', - action='store_true', - help="Show additional details for a subcloud" + "-d", + "--detail", + action="store_true", + help="Show additional details for a subcloud", ) return parser @@ -414,11 +418,11 @@ class ShowSubcloud(base.DCManagerShowOne): subcloud_ref = parsed_args.subcloud dcmanager_client = self.app.client_manager.subcloud_manager if parsed_args.detail: - return dcmanager_client.subcloud_manager.\ - subcloud_additional_details(subcloud_ref) + return dcmanager_client.subcloud_manager.subcloud_additional_details( + subcloud_ref + ) else: - return dcmanager_client.subcloud_manager.\ - subcloud_detail(subcloud_ref) + return dcmanager_client.subcloud_manager.subcloud_detail(subcloud_ref) class ShowSubcloudError(command.Command): @@ -428,8 +432,7 @@ class ShowSubcloudError(command.Command): parser = super(ShowSubcloudError, self).get_parser(prog_name) parser.add_argument( - 'subcloud', - help='Name or ID of subcloud to view the errors details.' + "subcloud", help="Name or ID of subcloud to view the errors details." ) return parser @@ -438,7 +441,7 @@ class ShowSubcloudError(command.Command): dcmanager_client = self.app.client_manager.subcloud_manager ret = dcmanager_client.subcloud_manager.subcloud_detail(subcloud_ref) data = ret[0].error_description - print(''.join(data)) + print("".join(data)) class DeleteSubcloud(command.Command): @@ -447,10 +450,7 @@ class DeleteSubcloud(command.Command): def get_parser(self, prog_name): parser = super(DeleteSubcloud, self).get_parser(prog_name) - parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to delete.' - ) + parser.add_argument("subcloud", help="Name or ID of the subcloud to delete.") return parser def take_action(self, parsed_args): @@ -460,7 +460,7 @@ class DeleteSubcloud(command.Command): dcmanager_client.subcloud_manager.delete_subcloud(subcloud_ref) except Exception as e: print(e) - error_msg = "Unable to delete subcloud %s" % (subcloud_ref) + error_msg = f"Unable to delete subcloud {subcloud_ref}" raise exceptions.DCManagerClientException(error_msg) @@ -474,15 +474,14 @@ class UnmanageSubcloud(base.DCManagerShowOne): parser = super(UnmanageSubcloud, self).get_parser(prog_name) parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to unmanage.' + "subcloud", help="Name or ID of the subcloud to unmanage." ) parser.add_argument( - '--migrate', + "--migrate", required=False, - action='store_true', - help='Mark the subcloud for an upcoming migration.' 
+ action="store_true", + help="Mark the subcloud for an upcoming migration.", ) return parser @@ -491,19 +490,20 @@ class UnmanageSubcloud(base.DCManagerShowOne): subcloud_ref = parsed_args.subcloud dcmanager_client = self.app.client_manager.subcloud_manager kwargs = dict() - kwargs['management-state'] = 'unmanaged' + kwargs["management-state"] = "unmanaged" if parsed_args.migrate: - kwargs['migrate'] = 'true' + kwargs["migrate"] = "true" try: result = dcmanager_client.subcloud_manager.update_subcloud( - subcloud_ref, files=None, data=kwargs) + subcloud_ref, files=None, data=kwargs + ) update_fields_values(result) return result except Exception as e: print(e) - error_msg = "Unable to unmanage subcloud %s" % (subcloud_ref) + error_msg = f"Unable to unmanage subcloud {subcloud_ref}" raise exceptions.DCManagerClientException(error_msg) @@ -516,17 +516,14 @@ class ManageSubcloud(base.DCManagerShowOne): def get_parser(self, prog_name): parser = super(ManageSubcloud, self).get_parser(prog_name) - parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to manage.' - ) + parser.add_argument("subcloud", help="Name or ID of the subcloud to manage.") parser.add_argument( - '--force', + "--force", required=False, - action='store_true', - help='Disregard subcloud availability status, intended for \ - some upgrade recovery scenarios.' + action="store_true", + help="Disregard subcloud availability status, intended for \ + some upgrade recovery scenarios.", ) return parser @@ -534,18 +531,19 @@ class ManageSubcloud(base.DCManagerShowOne): subcloud_ref = parsed_args.subcloud dcmanager_client = self.app.client_manager.subcloud_manager kwargs = dict() - kwargs['management-state'] = 'managed' + kwargs["management-state"] = "managed" if parsed_args.force: - kwargs['force'] = 'true' + kwargs["force"] = "true" try: result = dcmanager_client.subcloud_manager.update_subcloud( - subcloud_ref, files=None, data=kwargs) + subcloud_ref, files=None, data=kwargs + ) update_fields_values(result) return result except Exception as e: print(e) - error_msg = "Unable to manage subcloud %s" % (subcloud_ref) + error_msg = f"Unable to manage subcloud {subcloud_ref}" raise exceptions.DCManagerClientException(error_msg) @@ -558,96 +556,79 @@ class UpdateSubcloud(base.DCManagerShowOne): def get_parser(self, prog_name): parser = super(UpdateSubcloud, self).get_parser(prog_name) + parser.add_argument("subcloud", help="Name or ID of the subcloud to update.") + + parser.add_argument("--name", required=False, help="Name of subcloud.") + parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to update.' + "--description", required=False, help="Description of subcloud." ) parser.add_argument( - '--name', - required=False, - help='Name of subcloud.' + "--location", required=False, help="Location of subcloud." ) parser.add_argument( - '--description', - required=False, - help='Description of subcloud.' + "--group", required=False, help="Name or ID of subcloud group." ) parser.add_argument( - '--location', - required=False, - help='Location of subcloud.' + "--management-subnet", required=False, help="Network subnet of subcloud." ) parser.add_argument( - '--group', + "--management-gateway-ip", required=False, - help='Name or ID of subcloud group.' + help="Network gateway IP of subcloud.", ) parser.add_argument( - '--management-subnet', + "--management-start-ip", required=False, - help='Network subnet of subcloud.' 
+ help="Network start IP of subcloud.", ) parser.add_argument( - '--management-gateway-ip', - required=False, - help='Network gateway IP of subcloud.' + "--management-end-ip", required=False, help="Network end IP of subcloud." ) parser.add_argument( - '--management-start-ip', + "--sysadmin-password", required=False, - help='Network start IP of subcloud.' + help="sysadmin password of the subcloud to be updated, " + "if not provided you will be prompted.", ) parser.add_argument( - '--management-end-ip', + "--bootstrap-address", required=False, - help='Network end IP of subcloud.' + help="bootstrap address of the subcloud to be updated.", ) parser.add_argument( - '--sysadmin-password', + "--install-values", required=False, - help='sysadmin password of the subcloud to be updated, ' - 'if not provided you will be prompted.' + help="YAML file containing parameters required for the " + "remote install of the subcloud.", ) parser.add_argument( - '--bootstrap-address', + "--bmc-password", required=False, - help='bootstrap address of the subcloud to be updated.' - ) - - parser.add_argument( - '--install-values', - required=False, - help='YAML file containing parameters required for the ' - 'remote install of the subcloud.' - ) - - parser.add_argument( - '--bmc-password', - required=False, - help='bmc password of the subcloud to be configured, if not ' - 'provided you will be prompted. This parameter is only' - ' valid if the --install-values are specified.' + help="bmc password of the subcloud to be configured, if not " + "provided you will be prompted. This parameter is only" + " valid if the --install-values are specified.", ) parser.add_argument( - '--bootstrap-values', + "--bootstrap-values", required=False, - help='YAML file containing subcloud configuration settings. ' - 'Can be either a local file path or a URL.' + help="YAML file containing subcloud configuration settings. " + "Can be either a local file path or a URL.", ) parser.add_argument( - '--peer-group', + "--peer-group", required=False, - help='Name or ID of subcloud peer group (for migrate).' 
+ help="Name or ID of subcloud peer group (for migrate).", ) return parser @@ -658,81 +639,88 @@ class UpdateSubcloud(base.DCManagerShowOne): data = dict() if parsed_args.name: - data['name'] = parsed_args.name + data["name"] = parsed_args.name if parsed_args.description: - data['description'] = parsed_args.description + data["description"] = parsed_args.description if parsed_args.location: - data['location'] = parsed_args.location + data["location"] = parsed_args.location if parsed_args.group: - data['group_id'] = parsed_args.group + data["group_id"] = parsed_args.group if parsed_args.management_subnet: - data['management_subnet'] = parsed_args.management_subnet + data["management_subnet"] = parsed_args.management_subnet if parsed_args.management_gateway_ip: - data['management_gateway_ip'] = parsed_args.management_gateway_ip + data["management_gateway_ip"] = parsed_args.management_gateway_ip if parsed_args.management_start_ip: - data['management_start_ip'] = parsed_args.management_start_ip + data["management_start_ip"] = parsed_args.management_start_ip if parsed_args.management_end_ip: - data['management_end_ip'] = parsed_args.management_end_ip + data["management_end_ip"] = parsed_args.management_end_ip if parsed_args.bootstrap_address: - data['bootstrap_address'] = parsed_args.bootstrap_address + data["bootstrap_address"] = parsed_args.bootstrap_address if parsed_args.peer_group: - data['peer_group'] = parsed_args.peer_group + data["peer_group"] = parsed_args.peer_group subcloud_network_values = [ - data.get('management_subnet'), - data.get('management_gateway_ip'), - data.get('management_start_ip'), - data.get('management_end_ip'), - data.get('bootstrap_address') + data.get("management_subnet"), + data.get("management_gateway_ip"), + data.get("management_start_ip"), + data.get("management_end_ip"), + data.get("bootstrap_address"), ] # Semantic check if the required arguments for updating admin network if all(value is not None for value in subcloud_network_values): # Prompt the user for the subcloud's password if it isn't provided if parsed_args.sysadmin_password is not None: - data['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ) else: password = utils.prompt_for_password() data["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")) + password.encode("utf-8") + ) # For subcloud network reconfiguration # If any management_* presents, need all # management_subnet/management_gateway_ip/ # management_start_ip/management_end_ip/bootstrap_address # presents. 
- elif any(value is not None and value != parsed_args.bootstrap_address - for value in subcloud_network_values): + elif any( + value is not None and value != parsed_args.bootstrap_address + for value in subcloud_network_values + ): # Not all network values exist error_msg = ( "For subcloud network reconfiguration request all the " "following parameters are necessary: --management-subnet, " "--management-gateway-ip, --management-start-ip, " - "--management-end-ip and --bootstrap-address") + "--management-end-ip and --bootstrap-address" + ) raise exceptions.DCManagerClientException(error_msg) if parsed_args.install_values: if not os.path.isfile(parsed_args.install_values): - error_msg = "install-values does not exist: %s" % \ - parsed_args.install_values + error_msg = ( + f"install-values does not exist: {parsed_args.install_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['install_values'] = parsed_args.install_values + files["install_values"] = parsed_args.install_values if parsed_args.bmc_password is not None: - data['bmc_password'] = base64.b64encode( - parsed_args.bmc_password.encode("utf-8")) - else: - password = utils.prompt_for_password('bmc') data["bmc_password"] = base64.b64encode( - password.encode("utf-8")) + parsed_args.bmc_password.encode("utf-8") + ) + else: + password = utils.prompt_for_password("bmc") + data["bmc_password"] = base64.b64encode(password.encode("utf-8")) # Update the bootstrap values from yaml file if parsed_args.bootstrap_values: if not os.path.isfile(parsed_args.bootstrap_values): - error_msg = "bootstrap-values does not exist: %s" % \ - parsed_args.bootstrap_values + error_msg = ( + f"bootstrap-values does not exist: {parsed_args.bootstrap_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['bootstrap_values'] = parsed_args.bootstrap_values + files["bootstrap_values"] = parsed_args.bootstrap_values if not (data or files): error_msg = "Nothing to update" @@ -740,12 +728,13 @@ class UpdateSubcloud(base.DCManagerShowOne): try: result = dcmanager_client.subcloud_manager.update_subcloud( - subcloud_ref, files=files, data=data) + subcloud_ref, files=files, data=data + ) update_fields_values(result) return result except Exception as e: print(e) - error_msg = "Unable to update subcloud %s" % (subcloud_ref) + error_msg = f"Unable to update subcloud {subcloud_ref}" raise exceptions.DCManagerClientException(error_msg) @@ -756,8 +745,10 @@ class ReconfigSubcloud(base.DCManagerShowOne): return detail_format def _get_resources(self, parsed_args): - deprecation_msg = ("This command has been deprecated. Please use " - "'subcloud deploy config' instead.") + deprecation_msg = ( + "This command has been deprecated. Please use " + "'subcloud deploy config' instead." + ) raise exceptions.DCManagerClientException(deprecation_msg) @@ -768,8 +759,10 @@ class ReinstallSubcloud(base.DCManagerShowOne): return detail_format def _get_resources(self, parsed_args): - deprecation_msg = ("This command has been deprecated. Please use " - "'subcloud redeploy' instead.") + deprecation_msg = ( + "This command has been deprecated. Please use " + "'subcloud redeploy' instead." + ) raise exceptions.DCManagerClientException(deprecation_msg) @@ -783,52 +776,51 @@ class RedeploySubcloud(base.DCManagerShowOne): parser = super(RedeploySubcloud, self).get_parser(prog_name) parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to redeploy.' + "subcloud", help="Name or ID of the subcloud to redeploy."
) parser.add_argument( - '--install-values', + "--install-values", required=False, - help='YAML file containing parameters required for the ' - 'remote install of the subcloud.' + help="YAML file containing parameters required for the " + "remote install of the subcloud.", ) parser.add_argument( - '--bootstrap-values', + "--bootstrap-values", required=False, - help='YAML file containing subcloud configuration settings. ' - 'Can be either a local file path or a URL.' + help="YAML file containing subcloud configuration settings. " + "Can be either a local file path or a URL.", ) parser.add_argument( - '--deploy-config', + "--deploy-config", required=False, - help='YAML file containing subcloud variables to be passed to the ' - 'deploy playbook.' + help="YAML file containing subcloud variables to be passed to the " + "deploy playbook.", ) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='sysadmin password of the subcloud to be configured, ' - 'if not provided you will be prompted.' + help="sysadmin password of the subcloud to be configured, " + "if not provided you will be prompted.", ) parser.add_argument( - '--bmc-password', + "--bmc-password", required=False, - help='bmc password of the subcloud to be configured, if not ' - 'provided you will be prompted. This parameter is only' - ' valid if the --install-values are specified.' + help="bmc password of the subcloud to be configured, if not " + "provided you will be prompted. This parameter is only" + " valid if the --install-values are specified.", ) parser.add_argument( - '--release', + "--release", required=False, - help='software release used to install, bootstrap and/or deploy ' - 'the subcloud with. If not specified, the current software ' - 'release of the system controller will be used.' + help="software release used to install, bootstrap and/or deploy " + "the subcloud with. 
If not specified, the current software " + "release of the system controller will be used.", ) return parser @@ -841,63 +833,71 @@ class RedeploySubcloud(base.DCManagerShowOne): # Get the install values yaml file if parsed_args.install_values is not None: if not os.path.isfile(parsed_args.install_values): - error_msg = "install-values does not exist: %s" % \ - parsed_args.install_values + error_msg = ( + f"install-values does not exist: {parsed_args.install_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['install_values'] = parsed_args.install_values + files["install_values"] = parsed_args.install_values # Get the bootstrap values yaml file if parsed_args.bootstrap_values is not None: if not os.path.isfile(parsed_args.bootstrap_values): - error_msg = "bootstrap-values does not exist: %s" % \ - parsed_args.bootstrap_values + error_msg = ( + "bootstrap-values does not exist: " + f"{parsed_args.bootstrap_values}" + ) raise exceptions.DCManagerClientException(error_msg) - files['bootstrap_values'] = parsed_args.bootstrap_values + files["bootstrap_values"] = parsed_args.bootstrap_values # Get the deploy config yaml file if parsed_args.deploy_config is not None: if not os.path.isfile(parsed_args.deploy_config): - error_msg = "deploy-config does not exist: %s" % \ - parsed_args.deploy_config + error_msg = ( + f"deploy-config does not exist: {parsed_args.deploy_config}" + ) raise exceptions.DCManagerClientException(error_msg) - files['deploy_config'] = parsed_args.deploy_config + files["deploy_config"] = parsed_args.deploy_config # Prompt the user for the subcloud's password if it isn't provided if parsed_args.sysadmin_password is not None: - data['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ) else: password = utils.prompt_for_password() - data["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")) + data["sysadmin_password"] = base64.b64encode(password.encode("utf-8")) if parsed_args.install_values: if parsed_args.bmc_password: - data['bmc_password'] = base64.b64encode( - parsed_args.bmc_password.encode("utf-8")) - else: - password = utils.prompt_for_password('bmc') data["bmc_password"] = base64.b64encode( - password.encode("utf-8")) + parsed_args.bmc_password.encode("utf-8") + ) + else: + password = utils.prompt_for_password("bmc") + data["bmc_password"] = base64.b64encode(password.encode("utf-8")) if parsed_args.release is not None: - data['release'] = parsed_args.release + data["release"] = parsed_args.release # Require user to type redeploy to confirm - print("WARNING: This will redeploy the subcloud. " - "All applications and data on the subcloud will be lost.") - confirm = six.moves.input( - "Please type \"redeploy\" to confirm: ").strip().lower() - if confirm == 'redeploy': + print( + "WARNING: This will redeploy the subcloud. " + "All applications and data on the subcloud will be lost." 
+ ) + confirm = ( + six.moves.input('Please type "redeploy" to confirm: ').strip().lower() + ) + if confirm == "redeploy": try: return dcmanager_client.subcloud_manager.redeploy_subcloud( - subcloud_ref=subcloud_ref, files=files, data=data) + subcloud_ref=subcloud_ref, files=files, data=data + ) except Exception as e: print(e) - error_msg = "Unable to redeploy subcloud %s" % (subcloud_ref) + error_msg = f"Unable to redeploy subcloud {subcloud_ref}" raise exceptions.DCManagerClientException(error_msg) else: - msg = "Subcloud %s will not be redeployed" % (subcloud_ref) + msg = f"Subcloud {subcloud_ref} will not be redeployed" raise exceptions.DCManagerClientException(msg) @@ -911,37 +911,36 @@ class RestoreSubcloud(base.DCManagerShowOne): parser = super(RestoreSubcloud, self).get_parser(prog_name) parser.add_argument( - '--restore-values', + "--restore-values", required=False, - help='YAML file containing subcloud restore settings. ' - 'Can be either a local file path or a URL.' + help="YAML file containing subcloud restore settings. " + "Can be either a local file path or a URL.", ) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='sysadmin password of the subcloud to be restored, ' - 'if not provided you will be prompted.' + help="sysadmin password of the subcloud to be restored, " + "if not provided you will be prompted.", ) parser.add_argument( - '--with-install', + "--with-install", required=False, - action='store_true', - help='option to reinstall the subcloud as part of restore, ' - 'suitable only for subclouds that can be installed remotely.' + action="store_true", + help="option to reinstall the subcloud as part of restore, " + "suitable only for subclouds that can be installed remotely.", ) - parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to update.' - ) + parser.add_argument("subcloud", help="Name or ID of the subcloud to update.") return parser def _get_resources(self, parsed_args): - deprecation_msg = ('This command has been deprecated. Please use ' - 'subcloud-backup restore instead.') + deprecation_msg = ( + "This command has been deprecated. Please use " + "subcloud-backup restore instead." + ) raise exceptions.DCManagerClientException(deprecation_msg) @@ -955,30 +954,29 @@ class PrestageSubcloud(base.DCManagerShowOne): parser = super(PrestageSubcloud, self).get_parser(prog_name) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='sysadmin password of the subcloud to be prestaged, ' - 'if not provided you will be prompted.' + help="sysadmin password of the subcloud to be prestaged, " + "if not provided you will be prompted.", ) parser.add_argument( - 'subcloud', - help='Name or ID of the subcloud to prestage.' + "subcloud", help="Name or ID of the subcloud to prestage." ) parser.add_argument( - '--force', + "--force", required=False, - action='store_true', - help='Disregard subcloud management alarm condition' + action="store_true", + help="Disregard subcloud management alarm condition", ) parser.add_argument( - '--release', + "--release", required=False, help="software release used to prestage the subcloud with. " - "If not specified, the current software release of " - "the subcloud will be used." 
+ "If not specified, the current software release of " + "the subcloud will be used.", ) return parser @@ -989,27 +987,29 @@ class PrestageSubcloud(base.DCManagerShowOne): data = dict() if parsed_args.force: - data['force'] = 'true' + data["force"] = "true" if parsed_args.sysadmin_password is not None: - data['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")).decode("utf-8") + data["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ).decode("utf-8") else: password = utils.prompt_for_password() data["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")).decode("utf-8") + password.encode("utf-8") + ).decode("utf-8") if parsed_args.release: - data['release'] = parsed_args.release + data["release"] = parsed_args.release try: - result = dcmanager_client.subcloud_manager.\ - prestage_subcloud( - subcloud_ref=subcloud_ref, data=data) + result = dcmanager_client.subcloud_manager.prestage_subcloud( + subcloud_ref=subcloud_ref, data=data + ) update_fields_values(result) return result except Exception as e: print(e) - error_msg = "Unable to prestage subcloud %s" % (subcloud_ref) + error_msg = f"Unable to prestage subcloud {subcloud_ref}" raise exceptions.DCManagerClientException(error_msg) diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_peer_group_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_peer_group_manager.py index 5c00c34..bdb667d 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_peer_group_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/subcloud_peer_group_manager.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023 Wind River Systems, Inc. +# Copyright (c) 2023-2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -6,22 +6,21 @@ import base64 from osc_lib.command import command +from dcmanagerclient import exceptions, utils from dcmanagerclient.commands.v1 import base -from dcmanagerclient import exceptions -from dcmanagerclient import utils def group_format(subcloud_peer_group=None): columns = ( - 'id', - 'peer_group_name', - 'group_priority', - 'group_state', - 'system_leader_id', - 'system_leader_name', - 'max_subcloud_rehoming', - 'created_at', - 'updated_at', + "id", + "peer_group_name", + "group_priority", + "group_state", + "system_leader_id", + "system_leader_name", + "max_subcloud_rehoming", + "created_at", + "updated_at", ) if subcloud_peer_group: @@ -34,11 +33,11 @@ def group_format(subcloud_peer_group=None): subcloud_peer_group.system_leader_name, subcloud_peer_group.max_subcloud_rehoming, subcloud_peer_group.created_at, - subcloud_peer_group.updated_at + subcloud_peer_group.updated_at, ) else: - data = (('',) * len(columns),) + data = (("",) * len(columns),) return columns, data @@ -53,15 +52,14 @@ class MigrateSubcloudPeerGroup(base.DCManagerLister): parser = super(MigrateSubcloudPeerGroup, self).get_parser(prog_name) parser.add_argument( - 'group', - help='Name or ID of the subcloud peer group to migrate.' + "group", help="Name or ID of the subcloud peer group to migrate." ) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='Sysadmin password of the subclouds to be configured, ' - 'if not provided you will be prompted.' 
+ help="Sysadmin password of the subclouds to be configured, " + "if not provided you will be prompted.", ) return parser @@ -71,21 +69,21 @@ class MigrateSubcloudPeerGroup(base.DCManagerLister): kwargs = dict() if parsed_args.sysadmin_password is not None: - kwargs['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")).decode("utf-8") + kwargs["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ).decode("utf-8") else: password = utils.prompt_for_password() kwargs["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")).decode("utf-8") + password.encode("utf-8") + ).decode("utf-8") try: - return dcmanager_client. \ - subcloud_peer_group_manager.migrate_subcloud_peer_group( - subcloud_peer_group_ref, **kwargs) + return dcmanager_client.subcloud_peer_group_manager.\ + migrate_subcloud_peer_group(subcloud_peer_group_ref, **kwargs) except Exception as e: print(e) - msg = "Unable to migrate subcloud peer group %s" % ( - subcloud_peer_group_ref) + msg = f"Unable to migrate subcloud peer group {subcloud_peer_group_ref}" raise exceptions.DCManagerClientException(msg) @@ -99,25 +97,25 @@ class AddSubcloudPeerGroup(base.DCManagerShowOne): parser = super(AddSubcloudPeerGroup, self).get_parser(prog_name) parser.add_argument( - '--peer-group-name', + "--peer-group-name", required=True, - help='Name for the new subcloud peer group.' + help="Name for the new subcloud peer group.", ) parser.add_argument( - '--group-state', + "--group-state", required=False, - choices=['enabled', 'disabled'], - default='enabled', - help='Administrative control of subcloud group.' + choices=["enabled", "disabled"], + default="enabled", + help="Administrative control of subcloud group.", ) parser.add_argument( - '--max-subcloud-rehoming', + "--max-subcloud-rehoming", required=False, type=int, default=10, - help='Maximum number of subclouds to migrate in parallel' + help="Maximum number of subclouds to migrate in parallel", ) return parser @@ -125,17 +123,17 @@ class AddSubcloudPeerGroup(base.DCManagerShowOne): dcmanager_client = self.app.client_manager.subcloud_peer_group_manager kwargs = dict() - kwargs['peer-group-name'] = parsed_args.peer_group_name + kwargs["peer-group-name"] = parsed_args.peer_group_name if parsed_args.group_state is not None: - kwargs['group-state'] = parsed_args.group_state + kwargs["group-state"] = parsed_args.group_state if parsed_args.max_subcloud_rehoming is not None: - kwargs['max-subcloud-rehoming'] = \ - parsed_args.max_subcloud_rehoming + kwargs["max-subcloud-rehoming"] = parsed_args.max_subcloud_rehoming - return dcmanager_client.subcloud_peer_group_manager.\ - add_subcloud_peer_group(**kwargs) + return dcmanager_client.subcloud_peer_group_manager.add_subcloud_peer_group( + **kwargs + ) class DeleteSubcloudPeerGroup(command.Command): @@ -145,8 +143,7 @@ class DeleteSubcloudPeerGroup(command.Command): parser = super(DeleteSubcloudPeerGroup, self).get_parser(prog_name) parser.add_argument( - 'group', - help='Name or ID of the subcloud peer group to delete.' + "group", help="Name or ID of the subcloud peer group to delete." 
) return parser @@ -154,12 +151,12 @@ class DeleteSubcloudPeerGroup(command.Command): subcloud_peer_group_ref = parsed_args.group dcmanager_client = self.app.client_manager.subcloud_peer_group_manager try: - dcmanager_client.subcloud_peer_group_manager.\ - delete_subcloud_peer_group(subcloud_peer_group_ref) + dcmanager_client.subcloud_peer_group_manager.delete_subcloud_peer_group( + subcloud_peer_group_ref + ) except Exception as e: print(e) - msg = "Unable to delete subcloud peer group %s" % ( - subcloud_peer_group_ref) + msg = f"Unable to delete subcloud peer group {subcloud_peer_group_ref}" raise exceptions.DCManagerClientException(msg) @@ -173,8 +170,7 @@ class ShowSubcloudPeerGroup(base.DCManagerShowOne): parser = super(ShowSubcloudPeerGroup, self).get_parser(prog_name) parser.add_argument( - 'group', - help='Name or ID of subcloud peer group to view the details.' + "group", help="Name or ID of subcloud peer group to view the details." ) return parser @@ -182,8 +178,11 @@ class ShowSubcloudPeerGroup(base.DCManagerShowOne): def _get_resources(self, parsed_args): subcloud_peer_group_ref = parsed_args.group dcmanager_client = self.app.client_manager.subcloud_peer_group_manager - return dcmanager_client.subcloud_peer_group_manager.\ - subcloud_peer_group_detail(subcloud_peer_group_ref) + return ( + dcmanager_client.subcloud_peer_group_manager.subcloud_peer_group_detail( + subcloud_peer_group_ref + ) + ) class ListSubcloudPeerGroup(base.DCManagerLister): @@ -198,8 +197,9 @@ class ListSubcloudPeerGroup(base.DCManagerLister): def _get_resources(self, parsed_args): dcmanager_client = self.app.client_manager.subcloud_peer_group_manager - return dcmanager_client.subcloud_peer_group_manager.\ - list_subcloud_peer_groups() + return ( + dcmanager_client.subcloud_peer_group_manager.list_subcloud_peer_groups() + ) class ListSubcloudPeerGroupSubclouds(base.DCManagerLister): @@ -209,19 +209,19 @@ class ListSubcloudPeerGroupSubclouds(base.DCManagerLister): return utils.subcloud_detail_format def get_parser(self, prog_name): - parser = super(ListSubcloudPeerGroupSubclouds, - self).get_parser(prog_name) + parser = super(ListSubcloudPeerGroupSubclouds, self).get_parser(prog_name) parser.add_argument( - 'group', + "group", help="Name or ID of subcloud peer group to list " - "associated subclouds." + "associated subclouds.", ) return parser def _get_resources(self, parsed_args): subcloud_peer_group_ref = parsed_args.group - dcmanager_client = self.app.client_manager.subcloud_peer_group_manager - return dcmanager_client.subcloud_peer_group_manager. \ + client_manager = self.app.client_manager + dcmanager_client = client_manager.subcloud_peer_group_manager + return dcmanager_client.subcloud_peer_group_manager.\ subcloud_peer_group_list_subclouds(subcloud_peer_group_ref) @@ -235,28 +235,27 @@ class UpdateSubcloudPeerGroup(base.DCManagerShowOne): parser = super(UpdateSubcloudPeerGroup, self).get_parser(prog_name) parser.add_argument( - 'group', - help='Name or ID of the subcloud peer group to update.' + "group", help="Name or ID of the subcloud peer group to update." ) parser.add_argument( - '--peer-group-name', + "--peer-group-name", required=False, - help='Name for the new subcloud peer group.' + help="Name for the new subcloud peer group.", ) parser.add_argument( - '--group-state', + "--group-state", required=False, - choices=['enabled', 'disabled'], - help='Administrative control of subcloud peer group.' 
+ choices=["enabled", "disabled"], + help="Administrative control of subcloud peer group.", ) parser.add_argument( - '--max-subcloud-rehoming', + "--max-subcloud-rehoming", required=False, type=int, - help='Maximum number of subclouds to migrate in parallel' + help="Maximum number of subclouds to migrate in parallel", ) return parser @@ -266,27 +265,24 @@ class UpdateSubcloudPeerGroup(base.DCManagerShowOne): kwargs = dict() if parsed_args.peer_group_name is not None: - kwargs['peer-group-name'] = parsed_args.peer_group_name + kwargs["peer-group-name"] = parsed_args.peer_group_name if parsed_args.group_state is not None: - kwargs['group-state'] = parsed_args.group_state + kwargs["group-state"] = parsed_args.group_state if parsed_args.max_subcloud_rehoming is not None: - kwargs['max-subcloud-rehoming'] = \ - parsed_args.max_subcloud_rehoming + kwargs["max-subcloud-rehoming"] = parsed_args.max_subcloud_rehoming if len(kwargs) == 0: error_msg = "Nothing to update" raise exceptions.DCManagerClientException(error_msg) try: - return dcmanager_client. \ - subcloud_peer_group_manager.update_subcloud_peer_group( - subcloud_peer_group_ref, **kwargs) + return dcmanager_client.subcloud_peer_group_manager.\ + update_subcloud_peer_group(subcloud_peer_group_ref, **kwargs) except Exception as e: print(e) - msg = "Unable to update subcloud peer group %s" % ( - subcloud_peer_group_ref) + msg = f"Unable to update subcloud peer group {subcloud_peer_group_ref}" raise exceptions.DCManagerClientException(msg) @@ -294,30 +290,30 @@ def detail_status_format(subcloud_peer_group_status=None): # Include all the fields in peer_group_format # plus some additional fields columns = ( - 'peer_group_id', - 'peer_group_name', - 'total_subclouds', - 'complete', - 'waiting_for_migrate', - 'rehoming', - 'rehome_failed', - 'managed', - 'unmanaged', + "peer_group_id", + "peer_group_name", + "total_subclouds", + "complete", + "waiting_for_migrate", + "rehoming", + "rehome_failed", + "managed", + "unmanaged", ) if subcloud_peer_group_status: data = ( - subcloud_peer_group_status['peer_group_id'], - subcloud_peer_group_status['peer_group_name'], - subcloud_peer_group_status['total_subclouds'], - subcloud_peer_group_status['complete'], - subcloud_peer_group_status['waiting_for_migrate'], - subcloud_peer_group_status['rehoming'], - subcloud_peer_group_status['rehome_failed'], - subcloud_peer_group_status['managed'], - subcloud_peer_group_status['unmanaged'], + subcloud_peer_group_status["peer_group_id"], + subcloud_peer_group_status["peer_group_name"], + subcloud_peer_group_status["total_subclouds"], + subcloud_peer_group_status["complete"], + subcloud_peer_group_status["waiting_for_migrate"], + subcloud_peer_group_status["rehoming"], + subcloud_peer_group_status["rehome_failed"], + subcloud_peer_group_status["managed"], + subcloud_peer_group_status["unmanaged"], ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -331,8 +327,7 @@ class StatusSubcloudPeerGroup(base.DCManagerShowOne): parser = super(StatusSubcloudPeerGroup, self).get_parser(prog_name) parser.add_argument( - 'group', - help='Name or ID of subcloud peer group to view the status.' + "group", help="Name or ID of subcloud peer group to view the status." 
) return parser @@ -340,5 +335,8 @@ class StatusSubcloudPeerGroup(base.DCManagerShowOne): def _get_resources(self, parsed_args): subcloud_peer_group_ref = parsed_args.group dcmanager_client = self.app.client_manager.subcloud_peer_group_manager - return dcmanager_client.subcloud_peer_group_manager.\ - subcloud_peer_group_status(subcloud_peer_group_ref) + return ( + dcmanager_client.subcloud_peer_group_manager.subcloud_peer_group_status( + subcloud_peer_group_ref + ) + ) diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/sw_deploy_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/sw_deploy_manager.py index 2ee3a7c..d3f1dfe 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/sw_deploy_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/sw_deploy_manager.py @@ -6,8 +6,6 @@ from dcmanagerclient.commands.v1 import sw_update_manager -RELEASE_FIELD = -3 - class SwDeployManagerMixin(object): """This Mixin provides the manager used for software deploy releases.""" @@ -23,11 +21,20 @@ class SwDeployManagerMixin(object): if sw_update_strategy and sw_update_strategy.extra_args: release_id = sw_update_strategy.extra_args.get("release_id") - # Insert the 'release_id' field before the 'state', - # 'created_at' and 'updated_at' fields - columns = columns[:RELEASE_FIELD] + ( - "release_id",) + columns[RELEASE_FIELD:] - data = data[:RELEASE_FIELD] + (release_id,) + data[RELEASE_FIELD:] + # Find the index of 'stop on failure' in the tuple + failure_status_index = columns.index("stop on failure") + + # Insert the 'release_id' field before the 'stop on failure', + columns = ( + columns[:failure_status_index + 1] + + ("release_id",) + + columns[failure_status_index + 1:] + ) + data = ( + data[:failure_status_index + 1] + + (release_id,) + + data[failure_status_index + 1:] + ) return columns, data def _get_format_function(self): @@ -71,25 +78,21 @@ class ShowSwDeployStrategy( SwDeployManagerMixin, sw_update_manager.ShowSwUpdateStrategy ): """Show the details of a software deploy strategy for a subcloud.""" - pass class DeleteSwDeployStrategy( SwDeployManagerMixin, sw_update_manager.DeleteSwUpdateStrategy ): """Delete software deploy strategy from the database.""" - pass class ApplySwDeployStrategy( SwDeployManagerMixin, sw_update_manager.ApplySwUpdateStrategy ): """Apply a software deploy strategy.""" - pass class AbortSwDeployStrategy( SwDeployManagerMixin, sw_update_manager.AbortSwUpdateStrategy ): """Abort a software deploy strategy.""" - pass diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/sw_patch_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/sw_patch_manager.py index ecd3962..def8881 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/sw_patch_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/sw_patch_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2017-2023 Wind River Systems, Inc. +# Copyright (c) 2017-2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
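The release_id handling in sw_deploy_manager.py above now locates the "stop on failure" column with columns.index() and splices the new field into both the columns and data tuples immediately after that column, instead of counting backwards with the removed RELEASE_FIELD offset. A minimal standalone sketch of that splice, with illustrative values that are not taken from the client:

# Illustrative only: the slice-and-concatenate insertion used above.
columns = ("strategy type", "stop on failure", "state", "created_at")
data = ("sw-deploy", "False", "initial", "2024-01-01 00:00:00")
release_id = "stx-10.0.1"  # hypothetical value, normally read from extra_args

idx = columns.index("stop on failure")
columns = columns[: idx + 1] + ("release_id",) + columns[idx + 1 :]
data = data[: idx + 1] + (release_id,) + data[idx + 1 :]

print(columns)  # (..., 'stop on failure', 'release_id', 'state', 'created_at')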
@@ -28,8 +28,7 @@ class SwPatchManagerMixin(object): columns, data = original_fmt_func(sw_update_strategy) if sw_update_strategy.extra_args: - upload_only = sw_update_strategy.extra_args.get("upload-only", - False) + upload_only = sw_update_strategy.extra_args.get("upload-only", False) else: upload_only = False @@ -44,48 +43,49 @@ class SwPatchManagerMixin(object): return self.custom_format_function -class CreatePatchUpdateStrategy(SwPatchManagerMixin, - sw_update_manager.CreateSwUpdateStrategy): +class CreatePatchUpdateStrategy( + SwPatchManagerMixin, sw_update_manager.CreateSwUpdateStrategy +): """Create a patch update strategy.""" def get_parser(self, prog_name): parser = super(CreatePatchUpdateStrategy, self).get_parser(prog_name) parser.add_argument( - '--upload-only', + "--upload-only", required=False, - action='store_true', - help='Stops strategy after uploading patches to subclouds' + action="store_true", + help="Stops strategy after uploading patches to subclouds", ) return parser def process_custom_params(self, parsed_args, kwargs_dict): """Updates kwargs dictionary from parsed_args for patching""" if parsed_args.upload_only: - kwargs_dict['upload-only'] = 'true' + kwargs_dict["upload-only"] = "true" else: - kwargs_dict['upload-only'] = 'false' + kwargs_dict["upload-only"] = "false" -class ShowPatchUpdateStrategy(SwPatchManagerMixin, - sw_update_manager.ShowSwUpdateStrategy): +class ShowPatchUpdateStrategy( + SwPatchManagerMixin, sw_update_manager.ShowSwUpdateStrategy +): """Show the details of a patch update strategy for a subcloud.""" - pass -class DeletePatchUpdateStrategy(SwPatchManagerMixin, - sw_update_manager.DeleteSwUpdateStrategy): +class DeletePatchUpdateStrategy( + SwPatchManagerMixin, sw_update_manager.DeleteSwUpdateStrategy +): """Delete patch update strategy from the database.""" - pass -class ApplyPatchUpdateStrategy(SwPatchManagerMixin, - sw_update_manager.ApplySwUpdateStrategy): +class ApplyPatchUpdateStrategy( + SwPatchManagerMixin, sw_update_manager.ApplySwUpdateStrategy +): """Apply a patch update strategy.""" - pass -class AbortPatchUpdateStrategy(SwPatchManagerMixin, - sw_update_manager.AbortSwUpdateStrategy): +class AbortPatchUpdateStrategy( + SwPatchManagerMixin, sw_update_manager.AbortSwUpdateStrategy +): """Abort a patch update strategy.""" - pass diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/sw_prestage_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/sw_prestage_manager.py index 977e3e9..b4a9034 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/sw_prestage_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/sw_prestage_manager.py @@ -1,4 +1,4 @@ -# Copyright (c) 2022-2023 Wind River Systems, Inc. +# Copyright (c) 2022-2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,8 +15,8 @@ import base64 -from dcmanagerclient.commands.v1 import sw_update_manager from dcmanagerclient import utils +from dcmanagerclient.commands.v1 import sw_update_manager class SwPrestageManagerMixin(object): @@ -32,12 +32,14 @@ class SwPrestageManagerMixin(object): if sw_update_strategy.extra_args: prestage_software_version = sw_update_strategy.extra_args.get( - "prestage-software-version") + "prestage-software-version" + ) if prestage_software_version: # Insert the 'software version' field before the 'state', # 'created_at' and 'updated_at' fields if it's present - columns = columns[:-3] + ("prestage software version",) + \ - columns[-3:] + columns = ( + columns[:-3] + ("prestage software version",) + columns[-3:] + ) data = data[:-3] + (prestage_software_version,) + data[-3:] return columns, data @@ -46,33 +48,33 @@ class SwPrestageManagerMixin(object): return self.custom_format_function -class CreateSwPrestageStrategy(SwPrestageManagerMixin, - sw_update_manager.CreateSwUpdateStrategy): +class CreateSwPrestageStrategy( + SwPrestageManagerMixin, sw_update_manager.CreateSwUpdateStrategy +): """Create a prestage strategy.""" def add_force_argument(self, parser): parser.add_argument( - '--force', + "--force", required=False, - action='store_true', - help='Skip checking the subcloud for \ - management affecting alarms. ' + action="store_true", + help="Skip checking the subcloud for \ + management affecting alarms. ", ) def get_parser(self, prog_name): - parser = super(CreateSwPrestageStrategy, - self).get_parser(prog_name) + parser = super(CreateSwPrestageStrategy, self).get_parser(prog_name) parser.add_argument( - '--sysadmin-password', + "--sysadmin-password", required=False, - help='sysadmin password, will be prompted if not provided.' + help="sysadmin password, will be prompted if not provided.", ) parser.add_argument( - '--release', + "--release", required=False, help="software release used to prestage the subcloud with. " - "If not specified, the current software release of " - "the subcloud will be used." + "If not specified, the current software release of " + "the subcloud will be used.", ) return parser @@ -85,12 +87,14 @@ class CreateSwPrestageStrategy(SwPrestageManagerMixin, # The binary base64 encoded string (eg. b'dGVzdA==') is not JSON # serializable in Python3.x, so it has to be decoded to a JSON # serializable string (eg. 'dGVzdA=='). 
- kwargs_dict['sysadmin_password'] = base64.b64encode( - parsed_args.sysadmin_password.encode("utf-8")).decode("utf-8") + kwargs_dict["sysadmin_password"] = base64.b64encode( + parsed_args.sysadmin_password.encode("utf-8") + ).decode("utf-8") else: password = utils.prompt_for_password() kwargs_dict["sysadmin_password"] = base64.b64encode( - password.encode("utf-8")).decode("utf-8") + password.encode("utf-8") + ).decode("utf-8") if parsed_args.release is not None: kwargs_dict["release"] = parsed_args.release @@ -100,25 +104,25 @@ class CreateSwPrestageStrategy(SwPrestageManagerMixin, pass -class ShowSwPrestageStrategy(SwPrestageManagerMixin, - sw_update_manager.ShowSwUpdateStrategy): +class ShowSwPrestageStrategy( + SwPrestageManagerMixin, sw_update_manager.ShowSwUpdateStrategy +): """Show the details of a prestage strategy.""" - pass -class DeleteSwPrestageStrategy(SwPrestageManagerMixin, - sw_update_manager.DeleteSwUpdateStrategy): +class DeleteSwPrestageStrategy( + SwPrestageManagerMixin, sw_update_manager.DeleteSwUpdateStrategy +): """Delete a prestage strategy.""" - pass -class ApplySwPrestageStrategy(SwPrestageManagerMixin, - sw_update_manager.ApplySwUpdateStrategy): +class ApplySwPrestageStrategy( + SwPrestageManagerMixin, sw_update_manager.ApplySwUpdateStrategy +): """Apply a prestage strategy.""" - pass -class AbortSwPrestageStrategy(SwPrestageManagerMixin, - sw_update_manager.AbortSwUpdateStrategy): +class AbortSwPrestageStrategy( + SwPrestageManagerMixin, sw_update_manager.AbortSwUpdateStrategy +): """Abort a prestage strategy.""" - pass diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/sw_update_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/sw_update_manager.py index abeeb3a..90467cf 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/sw_update_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/sw_update_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2020-2023 Wind River Systems, Inc. +# Copyright (c) 2020-2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,8 +14,8 @@ # limitations under the License. 
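The base64 comment above applies to every password option in these commands: base64.b64encode() returns bytes, which is not JSON serializable under Python 3, so each value is encoded and then decoded back to str before being placed in the request body. A small sketch of that round trip, independent of the client code (the password value is illustrative):

import base64
import json

password = "illustrative-password"  # placeholder value only
encoded = base64.b64encode(password.encode("utf-8")).decode("utf-8")

# The decoded str can go into a JSON body; the raw b64encode() bytes cannot.
print(json.dumps({"sysadmin_password": encoded}))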
# -from dcmanagerclient.commands.v1 import base from dcmanagerclient import exceptions +from dcmanagerclient.commands.v1 import base # These are the abstract base classes used for sw update managers such as # - sw-patch-manager @@ -26,13 +26,13 @@ from dcmanagerclient import exceptions def detail_format(sw_update_strategy=None): columns = ( - 'strategy type', - 'subcloud apply type', - 'max parallel subclouds', - 'stop on failure', - 'state', - 'created_at', - 'updated_at', + "strategy type", + "subcloud apply type", + "max parallel subclouds", + "stop on failure", + "state", + "created_at", + "updated_at", ) if sw_update_strategy: @@ -46,19 +46,19 @@ def detail_format(sw_update_strategy=None): sw_update_strategy.updated_at, ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data def strategy_step_format(strategy_step=None): columns = ( - 'cloud', - 'stage', - 'state', - 'details', - 'started_at', - 'finished_at', + "cloud", + "stage", + "state", + "details", + "started_at", + "finished_at", ) if strategy_step: @@ -72,21 +72,21 @@ def strategy_step_format(strategy_step=None): ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data def detail_strategy_step_format(strategy_step=None): columns = ( - 'cloud', - 'stage', - 'state', - 'details', - 'started_at', - 'finished_at', - 'created_at', - 'updated_at', + "cloud", + "stage", + "state", + "details", + "started_at", + "finished_at", + "created_at", + "updated_at", ) if strategy_step: @@ -102,7 +102,7 @@ def detail_strategy_step_format(strategy_step=None): ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -119,49 +119,47 @@ class CreateSwUpdateStrategy(base.DCManagerShowOne): def add_force_argument(self, parser): parser.add_argument( - '--force', + "--force", required=False, - action='store_true', - help='Disregard subcloud availability status, intended for \ + action="store_true", + help="Disregard subcloud availability status, intended for \ some upgrade recovery scenarios. Subcloud name must be \ - specified.' + specified.", ) def get_parser(self, prog_name): parser = super(CreateSwUpdateStrategy, self).get_parser(prog_name) parser.add_argument( - '--subcloud-apply-type', + "--subcloud-apply-type", required=False, - choices=['parallel', 'serial'], - help='Subcloud apply type (parallel or serial).' + choices=["parallel", "serial"], + help="Subcloud apply type (parallel or serial).", ) parser.add_argument( - '--max-parallel-subclouds', + "--max-parallel-subclouds", required=False, type=int, - help='Maximum number of parallel subclouds.' + help="Maximum number of parallel subclouds.", ) parser.add_argument( - '--stop-on-failure', + "--stop-on-failure", required=False, - action='store_true', - help='Do not update any additional subclouds after a failure.' + action="store_true", + help="Do not update any additional subclouds after a failure.", ) parser.add_argument( - '--group', - required=False, - help='Name or ID of subcloud group to update.' + "--group", required=False, help="Name or ID of subcloud group to update." ) parser.add_argument( - 'cloud_name', - nargs='?', + "cloud_name", + nargs="?", default=None, - help='Name of a single cloud to update.' 
+ help="Name of a single cloud to update.", ) self.add_force_argument(parser) @@ -172,45 +170,50 @@ class CreateSwUpdateStrategy(base.DCManagerShowOne): def validate_force_params(self, parsed_args): """Most orchestrations only support force for a single subcloud""" if parsed_args.force and not parsed_args.cloud_name: - error_msg = 'The --force option can only be applied to a single ' \ - 'subcloud. Please specify the subcloud name.' + error_msg = ( + "The --force option can only be applied to a single " + "subcloud. Please specify the subcloud name." + ) raise exceptions.DCManagerClientException(error_msg) def validate_group_params(self, parsed_args): """When specifying a group, other inputs are considered invalid""" if parsed_args.group: if parsed_args.cloud_name: - error_msg = 'The cloud_name and group options are mutually ' \ - 'exclusive.' + error_msg = ( + "The cloud_name and group options are mutually exclusive." + ) raise exceptions.DCManagerClientException(error_msg) if parsed_args.subcloud_apply_type: - error_msg = 'The --subcloud-apply-type is not ' \ - 'supported when --group option is applied.' + error_msg = ( + "The --subcloud-apply-type is not " + "supported when --group option is applied." + ) raise exceptions.DCManagerClientException(error_msg) if parsed_args.max_parallel_subclouds: - error_msg = 'The --max-parallel-subclouds options is not ' \ - 'supported when --group option is applied.' + error_msg = ( + "The --max-parallel-subclouds options is not " + "supported when --group option is applied." + ) raise exceptions.DCManagerClientException(error_msg) def process_custom_params(self, parsed_args, kwargs_dict): """Updates kwargs dictionary from parsed_args based on the subclass""" - pass def _get_resources(self, parsed_args): kwargs = dict() if parsed_args.subcloud_apply_type: - kwargs['subcloud-apply-type'] = parsed_args.subcloud_apply_type + kwargs["subcloud-apply-type"] = parsed_args.subcloud_apply_type if parsed_args.max_parallel_subclouds: - kwargs['max-parallel-subclouds'] = \ - parsed_args.max_parallel_subclouds + kwargs["max-parallel-subclouds"] = parsed_args.max_parallel_subclouds if parsed_args.stop_on_failure: - kwargs['stop-on-failure'] = 'true' + kwargs["stop-on-failure"] = "true" if parsed_args.force: - kwargs['force'] = 'true' + kwargs["force"] = "true" if parsed_args.cloud_name is not None: - kwargs['cloud_name'] = parsed_args.cloud_name + kwargs["cloud_name"] = parsed_args.cloud_name if parsed_args.group is not None: - kwargs['subcloud_group'] = parsed_args.group + kwargs["subcloud_group"] = parsed_args.group self.validate_force_params(parsed_args) self.validate_group_params(parsed_args) @@ -304,13 +307,9 @@ class ShowSwUpdateStrategyStep(base.DCManagerShowOne): def get_parser(self, prog_name): parser = super(ShowSwUpdateStrategyStep, self).get_parser(prog_name) - parser.add_argument( - 'cloud_name', - help='Name of cloud to view the details.' 
- ) + parser.add_argument("cloud_name", help="Name of cloud to view the details.") return parser def _get_resources(self, parsed_args): cloud_name = parsed_args.cloud_name - return self.get_strategy_step_manager().strategy_step_detail( - cloud_name) + return self.get_strategy_step_manager().strategy_step_detail(cloud_name) diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/sw_update_options_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/sw_update_options_manager.py index 05c05fc..b4d7712 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/sw_update_options_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/sw_update_options_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2017-2021 Wind River Systems, Inc. +# Copyright (c) 2017-2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,8 +16,8 @@ from osc_lib.command import command -from dcmanagerclient.commands.v1 import base from dcmanagerclient import exceptions +from dcmanagerclient.commands.v1 import base DEFAULT_REGION_NAME = "RegionOne" @@ -25,14 +25,14 @@ DEFAULT_REGION_NAME = "RegionOne" def options_detail_format(sw_update_options=None): columns = ( - 'cloud', - 'storage apply type', - 'worker apply type', - 'max parallel workers', - 'alarm restriction type', - 'default instance action', - 'created_at', - 'updated_at', + "cloud", + "storage apply type", + "worker apply type", + "max parallel workers", + "alarm restriction type", + "default instance action", + "created_at", + "updated_at", ) if sw_update_options: @@ -47,19 +47,19 @@ def options_detail_format(sw_update_options=None): sw_update_options.updated_at, ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data def options_list_format(sw_update_option=None): columns = ( - 'cloud', - 'storage apply type', - 'worker apply type', - 'max parallel workers', - 'alarm restriction type', - 'default instance action', + "cloud", + "storage apply type", + "worker apply type", + "max parallel workers", + "alarm restriction type", + "default instance action", ) if sw_update_option: @@ -73,7 +73,7 @@ def options_list_format(sw_update_option=None): ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -88,46 +88,46 @@ class UpdateSwUpdateOptions(base.DCManagerShowOne): parser = super(UpdateSwUpdateOptions, self).get_parser(prog_name) parser.add_argument( - '--storage-apply-type', + "--storage-apply-type", required=True, - choices=['parallel', 'serial'], - help='Storage node apply type (parallel or serial).' + choices=["parallel", "serial"], + help="Storage node apply type (parallel or serial).", ) parser.add_argument( - '--worker-apply-type', + "--worker-apply-type", required=True, - choices=['parallel', 'serial'], - help='Compute node apply type (parallel or serial).' + choices=["parallel", "serial"], + help="Compute node apply type (parallel or serial).", ) parser.add_argument( - '--max-parallel-workers', + "--max-parallel-workers", required=True, type=int, - help='Maximum number of parallel workers.' 
+ help="Maximum number of parallel workers.", ) parser.add_argument( - '--alarm-restriction-type', + "--alarm-restriction-type", required=True, - choices=['strict', 'relaxed'], - help='Whether to allow patching if subcloud alarms are present or ' - 'not (strict, relaxed).' + choices=["strict", "relaxed"], + help="Whether to allow patching if subcloud alarms are present or " + "not (strict, relaxed).", ) parser.add_argument( - '--default-instance-action', + "--default-instance-action", required=True, - choices=['stop-start', 'migrate'], - help='How instances should be handled.' + choices=["stop-start", "migrate"], + help="How instances should be handled.", ) parser.add_argument( - 'subcloud', - nargs='?', + "subcloud", + nargs="?", default=None, - help='Subcloud name or id, omit to set default options.' + help="Subcloud name or id, omit to set default options.", ) return parser @@ -136,19 +136,21 @@ class UpdateSwUpdateOptions(base.DCManagerShowOne): subcloud_ref = parsed_args.subcloud dcmanager_client = self.app.client_manager.sw_update_options_manager kwargs = dict() - kwargs['storage-apply-type'] = parsed_args.storage_apply_type - kwargs['worker-apply-type'] = parsed_args.worker_apply_type - kwargs['max-parallel-workers'] = parsed_args.max_parallel_workers - kwargs['alarm-restriction-type'] = parsed_args.alarm_restriction_type - kwargs['default-instance-action'] = parsed_args.default_instance_action + kwargs["storage-apply-type"] = parsed_args.storage_apply_type + kwargs["worker-apply-type"] = parsed_args.worker_apply_type + kwargs["max-parallel-workers"] = parsed_args.max_parallel_workers + kwargs["alarm-restriction-type"] = parsed_args.alarm_restriction_type + kwargs["default-instance-action"] = parsed_args.default_instance_action try: - return dcmanager_client.sw_update_options_manager.\ - sw_update_options_update(subcloud_ref, **kwargs) + return ( + dcmanager_client.sw_update_options_manager.sw_update_options_update( + subcloud_ref, **kwargs + ) + ) except Exception as e: print(e) - error_msg = "Unable to update patch options for subcloud %s" % \ - (subcloud_ref) + error_msg = f"Unable to update patch options for subcloud {subcloud_ref}" raise exceptions.DCManagerClientException(error_msg) @@ -164,8 +166,7 @@ class ListSwUpdateOptions(base.DCManagerLister): def _get_resources(self, parsed_args): dcmanager_client = self.app.client_manager.sw_update_options_manager - return dcmanager_client.sw_update_options_manager.\ - sw_update_options_list() + return dcmanager_client.sw_update_options_manager.sw_update_options_list() class ShowSwUpdateOptions(base.DCManagerShowOne): @@ -178,10 +179,10 @@ class ShowSwUpdateOptions(base.DCManagerShowOne): parser = super(ShowSwUpdateOptions, self).get_parser(prog_name) parser.add_argument( - 'subcloud', - nargs='?', + "subcloud", + nargs="?", default=None, - help='Subcloud name or id, omit to show default options.' 
+ help="Subcloud name or id, omit to show default options.", ) return parser @@ -189,8 +190,9 @@ class ShowSwUpdateOptions(base.DCManagerShowOne): def _get_resources(self, parsed_args): subcloud_ref = parsed_args.subcloud dcmanager_client = self.app.client_manager.sw_update_options_manager - return dcmanager_client.sw_update_options_manager.\ - sw_update_options_detail(subcloud_ref) + return dcmanager_client.sw_update_options_manager.sw_update_options_detail( + subcloud_ref + ) class DeleteSwUpdateOptions(command.Command): @@ -199,10 +201,7 @@ class DeleteSwUpdateOptions(command.Command): def get_parser(self, prog_name): parser = super(DeleteSwUpdateOptions, self).get_parser(prog_name) - parser.add_argument( - 'subcloud', - help='Subcloud name or id' - ) + parser.add_argument("subcloud", help="Subcloud name or id") return parser @@ -210,8 +209,11 @@ class DeleteSwUpdateOptions(command.Command): subcloud_ref = parsed_args.subcloud dcmanager_client = self.app.client_manager.sw_update_options_manager try: - return dcmanager_client.sw_update_options_manager.\ - sw_update_options_delete(subcloud_ref) + return ( + dcmanager_client.sw_update_options_manager.sw_update_options_delete( + subcloud_ref + ) + ) except Exception as e: print(e) error_msg = "Unable to delete patch options" diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/sw_upgrade_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/sw_upgrade_manager.py index 21e3f27..af5ee9f 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/sw_upgrade_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/sw_upgrade_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2020-2021 Wind River Systems, Inc. +# Copyright (c) 2020-2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -24,21 +24,24 @@ class SwUpgradeManagerMixin(object): return dcmanager_client.sw_upgrade_manager -class CreateSwUpgradeStrategy(SwUpgradeManagerMixin, - sw_update_manager.CreateSwUpdateStrategy): +class CreateSwUpgradeStrategy( + SwUpgradeManagerMixin, sw_update_manager.CreateSwUpdateStrategy +): """Create a software upgrade strategy.""" + def add_force_argument(self, parser): parser.add_argument( - '--force', + "--force", required=False, - action='store_true', - help='Allow upgrade with the subcloud group \ - rather than a single subcloud name/ID' + action="store_true", + help=( + "Allow upgrade with the subcloud group " + "rather than a single subcloud name/ID" + ), ) def get_parser(self, prog_name): - parser = super(CreateSwUpgradeStrategy, - self).get_parser(prog_name) + parser = super(CreateSwUpgradeStrategy, self).get_parser(prog_name) return parser @@ -47,25 +50,25 @@ class CreateSwUpgradeStrategy(SwUpgradeManagerMixin, pass -class ShowSwUpgradeStrategy(SwUpgradeManagerMixin, - sw_update_manager.ShowSwUpdateStrategy): +class ShowSwUpgradeStrategy( + SwUpgradeManagerMixin, sw_update_manager.ShowSwUpdateStrategy +): """Show the details of a software upgrade strategy for a subcloud.""" - pass -class DeleteSwUpgradeStrategy(SwUpgradeManagerMixin, - sw_update_manager.DeleteSwUpdateStrategy): +class DeleteSwUpgradeStrategy( + SwUpgradeManagerMixin, sw_update_manager.DeleteSwUpdateStrategy +): """Delete software upgrade strategy from the database.""" - pass -class ApplySwUpgradeStrategy(SwUpgradeManagerMixin, - sw_update_manager.ApplySwUpdateStrategy): +class ApplySwUpgradeStrategy( + SwUpgradeManagerMixin, sw_update_manager.ApplySwUpdateStrategy +): """Apply a software upgrade strategy.""" - pass -class AbortSwUpgradeStrategy(SwUpgradeManagerMixin, - sw_update_manager.AbortSwUpdateStrategy): +class AbortSwUpgradeStrategy( + SwUpgradeManagerMixin, sw_update_manager.AbortSwUpdateStrategy +): """Abort a software upgrade strategy.""" - pass diff --git a/distributedcloud-client/dcmanagerclient/commands/v1/system_peer_manager.py b/distributedcloud-client/dcmanagerclient/commands/v1/system_peer_manager.py index e0720b9..e7b2d8e 100644 --- a/distributedcloud-client/dcmanagerclient/commands/v1/system_peer_manager.py +++ b/distributedcloud-client/dcmanagerclient/commands/v1/system_peer_manager.py @@ -1,26 +1,26 @@ # -# Copyright (c) 2023 Wind River Systems, Inc. +# Copyright (c) 2023-2024 Wind River Systems, Inc. 
# # SPDX-License-Identifier: Apache-2.0 # import base64 + from osc_lib.command import command +from dcmanagerclient import exceptions, utils from dcmanagerclient.commands.v1 import base -from dcmanagerclient import exceptions -from dcmanagerclient import utils def group_format(subcloud_peer_group=None): columns = ( - 'id', - 'peer_group_name', - 'group_priority', - 'group_state', - 'system_leader_id', - 'system_leader_name', - 'max_subcloud_rehoming', + "id", + "peer_group_name", + "group_priority", + "group_state", + "system_leader_id", + "system_leader_name", + "max_subcloud_rehoming", ) if subcloud_peer_group: @@ -35,18 +35,18 @@ def group_format(subcloud_peer_group=None): ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data def peer_format(system_peer=None): columns = ( - 'id', - 'peer uuid', - 'peer name', - 'manager endpoint', - 'controller gateway address' + "id", + "peer uuid", + "peer name", + "manager endpoint", + "controller gateway address", ) if system_peer: @@ -55,11 +55,11 @@ def peer_format(system_peer=None): system_peer.peer_uuid, system_peer.peer_name, system_peer.manager_endpoint, - system_peer.gateway_address + system_peer.gateway_address, ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -68,20 +68,20 @@ def detail_peer_format(system_peer=None): # Include all the fields in peer_format # plus some additional fields columns = ( - 'id', - 'peer uuid', - 'peer name', - 'manager endpoint', - 'manager username', - 'controller gateway address', - 'administrative state', - 'heartbeat interval', - 'heartbeat failure threshold', - 'heartbeat failure policy', - 'heartbeat maintenance timeout', - 'availability state', - 'created_at', - 'updated_at', + "id", + "peer uuid", + "peer name", + "manager endpoint", + "manager username", + "controller gateway address", + "administrative state", + "heartbeat interval", + "heartbeat failure threshold", + "heartbeat failure policy", + "heartbeat maintenance timeout", + "availability state", + "created_at", + "updated_at", ) if system_peer: @@ -102,7 +102,7 @@ def detail_peer_format(system_peer=None): system_peer.updated_at, ) else: - data = (tuple('' for _ in range(len(columns))),) + data = (tuple("" for _ in range(len(columns))),) return columns, data @@ -117,80 +117,76 @@ class AddSystemPeer(base.DCManagerShowOne): parser = super(AddSystemPeer, self).get_parser(prog_name) parser.add_argument( - '--peer-uuid', - required=True, - help='UUID of the new system peer.' + "--peer-uuid", required=True, help="UUID of the new system peer." ) parser.add_argument( - '--peer-name', - required=True, - help='Name for the new system peer.' + "--peer-name", required=True, help="Name for the new system peer." ) parser.add_argument( - '--manager-endpoint', + "--manager-endpoint", required=True, - help='URI of DC manager of peer System Controller.' + help="URI of DC manager of peer System Controller.", ) parser.add_argument( - '--peer-controller-gateway-address', + "--peer-controller-gateway-address", required=True, - help='Gateway address of peer site controller node.' + help="Gateway address of peer site controller node.", ) parser.add_argument( - '--manager-username', + "--manager-username", required=False, - default='admin', - help='Administrative username (default admin).' 
+ default="admin", + help="Administrative username (default admin).", ) parser.add_argument( - '--manager-password', + "--manager-password", required=False, - help='Admin user password for authenticating into the DC manager.' + help="Admin user password for authenticating into the DC manager.", ) parser.add_argument( - '--administrative-state', + "--administrative-state", required=False, - choices=['enabled', 'disabled'], - default='enabled', - help='Administrative control of peer state (default enabled).' + choices=["enabled", "disabled"], + default="enabled", + help="Administrative control of peer state (default enabled).", ) parser.add_argument( - '--heartbeat-interval', + "--heartbeat-interval", required=False, default=60, - help='Interval between heartbeat messages (in seconds) (default \ - 60).' + help="Interval between heartbeat messages (in seconds) (default \ + 60).", ) parser.add_argument( - '--heartbeat-failure-threshold', + "--heartbeat-failure-threshold", required=False, default=3, - help='Consecutive heartbeat failures before failure declared \ - (default 3).' + help="Consecutive heartbeat failures before failure declared \ + (default 3).", ) parser.add_argument( - '--heartbeat-failure-policy', + "--heartbeat-failure-policy", required=False, - choices=['alarm', 'rehome', 'delegate'], - default='alarm', - help='Action to take with failure detection (default alarm).' + choices=["alarm", "rehome", "delegate"], + default="alarm", + help="Action to take with failure detection (default alarm).", ) parser.add_argument( - '--heartbeat-maintenance-timeout', + "--heartbeat-maintenance-timeout", required=False, default=600, - help='Overall failure timeout during maintenance state (in \ - seconds) (default 600).' + help="Overall failure timeout during maintenance state (in \ + seconds) (default 600).", ) return parser @@ -199,51 +195,54 @@ class AddSystemPeer(base.DCManagerShowOne): kwargs = dict() if parsed_args.peer_uuid is not None: - kwargs['peer_uuid'] = parsed_args.peer_uuid + kwargs["peer_uuid"] = parsed_args.peer_uuid if parsed_args.peer_name is not None: - kwargs['peer_name'] = parsed_args.peer_name + kwargs["peer_name"] = parsed_args.peer_name if parsed_args.manager_endpoint is not None: - kwargs['manager_endpoint'] = parsed_args.manager_endpoint + kwargs["manager_endpoint"] = parsed_args.manager_endpoint if parsed_args.manager_username is not None: - kwargs['manager_username'] = parsed_args.manager_username + kwargs["manager_username"] = parsed_args.manager_username # Prompt the user for the peer's password if it isn't provided if parsed_args.manager_password is not None: - kwargs['manager_password'] = base64.b64encode( - parsed_args.manager_password.encode("utf-8")).decode("utf-8") + kwargs["manager_password"] = base64.b64encode( + parsed_args.manager_password.encode("utf-8") + ).decode("utf-8") else: password = utils.prompt_for_password( - password_type=parsed_args.manager_username, - item_type='system peer') + password_type=parsed_args.manager_username, item_type="system peer" + ) kwargs["manager_password"] = base64.b64encode( - password.encode("utf-8")).decode("utf-8") + password.encode("utf-8") + ).decode("utf-8") if parsed_args.peer_controller_gateway_address is not None: - kwargs['peer_controller_gateway_address'] = \ + kwargs["peer_controller_gateway_address"] = ( parsed_args.peer_controller_gateway_address + ) if parsed_args.administrative_state is not None: - kwargs['administrative_state'] = parsed_args.administrative_state + kwargs["administrative_state"] = 
parsed_args.administrative_state if parsed_args.heartbeat_interval is not None: - kwargs['heartbeat_interval'] = parsed_args.heartbeat_interval + kwargs["heartbeat_interval"] = parsed_args.heartbeat_interval if parsed_args.heartbeat_failure_threshold is not None: - kwargs['heartbeat_failure_threshold'] = \ + kwargs["heartbeat_failure_threshold"] = ( parsed_args.heartbeat_failure_threshold + ) if parsed_args.heartbeat_failure_policy is not None: - kwargs['heartbeat_failure_policy'] = \ - parsed_args.heartbeat_failure_policy + kwargs["heartbeat_failure_policy"] = parsed_args.heartbeat_failure_policy if parsed_args.heartbeat_maintenance_timeout is not None: - kwargs['heartbeat_maintenance_timeout'] = \ + kwargs["heartbeat_maintenance_timeout"] = ( parsed_args.heartbeat_maintenance_timeout - return dcmanager_client.system_peer_manager.add_system_peer( - **kwargs) + ) + return dcmanager_client.system_peer_manager.add_system_peer(**kwargs) class ListSystemPeer(base.DCManagerLister): @@ -268,20 +267,20 @@ class ListSystemPeerSubcloudPeerGroups(base.DCManagerLister): return group_format def get_parser(self, prog_name): - parser = super(ListSystemPeerSubcloudPeerGroups, - self).get_parser(prog_name) + parser = super(ListSystemPeerSubcloudPeerGroups, self).get_parser(prog_name) parser.add_argument( - 'peer', - help='Name or ID or UUID of system peer to list \ - associated subcloud peer groups.' + "peer", + help="Name or ID or UUID of system peer to list \ + associated subcloud peer groups.", ) return parser def _get_resources(self, parsed_args): system_peer_ref = parsed_args.peer dcmanager_client = self.app.client_manager.system_peer_manager - return dcmanager_client.system_peer_manager. \ - system_peer_list_peer_groups(system_peer_ref) + return dcmanager_client.system_peer_manager.system_peer_list_peer_groups( + system_peer_ref + ) class ShowSystemPeer(base.DCManagerShowOne): @@ -294,8 +293,7 @@ class ShowSystemPeer(base.DCManagerShowOne): parser = super(ShowSystemPeer, self).get_parser(prog_name) parser.add_argument( - 'peer', - help='UUID or ID of system peer to view the details.' + "peer", help="UUID or ID of system peer to view the details." ) return parser @@ -303,8 +301,9 @@ class ShowSystemPeer(base.DCManagerShowOne): def _get_resources(self, parsed_args): system_peer_ref = parsed_args.peer dcmanager_client = self.app.client_manager.system_peer_manager - return dcmanager_client.system_peer_manager.\ - system_peer_detail(system_peer_ref) + return dcmanager_client.system_peer_manager.system_peer_detail( + system_peer_ref + ) class DeleteSystemPeer(command.Command): @@ -313,21 +312,17 @@ class DeleteSystemPeer(command.Command): def get_parser(self, prog_name): parser = super(DeleteSystemPeer, self).get_parser(prog_name) - parser.add_argument( - 'peer', - help='UUID or ID of the system peer to delete.' 
- ) + parser.add_argument("peer", help="UUID or ID of the system peer to delete.") return parser def take_action(self, parsed_args): system_peer_ref = parsed_args.peer dcmanager_client = self.app.client_manager.system_peer_manager try: - dcmanager_client.system_peer_manager.\ - delete_system_peer(system_peer_ref) + dcmanager_client.system_peer_manager.delete_system_peer(system_peer_ref) except Exception as e: print(e) - msg = "Unable to delete system peer %s" % (system_peer_ref) + msg = f"Unable to delete system peer {system_peer_ref}" raise exceptions.DCManagerClientException(msg) @@ -340,82 +335,75 @@ class UpdateSystemPeer(base.DCManagerShowOne): def get_parser(self, prog_name): parser = super(UpdateSystemPeer, self).get_parser(prog_name) + parser.add_argument("peer", help="UUID or ID of the system peer to update.") + parser.add_argument( - 'peer', - help='UUID or ID of the system peer to update.' + "--peer-uuid", required=False, help="UUID of the new system peer." ) parser.add_argument( - '--peer-uuid', - required=False, - help='UUID of the new system peer.' + "--peer-name", required=False, help="Name for the new system peer." ) parser.add_argument( - '--peer-name', + "--manager-endpoint", required=False, - help='Name for the new system peer.' + help="URI of DC manager of peer System Controller.", ) parser.add_argument( - '--manager-endpoint', + "--manager-username", required=False, - help='URI of DC manager of peer System Controller.' + help="Administrative username (default admin).", ) parser.add_argument( - '--manager-username', + "--manager-password", required=False, - help='Administrative username (default admin).' - ) - - parser.add_argument( - '--manager-password', - required=False, - nargs='?', + nargs="?", const=True, - help='Admin user password for authenticating into the DC manager' + help="Admin user password for authenticating into the DC manager", ) parser.add_argument( - '--peer-controller-gateway-address', + "--peer-controller-gateway-address", required=False, - help='Gateway address of peer site controller node.' + help="Gateway address of peer site controller node.", ) parser.add_argument( - '--administrative-state', + "--administrative-state", required=False, - choices=['enabled', 'disabled'], - help='Administrative control of peer state (default enabled).' + choices=["enabled", "disabled"], + help="Administrative control of peer state (default enabled).", ) parser.add_argument( - '--heartbeat-interval', + "--heartbeat-interval", required=False, - help='Interval between heartbeat messages (in seconds) (default \ - 60).' + help="Interval between heartbeat messages (in seconds) (default \ + 60).", ) parser.add_argument( - '--heartbeat-failure-threshold', + "--heartbeat-failure-threshold", required=False, - help='Consecutive heartbeat failures before failure declared \ - (default 3).' + help="Consecutive heartbeat failures before failure declared \ + (default 3).", ) parser.add_argument( - '--heartbeat-failure-policy', + "--heartbeat-failure-policy", required=False, - choices=['alarm', 'rehome', 'delegate'], - help='Action to take with failure detection (default alarm).' + choices=["alarm", "rehome", "delegate"], + help="Action to take with failure detection (default alarm).", ) parser.add_argument( - '--heartbeat-maintenance-timeout', + "--heartbeat-maintenance-timeout", required=False, - help='Overall failure timeout during maintenance state (in \ - seconds) (default 600).' 
+ help="Overall failure timeout during maintenance state (in \ + seconds) (default 600).", ) return parser @@ -425,49 +413,52 @@ class UpdateSystemPeer(base.DCManagerShowOne): dcmanager_client = self.app.client_manager.system_peer_manager kwargs = dict() if parsed_args.peer_uuid: - kwargs['peer_uuid'] = parsed_args.peer_uuid + kwargs["peer_uuid"] = parsed_args.peer_uuid if parsed_args.peer_name: - kwargs['peer_name'] = parsed_args.peer_name + kwargs["peer_name"] = parsed_args.peer_name if parsed_args.manager_endpoint: - kwargs['manager_endpoint'] = parsed_args.manager_endpoint + kwargs["manager_endpoint"] = parsed_args.manager_endpoint if parsed_args.manager_username: - kwargs['manager_username'] = parsed_args.manager_username + kwargs["manager_username"] = parsed_args.manager_username if parsed_args.manager_password: if parsed_args.manager_password is True: password = utils.prompt_for_password( - password_type='update', - item_type='system peer') + password_type="update", item_type="system peer" + ) kwargs["manager_password"] = base64.b64encode( - password.encode("utf-8")).decode("utf-8") + password.encode("utf-8") + ).decode("utf-8") else: - kwargs['manager_password'] = base64.b64encode( - parsed_args.manager_password.encode( - "utf-8")).decode("utf-8") + kwargs["manager_password"] = base64.b64encode( + parsed_args.manager_password.encode("utf-8") + ).decode("utf-8") if parsed_args.peer_controller_gateway_address: - kwargs['peer_controller_gateway_address'] = \ + kwargs["peer_controller_gateway_address"] = ( parsed_args.peer_controller_gateway_address + ) if parsed_args.administrative_state: - kwargs['administrative_state'] = parsed_args.administrative_state + kwargs["administrative_state"] = parsed_args.administrative_state if parsed_args.heartbeat_interval: - kwargs['heartbeat_interval'] = parsed_args.heartbeat_interval + kwargs["heartbeat_interval"] = parsed_args.heartbeat_interval if parsed_args.heartbeat_failure_threshold: - kwargs['heartbeat_failure_threshold'] = \ + kwargs["heartbeat_failure_threshold"] = ( parsed_args.heartbeat_failure_threshold + ) if parsed_args.heartbeat_failure_policy: - kwargs['heartbeat_failure_policy'] = \ - parsed_args.heartbeat_failure_policy + kwargs["heartbeat_failure_policy"] = parsed_args.heartbeat_failure_policy if parsed_args.heartbeat_maintenance_timeout: - kwargs['heartbeat_maintenance_timeout'] = \ + kwargs["heartbeat_maintenance_timeout"] = ( parsed_args.heartbeat_maintenance_timeout + ) if len(kwargs) == 0: error_msg = "Nothing to update" raise exceptions.DCManagerClientException(error_msg) try: - return dcmanager_client. \ - system_peer_manager.update_system_peer( - system_peer_ref, **kwargs) + return dcmanager_client.system_peer_manager.update_system_peer( + system_peer_ref, **kwargs + ) except Exception as e: print(e) - msg = "Unable to update system peer %s" % (system_peer_ref) + msg = f"Unable to update system peer {system_peer_ref}" raise exceptions.DCManagerClientException(msg) diff --git a/distributedcloud-client/dcmanagerclient/exceptions.py b/distributedcloud-client/dcmanagerclient/exceptions.py index f6fb679..f198236 100644 --- a/distributedcloud-client/dcmanagerclient/exceptions.py +++ b/distributedcloud-client/dcmanagerclient/exceptions.py @@ -1,5 +1,5 @@ # Copyright 2016 Ericsson AB -# Copyright (c) 2017-2021 Wind River Systems, Inc. +# Copyright (c) 2017-2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -21,6 +21,7 @@ class DCManagerClientException(Exception): To correctly use this class, inherit from it and define a 'message' and 'code' properties. """ + message = "An unknown exception occurred" code = "UNKNOWN_EXCEPTION" @@ -30,7 +31,8 @@ class DCManagerClientException(Exception): def __init__(self, message=message): self.message = message super(DCManagerClientException, self).__init__( - '%s: %s' % (self.code, self.message)) + f"{self.code}: {self.message}" + ) class IllegalArgumentException(DCManagerClientException): diff --git a/distributedcloud-client/dcmanagerclient/shell.py b/distributedcloud-client/dcmanagerclient/shell.py index 4e824a9..c8381fb 100644 --- a/distributedcloud-client/dcmanagerclient/shell.py +++ b/distributedcloud-client/dcmanagerclient/shell.py @@ -23,12 +23,12 @@ import logging import os import sys -from cliff import app -from cliff import commandmanager +from cliff import app, commandmanager from cliff import help as cliff_help from osc_lib.command import command from dcmanagerclient import __version__ as dcmanager_version +from dcmanagerclient import exceptions from dcmanagerclient.api import client from dcmanagerclient.commands.v1 import alarm_manager as am from dcmanagerclient.commands.v1 import fw_update_manager as fum @@ -48,9 +48,6 @@ from dcmanagerclient.commands.v1 import sw_update_manager as swum from dcmanagerclient.commands.v1 import sw_update_options_manager as suom from dcmanagerclient.commands.v1 import sw_upgrade_manager as supm from dcmanagerclient.commands.v1 import system_peer_manager as sp -from dcmanagerclient import exceptions - -LOG = logging.getLogger(__name__) def env(*args, **kwargs): @@ -62,22 +59,24 @@ def env(*args, **kwargs): value = os.environ.get(arg) if value: return value - return kwargs.get('default', '') + return kwargs.get("default", "") class OpenStackHelpFormatter(argparse.HelpFormatter): - def __init__(self, prog, indent_increment=2, max_help_position=32, - width=None): + def __init__( + self, + prog, + indent_increment=2, + max_help_position=32, + width=None, + ): super(OpenStackHelpFormatter, self).__init__( - prog, - indent_increment, - max_help_position, - width + prog, indent_increment, max_help_position, width ) def start_section(self, heading): # Title-case the headings. 
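DCManagerClientException above is designed to be subclassed with message and code overrides, and the reworked constructor simply renders them as "CODE: message". A hypothetical subclass, only to show the intended pattern (SubcloudNotFound is not part of the client):

from dcmanagerclient.exceptions import DCManagerClientException


class SubcloudNotFound(DCManagerClientException):
    # Hypothetical subclass: overrides the class-level defaults.
    message = "Subcloud could not be found"
    code = "SUBCLOUD_NOT_FOUND"


try:
    raise SubcloudNotFound("Subcloud subcloud1 could not be found")
except DCManagerClientException as exc:
    print(exc)  # SUBCLOUD_NOT_FOUND: Subcloud subcloud1 could not be found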
- heading = '%s%s' % (heading[0].upper(), heading[1:]) + heading = f"{heading[0].upper()}{heading[1:]}" super(OpenStackHelpFormatter, self).start_section(heading) @@ -88,6 +87,7 @@ class HelpCommand(cliff_help.HelpCommand): arguments could use our custom HelpAction """ + def take_action(self, parsed_args): if parsed_args.cmd: super().take_action(parsed_args) @@ -113,18 +113,17 @@ class HelpAction(argparse.Action): max_len = 0 main_app = self.default parser.print_help(main_app.stdout) - main_app.stdout.write('\nCommands for API v1 :\n') + main_app.stdout.write("\nCommands for API v1 :\n") for name, ep in sorted(main_app.command_manager): factory = ep.load() cmd = factory(self, None) - one_liner = cmd.get_description().split('\n')[0] + one_liner = cmd.get_description().split("\n")[0] outputs.append((name, one_liner)) max_len = max(len(name), max_len) - for (name, one_liner) in outputs: - main_app.stdout.write(' %s %s\n' % (name.ljust(max_len), - one_liner)) + for name, one_liner in outputs: + main_app.stdout.write(f" {name.ljust(max_len)} {one_liner}\n") sys.exit(0) @@ -136,13 +135,16 @@ class BashCompletionCommand(command.Command): commands = set() options = set() - for option, _action in self.app.parser._option_string_actions.items(): + for ( + option, + _action, + ) in self.app.parser._option_string_actions.items(): options.add(option) for command_name, _cmd in self.app.command_manager: commands.add(command_name) - print(' '.join(commands | options)) + print(" ".join(commands | options)) class DCManagerShell(app.App): @@ -150,11 +152,11 @@ class DCManagerShell(app.App): super(DCManagerShell, self).__init__( description=__doc__.strip(), version=dcmanager_version, - command_manager=commandmanager.CommandManager('dcmanager.cli'), + command_manager=commandmanager.CommandManager("dcmanager.cli"), ) # Override default help command - self.command_manager.add_command('help', HelpCommand) + self.command_manager.add_command("help", HelpCommand) # Set v1 commands by default self._set_shell_commands(self._get_commands(version=1)) @@ -166,15 +168,14 @@ class DCManagerShell(app.App): log_lvl = logging.DEBUG if self.options.debug else logging.WARNING logging.basicConfig( format="%(levelname)s (%(module)s) %(message)s", - level=log_lvl + level=log_lvl, ) - logging.getLogger('iso8601').setLevel(logging.WARNING) + logging.getLogger("iso8601").setLevel(logging.WARNING) if self.options.verbose_level <= 1: - logging.getLogger('requests').setLevel(logging.WARNING) + logging.getLogger("requests").setLevel(logging.WARNING) - def build_option_parser(self, description, version, - argparse_kwargs=None): + def build_option_parser(self, description, version, argparse_kwargs=None): """Return an argparse option parser for this application. Subclasses may override this method to extend @@ -194,41 +195,44 @@ class DCManagerShell(app.App): description=description, add_help=False, formatter_class=OpenStackHelpFormatter, - **argparse_kwargs + **argparse_kwargs, ) parser.add_argument( - '--version', - action='version', - version='%(prog)s {0}'.format(version), - help='Show program\'s version number and exit.' + "--version", + action="version", + version=f"%(prog)s {version}", + help="Show program's version number and exit.", ) parser.add_argument( - '-v', '--verbose', - action='count', - dest='verbose_level', + "-v", + "--verbose", + action="count", + dest="verbose_level", default=self.DEFAULT_VERBOSE_LEVEL, - help='Increase verbosity of output. Can be repeated.', + help="Increase verbosity of output. 
Can be repeated.", ) parser.add_argument( - '--log-file', - action='store', + "--log-file", + action="store", default=None, - help='Specify a file to log output. Disabled by default.', + help="Specify a file to log output. Disabled by default.", ) parser.add_argument( - '-q', '--quiet', - action='store_const', - dest='verbose_level', + "-q", + "--quiet", + action="store_const", + dest="verbose_level", const=0, - help='Suppress output except warnings and errors.', + help="Suppress output except warnings and errors.", ) parser.add_argument( - '-h', '--help', + "-h", + "--help", action=HelpAction, nargs=0, default=self, # tricky @@ -236,183 +240,178 @@ class DCManagerShell(app.App): ) parser.add_argument( - '--debug', + "--debug", default=False, - action='store_true', - help='Show tracebacks on errors.', + action="store_true", + help="Show tracebacks on errors.", ) parser.add_argument( - '--dcmanager-url', - action='store', - dest='dcmanager_url', - default=env('DCMANAGER_URL'), - help='DC Manager API host (Env: DCMANAGER_URL)' + "--dcmanager-url", + action="store", + dest="dcmanager_url", + default=env("DCMANAGER_URL"), + help="DC Manager API host (Env: DCMANAGER_URL)", ) parser.add_argument( - '--dcmanager-api-version', - action='store', - dest='dcmanager_version', - default=env('DCMANAGER_API_VERSION', default='v1.0'), - help='DC Manager API version (default = v1.0) (Env: ' - 'DCMANAGER_API_VERSION)' + "--dcmanager-api-version", + action="store", + dest="dcmanager_version", + default=env("DCMANAGER_API_VERSION", default="v1.0"), + help="DC Manager API version (default = v1.0) (Env: " + "DCMANAGER_API_VERSION)", ) parser.add_argument( - '--dcmanager-service-type', - action='store', - dest='service_type', - default=env('DCMANAGER_SERVICE_TYPE', - default='dcmanager'), - help='DC Manager service-type (should be the same name as in ' - 'keystone-endpoint) (default = dcmanager) (Env: ' - 'DCMANAGER_SERVICE_TYPE)' + "--dcmanager-service-type", + action="store", + dest="service_type", + default=env("DCMANAGER_SERVICE_TYPE", default="dcmanager"), + help="DC Manager service-type (should be the same name as in " + "keystone-endpoint) (default = dcmanager) (Env: " + "DCMANAGER_SERVICE_TYPE)", ) parser.add_argument( - '--os-endpoint-type', - action='store', - dest='endpoint_type', - default=env('OS_ENDPOINT_TYPE', - default='internalURL'), - help='DC Manager endpoint-type (should be the same name as in ' - 'keystone-endpoint) (default = OS_ENDPOINT_TYPE)' + "--os-endpoint-type", + action="store", + dest="endpoint_type", + default=env("OS_ENDPOINT_TYPE", default="internalURL"), + help="DC Manager endpoint-type (should be the same name as in " + "keystone-endpoint) (default = OS_ENDPOINT_TYPE)", ) parser.add_argument( - '--os-username', - action='store', - dest='username', - default=env('OS_USERNAME', default='admin'), - help='Authentication username (Env: OS_USERNAME)' + "--os-username", + action="store", + dest="username", + default=env("OS_USERNAME", default="admin"), + help="Authentication username (Env: OS_USERNAME)", ) parser.add_argument( - '--os-password', - action='store', - dest='password', - default=env('OS_PASSWORD'), - help='Authentication password (Env: OS_PASSWORD)' + "--os-password", + action="store", + dest="password", + default=env("OS_PASSWORD"), + help="Authentication password (Env: OS_PASSWORD)", ) parser.add_argument( - '--os-tenant-id', - action='store', - dest='tenant_id', - default=env('OS_TENANT_ID', 'OS_PROJECT_ID'), - help='Authentication tenant identifier (Env: 
OS_TENANT_ID)' + "--os-tenant-id", + action="store", + dest="tenant_id", + default=env("OS_TENANT_ID", "OS_PROJECT_ID"), + help="Authentication tenant identifier (Env: OS_TENANT_ID)", ) parser.add_argument( - '--os-project-id', - action='store', - dest='project_id', - default=env('OS_TENANT_ID', 'OS_PROJECT_ID'), - help='Authentication project identifier (Env: OS_TENANT_ID' - ' or OS_PROJECT_ID), will use tenant_id if both tenant_id' - ' and project_id are set' + "--os-project-id", + action="store", + dest="project_id", + default=env("OS_TENANT_ID", "OS_PROJECT_ID"), + help="Authentication project identifier (Env: OS_TENANT_ID" + " or OS_PROJECT_ID), will use tenant_id if both tenant_id" + " and project_id are set", ) parser.add_argument( - '--os-tenant-name', - action='store', - dest='tenant_name', - default=env('OS_TENANT_NAME', 'OS_PROJECT_NAME'), - help='Authentication tenant name (Env: OS_TENANT_NAME)' + "--os-tenant-name", + action="store", + dest="tenant_name", + default=env("OS_TENANT_NAME", "OS_PROJECT_NAME"), + help="Authentication tenant name (Env: OS_TENANT_NAME)", ) parser.add_argument( - '--os-project-name', - action='store', - dest='project_name', - default=env('OS_TENANT_NAME', 'OS_PROJECT_NAME'), - help='Authentication project name (Env: OS_TENANT_NAME' - ' or OS_PROJECT_NAME), will use tenant_name if both' - ' tenant_name and project_name are set' + "--os-project-name", + action="store", + dest="project_name", + default=env("OS_TENANT_NAME", "OS_PROJECT_NAME"), + help="Authentication project name (Env: OS_TENANT_NAME" + " or OS_PROJECT_NAME), will use tenant_name if both" + " tenant_name and project_name are set", ) parser.add_argument( - '--os-auth-token', - action='store', - dest='token', - default=env('OS_AUTH_TOKEN'), - help='Authentication token (Env: OS_AUTH_TOKEN)' + "--os-auth-token", + action="store", + dest="token", + default=env("OS_AUTH_TOKEN"), + help="Authentication token (Env: OS_AUTH_TOKEN)", ) parser.add_argument( - '--os-project-domain-name', - action='store', - dest='project_domain_name', - default=env('OS_PROJECT_DOMAIN_NAME'), - help='Authentication project domain name or ID' - ' (Env: OS_PROJECT_DOMAIN_NAME)' + "--os-project-domain-name", + action="store", + dest="project_domain_name", + default=env("OS_PROJECT_DOMAIN_NAME"), + help="Authentication project domain name or ID" + " (Env: OS_PROJECT_DOMAIN_NAME)", ) parser.add_argument( - '--os-project-domain-id', - action='store', - dest='project_domain_id', - default=env('OS_PROJECT_DOMAIN_ID'), - help='Authentication project domain ID' - ' (Env: OS_PROJECT_DOMAIN_ID)' + "--os-project-domain-id", + action="store", + dest="project_domain_id", + default=env("OS_PROJECT_DOMAIN_ID"), + help="Authentication project domain ID" " (Env: OS_PROJECT_DOMAIN_ID)", ) parser.add_argument( - '--os-user-domain-name', - action='store', - dest='user_domain_name', - default=env('OS_USER_DOMAIN_NAME'), - help='Authentication user domain name' - ' (Env: OS_USER_DOMAIN_NAME)' + "--os-user-domain-name", + action="store", + dest="user_domain_name", + default=env("OS_USER_DOMAIN_NAME"), + help="Authentication user domain name" " (Env: OS_USER_DOMAIN_NAME)", ) parser.add_argument( - '--os-user-domain-id', - action='store', - dest='user_domain_id', - default=env('OS_USER_DOMAIN_ID'), - help='Authentication user domain name' - ' (Env: OS_USER_DOMAIN_ID)' + "--os-user-domain-id", + action="store", + dest="user_domain_id", + default=env("OS_USER_DOMAIN_ID"), + help="Authentication user domain name" " (Env: OS_USER_DOMAIN_ID)", ) 
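# --- Illustrative sketch, not part of the patch --------------------------------
# The --os-tenant-id / --os-project-id / --os-*-domain defaults above all go
# through the env() helper defined earlier in shell.py: the first non-empty
# environment variable wins, otherwise the "default" kwarg (or "") is returned.
# The environment value below is made up purely for demonstration.
import os


def env(*args, **kwargs):
    # Same lookup rule as dcmanagerclient.shell.env()
    for arg in args:
        value = os.environ.get(arg)
        if value:
            return value
    return kwargs.get("default", "")


os.environ["OS_PROJECT_ID"] = "abc123"
print(env("OS_TENANT_ID", "OS_PROJECT_ID"))   # -> abc123 (falls back to OS_PROJECT_ID)
print(env("OS_AUTH_URL", default="not-set"))  # -> not-set
# --------------------------------------------------------------------------------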
parser.add_argument( - '--os-auth-url', - action='store', - dest='auth_url', - default=env('OS_AUTH_URL'), - help='Authentication URL (Env: OS_AUTH_URL)' + "--os-auth-url", + action="store", + dest="auth_url", + default=env("OS_AUTH_URL"), + help="Authentication URL (Env: OS_AUTH_URL)", ) parser.add_argument( - '--os-cacert', - action='store', - dest='cacert', - default=env('OS_CACERT'), - help='Authentication CA Certificate (Env: OS_CACERT)' + "--os-cacert", + action="store", + dest="cacert", + default=env("OS_CACERT"), + help="Authentication CA Certificate (Env: OS_CACERT)", ) parser.add_argument( - '--insecure', - action='store_true', - dest='insecure', - default=env('DCMANAGERCLIENT_INSECURE', default=False), - help='Disables SSL/TLS certificate verification ' - '(Env: DCMANAGERCLIENT_INSECURE)' + "--insecure", + action="store_true", + dest="insecure", + default=env("DCMANAGERCLIENT_INSECURE", default=False), + help="Disables SSL/TLS certificate verification " + "(Env: DCMANAGERCLIENT_INSECURE)", ) parser.add_argument( - '--profile', - dest='profile', - metavar='HMAC_KEY', - help='HMAC key to use for encrypting context data for performance ' - 'profiling of operation. This key should be one of the ' - 'values configured for the osprofiler middleware in ' - 'dcmanager, it is specified in the profiler section of the ' - 'dcmanager configuration ' - '(i.e. /etc/dcmanager/dcmanager.conf). ' - 'Without the key, profiling will not be triggered even if ' - 'osprofiler is enabled on the server side.' + "--profile", + dest="profile", + metavar="HMAC_KEY", + help="HMAC key to use for encrypting context data for performance " + "profiling of operation. This key should be one of the " + "values configured for the osprofiler middleware in " + "dcmanager, it is specified in the profiler section of the " + "dcmanager configuration " + "(i.e. /etc/dcmanager/dcmanager.conf). " + "Without the key, profiling will not be triggered even if " + "osprofiler is enabled on the server side.", ) return parser @@ -424,38 +423,43 @@ class DCManagerShell(app.App): self._set_shell_commands(self._get_commands(ver)) - do_help = ['help', '-h', 'bash-completion', 'complete'] + do_help = ["help", "-h", "bash-completion", "complete"] # bash-completion should not require authentication. 
- skip_auth = ''.join(argv) in do_help + skip_auth = "".join(argv) in do_help if skip_auth: self.options.auth_url = None - if self.options.auth_url and not self.options.token \ - and not skip_auth: + if self.options.auth_url and not self.options.token and not skip_auth: if not self.options.tenant_name: raise exceptions.CommandError( - ("You must provide a tenant_name " - "via --os-tenantname env[OS_TENANT_NAME]") + ( + "You must provide a tenant_name " + "via --os-tenantname env[OS_TENANT_NAME]" + ) ) if not self.options.username: raise exceptions.CommandError( - ("You must provide a username " - "via --os-username env[OS_USERNAME]") + ( + "You must provide a username " + "via --os-username env[OS_USERNAME]" + ) ) if not self.options.password: raise exceptions.CommandError( - ("You must provide a password " - "via --os-password env[OS_PASSWORD]") + ( + "You must provide a password " + "via --os-password env[OS_PASSWORD]" + ) ) kwargs = { - 'user_domain_name': self.options.user_domain_name, - 'user_domain_id': self.options.user_domain_id, - 'project_domain_name': self.options.project_domain_name, - 'project_domain_id': self.options.project_domain_id + "user_domain_name": self.options.user_domain_name, + "user_domain_id": self.options.user_domain_id, + "project_domain_name": self.options.project_domain_name, + "project_domain_id": self.options.project_domain_id, } self.client = client.client( @@ -471,40 +475,43 @@ class DCManagerShell(app.App): cacert=self.options.cacert, insecure=self.options.insecure, profile=self.options.profile, - **kwargs + **kwargs, ) if not self.options.auth_url and not skip_auth: raise exceptions.CommandError( - ("You must provide an auth url via either " - "--os-auth-url or env[OS_AUTH_URL] or " - "specify an auth_system which defines a" - " default url with --os-auth-system or env[OS_AUTH_SYSTEM]") + ( + "You must provide an auth url via either " + "--os-auth-url or env[OS_AUTH_URL] or " + "specify an auth_system which defines a" + " default url with --os-auth-system or env[OS_AUTH_SYSTEM]" + ) ) # Adding client_manager variable to make dcmanager client work with # unified OpenStack client. 
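# --- Illustrative sketch, not part of the patch --------------------------------
# The code that follows builds ClientManager dynamically with the
# three-argument form of type(name, bases, namespace), so the unified
# OpenStack client can look up every *_manager attribute on a single object.
# A minimal standalone equivalent; fake_client and the shortened attribute
# list are stand-ins for illustration only.
fake_client = object()  # stands in for the dcmanager API client instance

ClientManager = type(
    "ClientManager",
    (object,),
    dict(subcloud_manager=fake_client, alarm_manager=fake_client),
)

cm = ClientManager()
print(cm.subcloud_manager is fake_client)  # -> True
# --------------------------------------------------------------------------------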
ClientManager = type( - 'ClientManager', + "ClientManager", (object,), - dict(subcloud_manager=self.client, - subcloud_backup_manager=self.client, - subcloud_group_manager=self.client, - subcloud_deploy_manager=self.client, - system_peer_manager=self.client, - alarm_manager=self.client, - fw_update_manager=self.client, - sw_patch_manager=self.client, - strategy_step_manager=self.client, - sw_update_options_manager=self.client, - sw_upgrade_manager=self.client, - sw_deploy_manager=self.client, - kube_upgrade_manager=self.client, - kube_rootca_update_manager=self.client, - sw_prestage_manager=self.client, - phased_subcloud_deploy_manager=self.client, - subcloud_peer_group_manager=self.client, - peer_group_association_manager=self.client) + dict( + subcloud_manager=self.client, + subcloud_backup_manager=self.client, + subcloud_group_manager=self.client, + subcloud_deploy_manager=self.client, + system_peer_manager=self.client, + alarm_manager=self.client, + fw_update_manager=self.client, + sw_patch_manager=self.client, + strategy_step_manager=self.client, + sw_update_options_manager=self.client, + sw_upgrade_manager=self.client, + kube_upgrade_manager=self.client, + kube_rootca_update_manager=self.client, + sw_prestage_manager=self.client, + phased_subcloud_deploy_manager=self.client, + subcloud_peer_group_manager=self.client, + peer_group_association_manager=self.client, + ), ) self.client_manager = ClientManager() @@ -513,7 +520,7 @@ class DCManagerShell(app.App): self.command_manager.add_command(k, v) def _clear_shell_commands(self): - exclude_cmds = ['help', 'complete'] + exclude_cmds = ["help", "complete"] cmds = self.command_manager.commands.copy() for k, _v in cmds.items(): @@ -536,16 +543,13 @@ class DCManagerShell(app.App): "fw-update-strategy create": fum.CreateFwUpdateStrategy, "fw-update-strategy delete": fum.DeleteFwUpdateStrategy, "fw-update-strategy show": fum.ShowFwUpdateStrategy, - "kube-rootca-update-strategy abort": - krum.AbortKubeRootcaUpdateStrategy, - "kube-rootca-update-strategy apply": - krum.ApplyKubeRootcaUpdateStrategy, + "kube-rootca-update-strategy abort": krum.AbortKubeRootcaUpdateStrategy, + "kube-rootca-update-strategy apply": krum.ApplyKubeRootcaUpdateStrategy, "kube-rootca-update-strategy create": krum.CreateKubeRootcaUpdateStrategy, "kube-rootca-update-strategy delete": krum.DeleteKubeRootcaUpdateStrategy, - "kube-rootca-update-strategy show": - krum.ShowKubeRootcaUpdateStrategy, + "kube-rootca-update-strategy show": krum.ShowKubeRootcaUpdateStrategy, "kube-upgrade-strategy abort": kupm.AbortKubeUpgradeStrategy, "kube-upgrade-strategy apply": kupm.ApplyKubeUpgradeStrategy, "kube-upgrade-strategy create": kupm.CreateKubeUpgradeStrategy, @@ -619,8 +623,7 @@ class DCManagerShell(app.App): "subcloud-peer-group add": pm.AddSubcloudPeerGroup, "subcloud-peer-group delete": pm.DeleteSubcloudPeerGroup, "subcloud-peer-group list": pm.ListSubcloudPeerGroup, - "subcloud-peer-group list-subclouds": - pm.ListSubcloudPeerGroupSubclouds, + "subcloud-peer-group list-subclouds": pm.ListSubcloudPeerGroupSubclouds, "subcloud-peer-group migrate": pm.MigrateSubcloudPeerGroup, "subcloud-peer-group show": pm.ShowSubcloudPeerGroup, "subcloud-peer-group status": pm.StatusSubcloudPeerGroup, @@ -646,5 +649,5 @@ def main(argv=None): return DCManagerShell().run(argv) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main(sys.argv[1:])) diff --git a/distributedcloud-client/dcmanagerclient/tests/base.py b/distributedcloud-client/dcmanagerclient/tests/base.py index 
fbed3c5..56fa725 100644 --- a/distributedcloud-client/dcmanagerclient/tests/base.py +++ b/distributedcloud-client/dcmanagerclient/tests/base.py @@ -18,53 +18,50 @@ import json import mock -from oslo_utils import timeutils import testtools +from oslo_utils import timeutils from dcmanagerclient.api import base as api_base - # Subcloud sample data -BOOTSTRAP_ADDRESS = '10.10.10.12' +BOOTSTRAP_ADDRESS = "10.10.10.12" TIME_NOW = timeutils.utcnow().isoformat() -ID = '1' -ID_1 = '2' -NAME = 'subcloud1' +ID = "1" +ID_1 = "2" +NAME = "subcloud1" SYSTEM_MODE = "duplex" -DESCRIPTION = 'subcloud1 description' -LOCATION = 'subcloud1 location' -SOFTWARE_VERSION = '12.34' -MANAGEMENT_STATE = 'unmanaged' -AVAILABILITY_STATUS = 'offline' -DEPLOY_STATUS = 'not-deployed' -SYNC_STATUS = 'unknown' -ERROR_DESCRIPTION = 'No errors present' -REGION_NAME = '2ec93dfb654846909efe61d1b39dd2ce' -DEPLOY_STATE_PRE_DEPLOY = 'pre-deploy' -DEPLOY_STATE_PRE_RESTORE = 'pre-restore' -MANAGEMENT_SUBNET = '192.168.101.0/24' -MANAGEMENT_START_IP = '192.168.101.2' -MANAGEMENT_END_IP = '192.168.101.50' -MANAGEMENT_GATEWAY_IP = '192.168.101.1' -SYSTEMCONTROLLER_GATEWAY_IP = '192.168.204.101' +DESCRIPTION = "subcloud1 description" +LOCATION = "subcloud1 location" +SOFTWARE_VERSION = "12.34" +MANAGEMENT_STATE = "unmanaged" +AVAILABILITY_STATUS = "offline" +DEPLOY_STATUS = "not-deployed" +SYNC_STATUS = "unknown" +ERROR_DESCRIPTION = "No errors present" +REGION_NAME = "2ec93dfb654846909efe61d1b39dd2ce" +DEPLOY_STATE_PRE_DEPLOY = "pre-deploy" +DEPLOY_STATE_PRE_RESTORE = "pre-restore" +MANAGEMENT_SUBNET = "192.168.101.0/24" +MANAGEMENT_START_IP = "192.168.101.2" +MANAGEMENT_END_IP = "192.168.101.50" +MANAGEMENT_GATEWAY_IP = "192.168.101.1" +SYSTEMCONTROLLER_GATEWAY_IP = "192.168.204.101" EXTERNAL_OAM_SUBNET = "10.10.10.0/24" EXTERNAL_OAM_GATEWAY_ADDRESS = "10.10.10.1" EXTERNAL_OAM_FLOATING_ADDRESS = "10.10.10.12" -DEFAULT_SUBCLOUD_GROUP_ID = '1' -DEPLOY_CONFIG_SYNC_STATUS = 'Deployment: configurations up-to-date' +DEFAULT_SUBCLOUD_GROUP_ID = "1" +DEPLOY_CONFIG_SYNC_STATUS = "Deployment: configurations up-to-date" SUBCLOUD_PEERGROUP_ID = None SUBCLOUD_REHOME_DATA = None -BACKUP_STATUS = 'None' -BACKUP_DATETIME = 'None' -PRESTAGE_STATUS = 'None' +BACKUP_STATUS = "None" +BACKUP_DATETIME = "None" +PRESTAGE_STATUS = "None" PRESTAGE_VERSIONS = None SYNC = None # Useful for subcloud name configuration NAME_SC2 = "subcloud2" -SET_FIELD_VALUE_DICT = { - "region_name": None -} +SET_FIELD_VALUE_DICT = {"region_name": None} # Subcloud CLI resource object SUBCLOUD_RESOURCE = api_base.Subcloud( @@ -90,7 +87,8 @@ SUBCLOUD_RESOURCE = api_base.Subcloud( backup_datetime=BACKUP_DATETIME, prestage_status=PRESTAGE_STATUS, prestage_versions=PRESTAGE_VERSIONS, - region_name=REGION_NAME) + region_name=REGION_NAME, +) # Subcloud CLI resource object with peerid rehome data SUBCLOUD_RESOURCE_WITH_PEERID = api_base.Subcloud( @@ -115,7 +113,8 @@ SUBCLOUD_RESOURCE_WITH_PEERID = api_base.Subcloud( backup_status=BACKUP_STATUS, backup_datetime=BACKUP_DATETIME, prestage_status=PRESTAGE_STATUS, - prestage_versions=PRESTAGE_VERSIONS) + prestage_versions=PRESTAGE_VERSIONS, +) # Subcloud CLI resource object with all list fields SUBCLOUD_RESOURCE_WITH_ALL_LIST_FIELDS = api_base.Subcloud( @@ -141,7 +140,8 @@ SUBCLOUD_RESOURCE_WITH_ALL_LIST_FIELDS = api_base.Subcloud( backup_status=BACKUP_STATUS, backup_datetime=BACKUP_DATETIME, prestage_status=PRESTAGE_STATUS, - prestage_versions=PRESTAGE_VERSIONS) + prestage_versions=PRESTAGE_VERSIONS, +) # Subcloud result values returned 
from various API calls (e.g. subcloud show) SUBCLOUD_FIELD_RESULT_LIST = ( @@ -162,7 +162,7 @@ SUBCLOUD_FIELD_RESULT_LIST = ( TIME_NOW, TIME_NOW, BACKUP_STATUS, - BACKUP_DATETIME + BACKUP_DATETIME, ) # Subcloud result values returned from various API calls (e.g. subcloud show) @@ -187,23 +187,26 @@ SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID = ( BACKUP_STATUS, BACKUP_DATETIME, PRESTAGE_STATUS, - PRESTAGE_VERSIONS + PRESTAGE_VERSIONS, ) -EMPTY_SUBCLOUD_FIELD_RESULT = (('',) * len(SUBCLOUD_FIELD_RESULT_LIST),) -EMPTY_SUBCLOUD_FIELD_RESULT_WITH_PEERID_REHOME_DATA = \ - (('',) * len(SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID),) +EMPTY_SUBCLOUD_FIELD_RESULT = (("",) * len(SUBCLOUD_FIELD_RESULT_LIST),) +EMPTY_SUBCLOUD_FIELD_RESULT_WITH_PEERID_REHOME_DATA = ( + ("",) * len(SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID), +) # Create subcloud all fields based on SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID # and add an additional "sync" field DEPLOY_STATUS_IDX = SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID.index(DEPLOY_STATUS) -SUBCLOUD_ALL_FIELDS_RESULT_LIST = \ - SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID[:DEPLOY_STATUS_IDX + 1] + \ - (SYNC,) + \ - SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID[DEPLOY_STATUS_IDX + 1:] +SUBCLOUD_ALL_FIELDS_RESULT_LIST = ( + SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID[: DEPLOY_STATUS_IDX + 1] + + (SYNC,) + + SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID[DEPLOY_STATUS_IDX + 1:] +) -EMPTY_SUBCLOUD_ALL_FIELDS_RESULT = \ - (('',) * len(SUBCLOUD_ALL_FIELDS_RESULT_LIST),) +EMPTY_SUBCLOUD_ALL_FIELDS_RESULT = ( + ("",) * len(SUBCLOUD_ALL_FIELDS_RESULT_LIST), +) # Subcloud result values returned from subcloud list command SUBCLOUD_LIST_RESULT = ( @@ -214,10 +217,10 @@ SUBCLOUD_LIST_RESULT = ( DEPLOY_STATUS, SYNC_STATUS, BACKUP_STATUS, - PRESTAGE_STATUS + PRESTAGE_STATUS, ) -EMPTY_SUBCLOUD_LIST_RESULT = (('',) * len(SUBCLOUD_LIST_RESULT),) +EMPTY_SUBCLOUD_LIST_RESULT = (("",) * len(SUBCLOUD_LIST_RESULT),) FAKE_BOOTSTRAP_VALUES = { "system_mode": SYSTEM_MODE, @@ -231,12 +234,10 @@ FAKE_BOOTSTRAP_VALUES = { "external_oam_subnet": EXTERNAL_OAM_SUBNET, "external_oam_gateway_address": EXTERNAL_OAM_GATEWAY_ADDRESS, "external_oam_floating_address": EXTERNAL_OAM_FLOATING_ADDRESS, - 'backup_status': BACKUP_STATUS, - 'backup_datetime': BACKUP_DATETIME, - 'backup_status': BACKUP_STATUS, - 'backup_datetime': BACKUP_DATETIME, - 'prestage_status': PRESTAGE_STATUS, - 'prestage_versions': PRESTAGE_VERSIONS + "backup_status": BACKUP_STATUS, + "backup_datetime": BACKUP_DATETIME, + "prestage_status": PRESTAGE_STATUS, + "prestage_versions": PRESTAGE_VERSIONS, } FAKE_INSTALL_VALUES = { @@ -266,7 +267,7 @@ class FakeResponse(object): self.status_code = status_code self.content = content self.headers = {} - self.text = '' + self.text = "" def json(self): return json.loads(self.content) @@ -275,15 +276,13 @@ class FakeResponse(object): class BaseClientTest(testtools.TestCase): _client = None - def setUp(self): - super(BaseClientTest, self).setUp() - def mock_http_get(self, content, status_code=200): if isinstance(content, dict): content = json.dumps(content) self._client.http_client.get = mock.MagicMock( - return_value=FakeResponse(status_code, content)) + return_value=FakeResponse(status_code, content) + ) return self._client.http_client.get @@ -292,7 +291,8 @@ class BaseClientTest(testtools.TestCase): content = json.dumps(content) self._client.http_client.post = mock.MagicMock( - return_value=FakeResponse(status_code, content)) + return_value=FakeResponse(status_code, content) + ) return self._client.http_client.post @@ -301,13 +301,15 @@ class 
BaseClientTest(testtools.TestCase): content = json.dumps(content) self._client.http_client.put = mock.MagicMock( - return_value=FakeResponse(status_code, content)) + return_value=FakeResponse(status_code, content) + ) return self._client.http_client.put def mock_http_delete(self, status_code=204): self._client.http_client.delete = mock.MagicMock( - return_value=FakeResponse(status_code)) + return_value=FakeResponse(status_code) + ) return self._client.http_client.delete @@ -319,7 +321,7 @@ class BaseCommandTest(testtools.TestCase): self.client = self.app.client_manager.subcloud_manager self.parsed_args = None - def call(self, command, app_args=None, prog_name=''): + def call(self, command, app_args=None, prog_name=""): if app_args is None: app_args = [] cmd = command(self.app, app_args) diff --git a/distributedcloud-client/dcmanagerclient/tests/base_shell_test.py b/distributedcloud-client/dcmanagerclient/tests/base_shell_test.py index d2dbf42..2acea55 100644 --- a/distributedcloud-client/dcmanagerclient/tests/base_shell_test.py +++ b/distributedcloud-client/dcmanagerclient/tests/base_shell_test.py @@ -1,6 +1,6 @@ # Copyright 2015 Huawei Technologies Co., Ltd. # Copyright 2016 Ericsson AB. -# Copyright (c) 2017, 2019, 2021 Wind River Systems, Inc. +# Copyright (c) 2017, 2019, 2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -37,7 +37,7 @@ class BaseShellTests(testtools.TestCase): _shell = shell.DCManagerShell() _shell.run(argstr.split()) except SystemExit: - exc_type, exc_value, exc_traceback = sys.exc_info() + _, exc_value, _ = sys.exc_info() self.assertEqual(0, exc_value.code) finally: stdout = sys.stdout.getvalue() diff --git a/distributedcloud-client/dcmanagerclient/tests/test_client.py b/distributedcloud-client/dcmanagerclient/tests/test_client.py index 8493205..67f49c0 100644 --- a/distributedcloud-client/dcmanagerclient/tests/test_client.py +++ b/distributedcloud-client/dcmanagerclient/tests/test_client.py @@ -1,7 +1,7 @@ # Copyright 2015 - Huawei Technologies Co., Ltd. # Copyright 2016 - StackStorm, Inc. # Copyright 2016 - Ericsson AB. -# Copyright (c) 2017, 2019, 2021 Wind River Systems, Inc. +# Copyright (c) 2017, 2019, 2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
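# --- Illustrative sketch, not part of the patch --------------------------------
# The test_client.py and test_httpclient.py hunks that follow rename the
# generic "mock" parameters (e.g. to mock_client, mock_keystone_auth_session).
# Their order is fixed by unittest.mock's rule that stacked @mock.patch
# decorators apply bottom-up: the decorator closest to the function supplies
# the first mock parameter. The Target class below is a made-up stand-in; the
# stdlib unittest.mock is used here, while the tests import the external
# "mock" package, which behaves the same for this purpose.
from unittest import mock


class Target:
    a = "real-a"
    b = "real-b"


@mock.patch.object(Target, "a")   # outermost decorator -> last parameter
@mock.patch.object(Target, "b")   # innermost decorator -> first parameter
def check(mock_b, mock_a):
    assert mock_b is Target.b
    assert mock_a is Target.a


check()  # passes: parameters line up bottom-up with the decorators
# --------------------------------------------------------------------------------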
@@ -21,208 +21,232 @@ import tempfile import uuid import mock -import testtools - import osprofiler.profiler +import testtools from dcmanagerclient.api import client -AUTH_HTTP_URL = 'http://localhost:35357/v3' -AUTH_HTTPS_URL = AUTH_HTTP_URL.replace('http', 'https') -DCMANAGER_HTTP_URL = 'http://localhost:8119/v1.0' -DCMANAGER_HTTPS_URL = DCMANAGER_HTTP_URL.replace('http', 'https') -PROFILER_HMAC_KEY = 'SECRET_HMAC_KEY' -FAKE_KWARGS = {'user_domain_name': 'fake_user_domain_name', - 'user_domain_id': 'fake_user_domain_id', - 'project_domain_name': 'fake_project_domain_name', - 'project_domain_id': 'fake_project_domain_id'} +AUTH_HTTP_URL = "http://localhost:35357/v3" +AUTH_HTTPS_URL = AUTH_HTTP_URL.replace("http", "https") +DCMANAGER_HTTP_URL = "http://localhost:8119/v1.0" +DCMANAGER_HTTPS_URL = DCMANAGER_HTTP_URL.replace("http", "https") +PROFILER_HMAC_KEY = "SECRET_HMAC_KEY" +FAKE_KWARGS = { + "user_domain_name": "fake_user_domain_name", + "user_domain_id": "fake_user_domain_id", + "project_domain_name": "fake_project_domain_name", + "project_domain_id": "fake_project_domain_id", +} class BaseClientTests(testtools.TestCase): - @mock.patch('keystoneauth1.session.Session') - @mock.patch('dcmanagerclient.api.httpclient.HTTPClient') - def test_dcmanager_url_default(self, mock, mock_keystone_auth_session): + @mock.patch("keystoneauth1.session.Session") + @mock.patch("dcmanagerclient.api.httpclient.HTTPClient") + def test_dcmanager_url_default(self, mock_client, mock_keystone_auth_session): keystone_session_instance = mock_keystone_auth_session.return_value - token = keystone_session_instance.get_token.return_value = \ - str(uuid.uuid4()) - project_id = keystone_session_instance.get_project_id.return_value = \ - str(uuid.uuid4()) - user_id = keystone_session_instance.get_user_id.return_value = \ - str(uuid.uuid4()) - keystone_session_instance.get_endpoint.return_value = \ - DCMANAGER_HTTP_URL + token = keystone_session_instance.get_token.return_value = str(uuid.uuid4()) + project_id = keystone_session_instance.get_project_id.return_value = str( + uuid.uuid4() + ) + user_id = keystone_session_instance.get_user_id.return_value = str( + uuid.uuid4() + ) + keystone_session_instance.get_endpoint.return_value = DCMANAGER_HTTP_URL - expected_args = ( - DCMANAGER_HTTP_URL, token, project_id, user_id) + expected_args = (DCMANAGER_HTTP_URL, token, project_id, user_id) - expected_kwargs = { - 'cacert': None, - 'insecure': False - } + expected_kwargs = {"cacert": None, "insecure": False} - client.client(username='dcmanager', project_name='dcmanager', - auth_url=AUTH_HTTP_URL, api_key='password', - **FAKE_KWARGS) - self.assertTrue(mock.called) - self.assertEqual(mock.call_args[0], expected_args) - self.assertDictEqual(mock.call_args[1], expected_kwargs) + client.client( + username="dcmanager", + project_name="dcmanager", + auth_url=AUTH_HTTP_URL, + api_key="password", + **FAKE_KWARGS + ) + self.assertTrue(mock_client.called) + self.assertEqual(mock_client.call_args[0], expected_args) + self.assertDictEqual(mock_client.call_args[1], expected_kwargs) - @mock.patch('keystoneauth1.session.Session') - @mock.patch('dcmanagerclient.api.httpclient.HTTPClient') - def test_dcmanager_url_https_insecure(self, mock, - mock_keystone_auth_session): + @mock.patch("keystoneauth1.session.Session") + @mock.patch("dcmanagerclient.api.httpclient.HTTPClient") + def test_dcmanager_url_https_insecure( + self, mock_client, mock_keystone_auth_session + ): keystone_session_instance = mock_keystone_auth_session.return_value - token = 
keystone_session_instance.get_token.return_value = \ - str(uuid.uuid4()) - project_id = keystone_session_instance.get_project_id.return_value = \ - str(uuid.uuid4()) - user_id = keystone_session_instance.get_user_id.return_value = \ - str(uuid.uuid4()) - keystone_session_instance.get_endpoint.return_value = \ - DCMANAGER_HTTP_URL + token = keystone_session_instance.get_token.return_value = str(uuid.uuid4()) + project_id = keystone_session_instance.get_project_id.return_value = str( + uuid.uuid4() + ) + user_id = keystone_session_instance.get_user_id.return_value = str( + uuid.uuid4() + ) + keystone_session_instance.get_endpoint.return_value = DCMANAGER_HTTP_URL expected_args = (DCMANAGER_HTTPS_URL, token, project_id, user_id) - expected_kwargs = { - 'cacert': None, - 'insecure': True - } + expected_kwargs = {"cacert": None, "insecure": True} - client.client(dcmanager_url=DCMANAGER_HTTPS_URL, username='dcmanager', - project_name='dcmanager', auth_url=AUTH_HTTP_URL, - api_key='password', cacert=None, insecure=True, - **FAKE_KWARGS) + client.client( + dcmanager_url=DCMANAGER_HTTPS_URL, + username="dcmanager", + project_name="dcmanager", + auth_url=AUTH_HTTP_URL, + api_key="password", + cacert=None, + insecure=True, + **FAKE_KWARGS + ) - self.assertTrue(mock.called) - self.assertEqual(mock.call_args[0], expected_args) - self.assertDictEqual(mock.call_args[1], expected_kwargs) + self.assertTrue(mock_client.called) + self.assertEqual(mock_client.call_args[0], expected_args) + self.assertDictEqual(mock_client.call_args[1], expected_kwargs) - @mock.patch('keystoneauth1.session.Session') - @mock.patch('dcmanagerclient.api.httpclient.HTTPClient') - def test_dcmanager_url_https_secure(self, mock, - mock_keystone_auth_session): - fd, path = tempfile.mkstemp(suffix='.pem') + @mock.patch("keystoneauth1.session.Session") + @mock.patch("dcmanagerclient.api.httpclient.HTTPClient") + def test_dcmanager_url_https_secure( + self, mock_client, mock_keystone_auth_session + ): + fd, path = tempfile.mkstemp(suffix=".pem") keystone_session_instance = mock_keystone_auth_session.return_value - token = keystone_session_instance.get_token.return_value = \ - str(uuid.uuid4()) - project_id = keystone_session_instance.get_project_id.return_value = \ - str(uuid.uuid4()) - user_id = keystone_session_instance.get_user_id.return_value = \ - str(uuid.uuid4()) - keystone_session_instance.get_endpoint.return_value = \ - DCMANAGER_HTTPS_URL + token = keystone_session_instance.get_token.return_value = str(uuid.uuid4()) + project_id = keystone_session_instance.get_project_id.return_value = str( + uuid.uuid4() + ) + user_id = keystone_session_instance.get_user_id.return_value = str( + uuid.uuid4() + ) + keystone_session_instance.get_endpoint.return_value = DCMANAGER_HTTPS_URL expected_args = (DCMANAGER_HTTPS_URL, token, project_id, user_id) - expected_kwargs = { - 'cacert': path, - 'insecure': False - } + expected_kwargs = {"cacert": path, "insecure": False} try: client.client( dcmanager_url=DCMANAGER_HTTPS_URL, - username='dcmanager', - project_name='dcmanager', + username="dcmanager", + project_name="dcmanager", auth_url=AUTH_HTTP_URL, - api_key='password', + api_key="password", cacert=path, - insecure=False, **FAKE_KWARGS) + insecure=False, + **FAKE_KWARGS + ) finally: os.close(fd) os.unlink(path) - self.assertTrue(mock.called) - self.assertEqual(mock.call_args[0], expected_args) - self.assertDictEqual(mock.call_args[1], expected_kwargs) + self.assertTrue(mock_client.called) + self.assertEqual(mock_client.call_args[0], 
expected_args) + self.assertDictEqual(mock_client.call_args[1], expected_kwargs) - @mock.patch('keystoneauth1.session.Session') - def test_dcmanager_url_https_bad_cacert(self, mock_keystone_auth_session): + @mock.patch("keystoneauth1.session.Session") + def test_dcmanager_url_https_bad_cacert(self, _mock_keystone_auth_session): self.assertRaises( ValueError, client.client, dcmanager_url=DCMANAGER_HTTPS_URL, - username='dcmanager', - project_name='dcmanager', - api_key='password', + username="dcmanager", + project_name="dcmanager", + api_key="password", auth_url=AUTH_HTTP_URL, - cacert='/path/to/foobar', - insecure=False, **FAKE_KWARGS) + cacert="/path/to/foobar", + insecure=False, + **FAKE_KWARGS + ) - @mock.patch('logging.Logger.warning') - @mock.patch('keystoneauth1.session.Session') - def test_dcmanager_url_https_bad_insecure(self, mock_keystone_auth_session, - log_warning_mock): - fd, path = tempfile.mkstemp(suffix='.pem') + @mock.patch("logging.Logger.warning") + @mock.patch("keystoneauth1.session.Session") + def test_dcmanager_url_https_bad_insecure( + self, _mock_keystone_auth_session, mock_log_warning + ): + fd, path = tempfile.mkstemp(suffix=".pem") try: client.client( dcmanager_url=DCMANAGER_HTTPS_URL, - username='dcmanager', - project_name='dcmanager', - api_key='password', + username="dcmanager", + project_name="dcmanager", + api_key="password", auth_url=AUTH_HTTP_URL, cacert=path, insecure=True, - **FAKE_KWARGS) + **FAKE_KWARGS + ) finally: os.close(fd) os.unlink(path) - self.assertTrue(log_warning_mock.called) + self.assertTrue(mock_log_warning.called) - @mock.patch('keystoneauth1.session.Session') - @mock.patch('dcmanagerclient.api.httpclient.HTTPClient') - def test_dcmanager_profile_enabled(self, mock, mock_keystone_auth_session): + @mock.patch("keystoneauth1.session.Session") + @mock.patch("dcmanagerclient.api.httpclient.HTTPClient") + def test_dcmanager_profile_enabled( + self, mock_client, mock_keystone_auth_session + ): keystone_session_instance = mock_keystone_auth_session.return_value - token = keystone_session_instance.get_token.return_value = \ - str(uuid.uuid4()) - project_id = keystone_session_instance.get_project_id.return_value = \ - str(uuid.uuid4()) - user_id = keystone_session_instance.get_user_id.return_value = \ - str(uuid.uuid4()) - keystone_session_instance.get_endpoint.return_value = \ - DCMANAGER_HTTP_URL + token = keystone_session_instance.get_token.return_value = str(uuid.uuid4()) + project_id = keystone_session_instance.get_project_id.return_value = str( + uuid.uuid4() + ) + user_id = keystone_session_instance.get_user_id.return_value = str( + uuid.uuid4() + ) + keystone_session_instance.get_endpoint.return_value = DCMANAGER_HTTP_URL expected_args = (DCMANAGER_HTTP_URL, token, project_id, user_id) - expected_kwargs = { - 'cacert': None, - 'insecure': False - } + expected_kwargs = {"cacert": None, "insecure": False} client.client( - username='dcmanager', - project_name='dcmanager', + username="dcmanager", + project_name="dcmanager", auth_url=AUTH_HTTP_URL, - api_key='password', + api_key="password", profile=PROFILER_HMAC_KEY, - **FAKE_KWARGS) + **FAKE_KWARGS + ) - self.assertTrue(mock.called) - self.assertEqual(mock.call_args[0], expected_args) - self.assertDictEqual(mock.call_args[1], expected_kwargs) + self.assertTrue(mock_client.called) + self.assertEqual(mock_client.call_args[0], expected_args) + self.assertDictEqual(mock_client.call_args[1], expected_kwargs) profiler = osprofiler.profiler.get() self.assertEqual(profiler.hmac_key, 
PROFILER_HMAC_KEY) def test_no_api_key(self): - self.assertRaises(RuntimeError, client.client, - dcmanager_url=DCMANAGER_HTTP_URL, - username='dcmanager', project_name='dcmanager', - auth_url=AUTH_HTTP_URL, **FAKE_KWARGS) + self.assertRaises( + RuntimeError, + client.client, + dcmanager_url=DCMANAGER_HTTP_URL, + username="dcmanager", + project_name="dcmanager", + auth_url=AUTH_HTTP_URL, + **FAKE_KWARGS + ) def test_project_name_and_project_id(self): - self.assertRaises(RuntimeError, client.client, - dcmanager_url=DCMANAGER_HTTP_URL, - username='dcmanager', project_name='dcmanager', - project_id=str(uuid.uuid4()), - auth_url=AUTH_HTTP_URL, **FAKE_KWARGS) + self.assertRaises( + RuntimeError, + client.client, + dcmanager_url=DCMANAGER_HTTP_URL, + username="dcmanager", + project_name="dcmanager", + project_id=str(uuid.uuid4()), + auth_url=AUTH_HTTP_URL, + **FAKE_KWARGS + ) def test_user_name_and_user_id(self): - self.assertRaises(RuntimeError, client.client, - dcmanager_url=DCMANAGER_HTTP_URL, - username='dcmanager', project_name='dcmanager', - user_id=str(uuid.uuid4()), - auth_url=AUTH_HTTP_URL, **FAKE_KWARGS) + self.assertRaises( + RuntimeError, + client.client, + dcmanager_url=DCMANAGER_HTTP_URL, + username="dcmanager", + project_name="dcmanager", + user_id=str(uuid.uuid4()), + auth_url=AUTH_HTTP_URL, + **FAKE_KWARGS + ) diff --git a/distributedcloud-client/dcmanagerclient/tests/test_help_and_bash_completion.py b/distributedcloud-client/dcmanagerclient/tests/test_help_and_bash_completion.py index fef811a..c7d66a4 100644 --- a/distributedcloud-client/dcmanagerclient/tests/test_help_and_bash_completion.py +++ b/distributedcloud-client/dcmanagerclient/tests/test_help_and_bash_completion.py @@ -1,5 +1,5 @@ # Copyright 2016 Ericsson AB. -# Copyright (c) 2017-2021 Wind River Systems, Inc. +# Copyright (c) 2017-2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -23,18 +23,20 @@ from dcmanagerclient.tests import base_shell_test as base class TestCLIBashCompletionV1(base.BaseShellTests): def test_bash_completion(self): - bash_completion, stderr = self.shell('bash-completion') - self.assertIn('bash-completion', bash_completion) + bash_completion, stderr = self.shell("bash-completion") + self.assertIn("bash-completion", bash_completion) self.assertFalse(stderr) class TestCLIHelp(base.BaseShellTests): def test_help(self): required = [ - '.*?^usage: ', - '.*?^\s+help\s+print detailed help for another command' + r".*?^usage: ", + r".*?^\s+help\s+print detailed help for another command", ] - kb_help, stderr = self.shell('help') + kb_help, stderr = self.shell("help") for r in required: - self.assertThat((kb_help + stderr), - matchers.MatchesRegex(r, re.DOTALL | re.MULTILINE)) + self.assertThat( + (kb_help + stderr), + matchers.MatchesRegex(r, re.DOTALL | re.MULTILINE), + ) diff --git a/distributedcloud-client/dcmanagerclient/tests/test_httpclient.py b/distributedcloud-client/dcmanagerclient/tests/test_httpclient.py index 8afb491..6658ccb 100644 --- a/distributedcloud-client/dcmanagerclient/tests/test_httpclient.py +++ b/distributedcloud-client/dcmanagerclient/tests/test_httpclient.py @@ -1,6 +1,6 @@ # Copyright 2016 - StackStorm, Inc. # Copyright 2016 - Ericsson AB. -# Copyright (c) 2017-2021 Wind River Systems, Inc. +# Copyright (c) 2017-2021, 2024 Wind River Systems, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -19,40 +19,33 @@ import copy import uuid import mock +from osprofiler import _utils as osprofiler_utils +import osprofiler.profiler import requests import testtools -from osprofiler import _utils as osprofiler_utils -import osprofiler.profiler - from dcmanagerclient.api import httpclient -API_BASE_URL = 'http://localhost:8119/v1.0' -API_URL = '/os-quota-sets' +API_BASE_URL = "http://localhost:8119/v1.0" +API_URL = "/os-quota-sets" EXPECTED_URL = API_BASE_URL + API_URL AUTH_TOKEN = str(uuid.uuid4()) PROJECT_ID = str(uuid.uuid4()) USER_ID = str(uuid.uuid4()) -PROFILER_HMAC_KEY = 'SECRET_HMAC_KEY' +PROFILER_HMAC_KEY = "SECRET_HMAC_KEY" PROFILER_TRACE_ID = str(uuid.uuid4()) EXPECTED_AUTH_HEADERS = { - 'x-auth-token': AUTH_TOKEN, - 'X-Project-Id': PROJECT_ID, - 'X-User-Id': USER_ID + "x-auth-token": AUTH_TOKEN, + "X-Project-Id": PROJECT_ID, + "X-User-Id": USER_ID, } -EXPECTED_REQ_OPTIONS = { - 'headers': EXPECTED_AUTH_HEADERS -} +EXPECTED_REQ_OPTIONS = {"headers": EXPECTED_AUTH_HEADERS} -EXPECTED_BODY = { - 'k1': 'abc', - 'k2': 123, - 'k3': True -} +EXPECTED_BODY = {"k1": "abc", "k2": 123, "k3": True} class FakeRequest(object): @@ -65,7 +58,7 @@ class FakeResponse(object): self.request = FakeRequest(method) self.url = url self.status_code = status_code - self.text = '' + self.text = "" class HTTPClientTest(testtools.TestCase): @@ -73,227 +66,130 @@ class HTTPClientTest(testtools.TestCase): super(HTTPClientTest, self).setUp() osprofiler.profiler.init(None) self.client = httpclient.HTTPClient( - API_BASE_URL, - AUTH_TOKEN, - PROJECT_ID, - USER_ID + API_BASE_URL, AUTH_TOKEN, PROJECT_ID, USER_ID ) - @mock.patch.object( - requests, - 'get', - mock.MagicMock(return_value=FakeResponse('get', EXPECTED_URL, 200)) - ) - def test_get_request_options(self): + @mock.patch.object(requests, "get") + def test_get_request_options(self, mock_requests_get): osprofiler.profiler.clean() self.client.get(API_URL) - requests.get.assert_called_with( - EXPECTED_URL, - **EXPECTED_REQ_OPTIONS - ) + mock_requests_get.return_value = FakeResponse("get", EXPECTED_URL, 200) + mock_requests_get.assert_called_with(EXPECTED_URL, **EXPECTED_REQ_OPTIONS) - @mock.patch.object( - requests, - 'get', - mock.MagicMock(return_value=FakeResponse('get', EXPECTED_URL, 200)) - ) - def test_get_request_options_with_headers_for_get(self): - headers = {'foo': 'bar'} + @mock.patch.object(requests, "get") + def test_get_request_options_with_headers_for_get(self, mock_requests_get): + headers = {"foo": "bar"} self.client.get(API_URL, headers=headers) + mock_requests_get.return_value = FakeResponse("get", EXPECTED_URL, 200) expected_options = copy.deepcopy(EXPECTED_REQ_OPTIONS) - expected_options['headers'].update(headers) + expected_options["headers"].update(headers) - requests.get.assert_called_with( - EXPECTED_URL, - **expected_options - ) + mock_requests_get.assert_called_with(EXPECTED_URL, **expected_options) - @mock.patch.object( - osprofiler.profiler._Profiler, - 'get_base_id', - mock.MagicMock(return_value=PROFILER_TRACE_ID) - ) - @mock.patch.object( - osprofiler.profiler._Profiler, - 'get_id', - mock.MagicMock(return_value=PROFILER_TRACE_ID) - ) - @mock.patch.object( - requests, - 'get', - mock.MagicMock(return_value=FakeResponse('get', EXPECTED_URL, 200)) - ) - def test_get_request_options_with_profile_enabled(self): + @mock.patch.object(osprofiler.profiler._Profiler, "get_base_id") + 
@mock.patch.object(osprofiler.profiler._Profiler, "get_id") + @mock.patch.object(requests, "get") + def test_get_request_options_with_profile_enabled( + self, mock_requests_get, mock_profiler_get_id, mock_profiler_get_base_id + ): osprofiler.profiler.clean() osprofiler.profiler.init(PROFILER_HMAC_KEY) - data = {'base_id': PROFILER_TRACE_ID, 'parent_id': PROFILER_TRACE_ID} + mock_requests_get.return_value = FakeResponse("get", EXPECTED_URL, 200) + mock_profiler_get_id.return_value = PROFILER_TRACE_ID + mock_profiler_get_base_id.return_value = PROFILER_TRACE_ID + + data = {"base_id": PROFILER_TRACE_ID, "parent_id": PROFILER_TRACE_ID} signed_data = osprofiler_utils.signed_pack(data, PROFILER_HMAC_KEY) - headers = { - 'X-Trace-Info': signed_data[0], - 'X-Trace-HMAC': signed_data[1] - } + headers = {"X-Trace-Info": signed_data[0], "X-Trace-HMAC": signed_data[1]} self.client.get(API_URL) expected_options = copy.deepcopy(EXPECTED_REQ_OPTIONS) - expected_options['headers'].update(headers) + expected_options["headers"].update(headers) - requests.get.assert_called_with( - EXPECTED_URL, - **expected_options - ) + mock_requests_get.assert_called_with(EXPECTED_URL, **expected_options) - @mock.patch.object( - requests, - 'post', - mock.MagicMock(return_value=FakeResponse('post', EXPECTED_URL, 201)) - ) - def test_get_request_options_with_headers_for_post(self): - headers = {'foo': 'bar'} + @mock.patch.object(requests, "post") + def test_get_request_options_with_headers_for_post(self, mock_requests_post): + headers = {"foo": "bar"} self.client.post(API_URL, EXPECTED_BODY, headers=headers) + mock_requests_post.return_value = FakeResponse("post", EXPECTED_URL, 201) expected_options = copy.deepcopy(EXPECTED_REQ_OPTIONS) - expected_options['headers'].update(headers) - expected_options['headers']['content-type'] = 'application/json' + expected_options["headers"].update(headers) + expected_options["headers"]["content-type"] = "application/json" - requests.post.assert_called_with( - EXPECTED_URL, - EXPECTED_BODY, - **expected_options + mock_requests_post.assert_called_with( + EXPECTED_URL, EXPECTED_BODY, **expected_options ) - @mock.patch.object( - requests, - 'put', - mock.MagicMock(return_value=FakeResponse('put', EXPECTED_URL, 200)) - ) - def test_get_request_options_with_headers_for_put(self): - headers = {'foo': 'bar'} + @mock.patch.object(requests, "put") + def test_get_request_options_with_headers_for_put(self, mock_requests_put): + headers = {"foo": "bar"} self.client.put(API_URL, EXPECTED_BODY, headers=headers) + mock_requests_put.return_value = FakeResponse("put", EXPECTED_URL, 200) expected_options = copy.deepcopy(EXPECTED_REQ_OPTIONS) - expected_options['headers'].update(headers) - expected_options['headers']['content-type'] = 'application/json' + expected_options["headers"].update(headers) + expected_options["headers"]["content-type"] = "application/json" - requests.put.assert_called_with( - EXPECTED_URL, - EXPECTED_BODY, - **expected_options + mock_requests_put.assert_called_with( + EXPECTED_URL, EXPECTED_BODY, **expected_options ) - @mock.patch.object( - requests, - 'delete', - mock.MagicMock(return_value=FakeResponse('delete', EXPECTED_URL, 200)) - ) - def test_get_request_options_with_headers_for_delete(self): - headers = {'foo': 'bar'} + @mock.patch.object(requests, "delete") + def test_get_request_options_with_headers_for_delete(self, mock_requests_delete): + headers = {"foo": "bar"} self.client.delete(API_URL, headers=headers) + mock_requests_delete.return_value = FakeResponse("delete", 
EXPECTED_URL, 200) expected_options = copy.deepcopy(EXPECTED_REQ_OPTIONS) - expected_options['headers'].update(headers) + expected_options["headers"].update(headers) - requests.delete.assert_called_with( - EXPECTED_URL, - **expected_options - ) + mock_requests_delete.assert_called_with(EXPECTED_URL, **expected_options) - @mock.patch.object( - httpclient.HTTPClient, - '_get_request_options', - mock.MagicMock(return_value=copy.deepcopy(EXPECTED_REQ_OPTIONS)) - ) - @mock.patch.object( - requests, - 'get', - mock.MagicMock(return_value=FakeResponse('get', EXPECTED_URL, 200)) - ) - def test_http_get(self): + @mock.patch.object(httpclient.HTTPClient, "_get_request_options") + @mock.patch.object(requests, "get") + def test_http_get(self, mock_requests_get, mock_get_request_options): self.client.get(API_URL) + mock_requests_get.return_value = FakeResponse("get", EXPECTED_URL, 200) + mock_get_request_options.return_value = copy.deepcopy(EXPECTED_REQ_OPTIONS) - httpclient.HTTPClient._get_request_options.assert_called_with( - 'get', - None - ) + mock_get_request_options.assert_called_with("get", None) + mock_requests_get.assert_called_with(EXPECTED_URL) - requests.get.assert_called_with( - EXPECTED_URL, - **EXPECTED_REQ_OPTIONS - ) - - @mock.patch.object( - httpclient.HTTPClient, - '_get_request_options', - mock.MagicMock(return_value=copy.deepcopy(EXPECTED_REQ_OPTIONS)) - ) - @mock.patch.object( - requests, - 'post', - mock.MagicMock(return_value=FakeResponse('post', EXPECTED_URL, 201)) - ) - def test_http_post(self): + @mock.patch.object(httpclient.HTTPClient, "_get_request_options") + @mock.patch.object(requests, "post") + def test_http_post(self, mock_requests_post, mock_get_request_options): self.client.post(API_URL, EXPECTED_BODY) + mock_get_request_options.return_value = copy.deepcopy(EXPECTED_REQ_OPTIONS) + mock_requests_post.return_value = FakeResponse("post", EXPECTED_URL, 201) - httpclient.HTTPClient._get_request_options.assert_called_with( - 'post', - None - ) + mock_get_request_options.assert_called_with("post", None) + mock_requests_post.assert_called_with(EXPECTED_URL, EXPECTED_BODY) - requests.post.assert_called_with( - EXPECTED_URL, - EXPECTED_BODY, - **EXPECTED_REQ_OPTIONS - ) - - @mock.patch.object( - httpclient.HTTPClient, - '_get_request_options', - mock.MagicMock(return_value=copy.deepcopy(EXPECTED_REQ_OPTIONS)) - ) - @mock.patch.object( - requests, - 'put', - mock.MagicMock(return_value=FakeResponse('put', EXPECTED_URL, 200)) - ) - def test_http_put(self): + @mock.patch.object(httpclient.HTTPClient, "_get_request_options") + @mock.patch.object(requests, "put") + def test_http_put(self, mock_requests_put, mock_get_request_options): self.client.put(API_URL, EXPECTED_BODY) + mock_get_request_options.return_value = copy.deepcopy(EXPECTED_REQ_OPTIONS) + mock_requests_put.return_value = FakeResponse("put", EXPECTED_URL, 200) - httpclient.HTTPClient._get_request_options.assert_called_with( - 'put', - None - ) + mock_get_request_options.assert_called_with("put", None) + mock_requests_put.assert_called_with(EXPECTED_URL, EXPECTED_BODY) - requests.put.assert_called_with( - EXPECTED_URL, - EXPECTED_BODY, - **EXPECTED_REQ_OPTIONS - ) - - @mock.patch.object( - httpclient.HTTPClient, - '_get_request_options', - mock.MagicMock(return_value=copy.deepcopy(EXPECTED_REQ_OPTIONS)) - ) - @mock.patch.object( - requests, - 'delete', - mock.MagicMock(return_value=FakeResponse('delete', EXPECTED_URL, 200)) - ) - def test_http_delete(self): + @mock.patch.object(httpclient.HTTPClient, 
"_get_request_options") + @mock.patch.object(requests, "delete") + def test_http_delete(self, mock_requests_delete, mock_get_request_options): self.client.delete(API_URL) + mock_get_request_options.return_value = copy.deepcopy(EXPECTED_REQ_OPTIONS) + mock_requests_delete.return_value = FakeResponse("delete", EXPECTED_URL, 200) - httpclient.HTTPClient._get_request_options.assert_called_with( - 'delete', - None - ) - - requests.delete.assert_called_with( - EXPECTED_URL, - **EXPECTED_REQ_OPTIONS - ) + mock_get_request_options.assert_called_with("delete", None) + mock_requests_delete.assert_called_with(EXPECTED_URL) diff --git a/distributedcloud-client/dcmanagerclient/tests/test_shell.py b/distributedcloud-client/dcmanagerclient/tests/test_shell.py index 8144f45..1d8bd00 100644 --- a/distributedcloud-client/dcmanagerclient/tests/test_shell.py +++ b/distributedcloud-client/dcmanagerclient/tests/test_shell.py @@ -1,5 +1,5 @@ # Copyright 2016 EricssonAB. -# Copyright (c) 2017-2021 Wind River Systems, Inc. +# Copyright (c) 2017-2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,181 +15,168 @@ # import mock - from dcmanagerclient.tests import base_shell_test as base class TestShell(base.BaseShellTests): - @mock.patch('dcmanagerclient.api.client.determine_client_version') - def test_dcmanager_version(self, mock): - self.shell( - '--os-dcmanager-version=v1 quota-defaults' - ) - self.assertTrue(mock.called) - dcmanager_version = mock.call_args - self.assertEqual('v1.0', dcmanager_version[0][0]) + @mock.patch("dcmanagerclient.api.client.determine_client_version") + def test_dcmanager_version(self, mock_client_version): + self.shell("--os-dcmanager-version=v1 quota-defaults") + self.assertTrue(mock_client_version.called) + dcmanager_version = mock_client_version.call_args + self.assertEqual("v1.0", dcmanager_version[0][0]) def test_dcmanager_help(self): - help_results = self.shell('--help') + help_results = self.shell("--help") self.assertTrue("Commands for API" in help_results[0]) - @mock.patch('dcmanagerclient.api.client.determine_client_version') - def test_default_dcmanager_version(self, mock): - default_version = 'v1.0' - self.shell('quota defaults') - self.assertTrue(mock.called) - dcmanager_version = mock.call_args + @mock.patch("dcmanagerclient.api.client.determine_client_version") + def test_default_dcmanager_version(self, mock_client_version): + default_version = "v1.0" + self.shell("quota defaults") + self.assertTrue(mock_client_version.called) + dcmanager_version = mock_client_version.call_args self.assertEqual(default_version, dcmanager_version[0][0]) - @mock.patch('dcmanagerclient.api.client.client') - def test_env_variables(self, mock): + @mock.patch("dcmanagerclient.api.client.client") + def test_env_variables(self, mock_client): self.shell( - '--os-auth-url=https://127.0.0.1:35357/v3 ' - '--os-username=admin ' - '--os-password=1234 ' - '--os-tenant-name=admin ' - 'quota defaults' + "--os-auth-url=https://127.0.0.1:35357/v3 " + "--os-username=admin " + "--os-password=1234 " + "--os-tenant-name=admin " + "quota defaults" ) - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('https://127.0.0.1:35357/v3', params[1]['auth_url']) - self.assertEqual('admin', params[1]['username']) - self.assertEqual('admin', params[1]['project_name']) + self.assertTrue(mock_client.called) + params = mock_client.call_args + 
self.assertEqual("https://127.0.0.1:35357/v3", params[1]["auth_url"]) + self.assertEqual("admin", params[1]["username"]) + self.assertEqual("admin", params[1]["project_name"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_env_without_auth_url(self, mock): + @mock.patch("dcmanagerclient.api.client.client") + def test_env_without_auth_url(self, mock_client): self.shell( - '--os-username=admin ' - '--os-password=1234 ' - '--os-tenant-name=admin ' - 'quota defaults' + "--os-username=admin " + "--os-password=1234 " + "--os-tenant-name=admin " + "quota defaults" ) - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('', params[1]['auth_url']) - self.assertEqual('admin', params[1]['username']) - self.assertEqual('admin', params[1]['project_name']) + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("", params[1]["auth_url"]) + self.assertEqual("admin", params[1]["username"]) + self.assertEqual("admin", params[1]["project_name"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_kb_service_type(self, mock): - self.shell('--os-service-type=dcmanager') - self.assertTrue(mock.called) - parameters = mock.call_args - self.assertEqual('dcmanager', parameters[1]['service_type']) + @mock.patch("dcmanagerclient.api.client.client") + def test_kb_service_type(self, mock_client): + self.shell("--os-service-type=dcmanager") + self.assertTrue(mock_client.called) + parameters = mock_client.call_args + self.assertEqual("dcmanager", parameters[1]["service_type"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_kb_default_service_type(self, mock): - self.shell('quota defaults') - self.assertTrue(mock.called) - params = mock.call_args + @mock.patch("dcmanagerclient.api.client.client") + def test_kb_default_service_type(self, mock_client): + self.shell("quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args # Default service type is dcmanager - self.assertEqual('dcmanager', params[1]['service_type']) + self.assertEqual("dcmanager", params[1]["service_type"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_kb_endpoint_type(self, mock): - self.shell('--os-endpoint-type=adminURL quota-defaults') - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('adminURL', params[1]['endpoint_type']) + @mock.patch("dcmanagerclient.api.client.client") + def test_kb_endpoint_type(self, mock_client): + self.shell("--os-endpoint-type=adminURL quota-defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("adminURL", params[1]["endpoint_type"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_kb_default_endpoint_type(self, mock): - self.shell('quota defaults') - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('internalURL', params[1]['endpoint_type']) + @mock.patch("dcmanagerclient.api.client.client") + def test_kb_default_endpoint_type(self, mock_client): + self.shell("quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("internalURL", params[1]["endpoint_type"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_os_auth_token(self, mock): - self.shell( - '--os-auth-token=abcd1234 ' - 'quota defaults' - ) - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('abcd1234', params[1]['auth_token']) + @mock.patch("dcmanagerclient.api.client.client") + def test_os_auth_token(self, mock_client): + 
self.shell("--os-auth-token=abcd1234 quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("abcd1234", params[1]["auth_token"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_command_without_dcmanager_url(self, mock): - self.shell( - 'quota defaults' - ) - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('', params[1]['dcmanager_url']) + @mock.patch("dcmanagerclient.api.client.client") + def test_command_without_dcmanager_url(self, mock_client): + self.shell("quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("", params[1]["dcmanager_url"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_command_with_dcmanager_url(self, mock): - self.shell( - '--dcmanager-url=http://localhost:8118/v1.0 quota-defaults' - ) - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('http://localhost:8118/v1.0', - params[1]['dcmanager_url']) + @mock.patch("dcmanagerclient.api.client.client") + def test_command_with_dcmanager_url(self, mock_client): + self.shell("--dcmanager-url=http://localhost:8118/v1.0 quota-defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("http://localhost:8118/v1.0", params[1]["dcmanager_url"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_command_without_project_name(self, mock): - self.shell( - 'quota defaults' - ) - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('', params[1]['project_name']) + @mock.patch("dcmanagerclient.api.client.client") + def test_command_without_project_name(self, mock_client): + self.shell("quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("", params[1]["project_name"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_dcmanager_profile(self, mock): - self.shell('--profile=SECRET_HMAC_KEY quota defaults') - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('SECRET_HMAC_KEY', params[1]['profile']) + @mock.patch("dcmanagerclient.api.client.client") + def test_dcmanager_profile(self, mock_client): + self.shell("--profile=SECRET_HMAC_KEY quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("SECRET_HMAC_KEY", params[1]["profile"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_dcmanager_without_profile(self, mock): - self.shell('quota defaults') - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual(None, params[1]['profile']) + @mock.patch("dcmanagerclient.api.client.client") + def test_dcmanager_without_profile(self, mock_client): + self.shell("quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual(None, params[1]["profile"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_dcmanager_project_name(self, mock): - self.shell('--os-project-name default quota defaults') - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('default', params[1]['project_name']) + @mock.patch("dcmanagerclient.api.client.client") + def test_dcmanager_project_name(self, mock_client): + self.shell("--os-project-name default quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("default", params[1]["project_name"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_dcmanager_tenant_name(self, 
mock): - self.shell('--os-tenant-name default quota defaults') - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('default', params[1]['project_name']) + @mock.patch("dcmanagerclient.api.client.client") + def test_dcmanager_tenant_name(self, mock_client): + self.shell("--os-tenant-name default quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("default", params[1]["project_name"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_dcmanager_project_domain_name(self, mock): - self.shell('--os-project-domain-name default quota defaults') - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('default', params[1]['project_domain_name']) + @mock.patch("dcmanagerclient.api.client.client") + def test_dcmanager_project_domain_name(self, mock_client): + self.shell("--os-project-domain-name default quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("default", params[1]["project_domain_name"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_dcmanager_project_domain_id(self, mock): - self.shell('--os-project-domain-id default quota defaults') - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('default', params[1]['project_domain_id']) + @mock.patch("dcmanagerclient.api.client.client") + def test_dcmanager_project_domain_id(self, mock_client): + self.shell("--os-project-domain-id default quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("default", params[1]["project_domain_id"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_dcmanager_user_domain_name(self, mock): - self.shell('--os-user-domain-name default quota defaults') - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('default', params[1]['user_domain_name']) + @mock.patch("dcmanagerclient.api.client.client") + def test_dcmanager_user_domain_name(self, mock_client): + self.shell("--os-user-domain-name default quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("default", params[1]["user_domain_name"]) - @mock.patch('dcmanagerclient.api.client.client') - def test_dcmanager_user_domain_id(self, mock): - self.shell('--os-user-domain-id default quota defaults') - self.assertTrue(mock.called) - params = mock.call_args - self.assertEqual('default', params[1]['user_domain_id']) + @mock.patch("dcmanagerclient.api.client.client") + def test_dcmanager_user_domain_id(self, mock_client): + self.shell("--os-user-domain-id default quota defaults") + self.assertTrue(mock_client.called) + params = mock_client.call_args + self.assertEqual("default", params[1]["user_domain_id"]) diff --git a/distributedcloud-client/dcmanagerclient/tests/v1/mixins.py b/distributedcloud-client/dcmanagerclient/tests/v1/mixins.py index af2ed9c..36539a5 100644 --- a/distributedcloud-client/dcmanagerclient/tests/v1/mixins.py +++ b/distributedcloud-client/dcmanagerclient/tests/v1/mixins.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2020-2023 Wind River Systems, Inc. +# Copyright (c) 2020-2024 Wind River Systems, Inc. 
# # SPDX-License-Identifier: Apache-2.0 # @@ -19,9 +19,8 @@ class UpdateStrategyMixin(object): - provide: self.apply_command - provide: self.abort_command """ - def setUp(self): - super(UpdateStrategyMixin, self).setUp() - self.results_length = 7 + + results_length = 7 def test_create_strategy(self): """Test that if no strategy exists, one can be created.""" @@ -37,7 +36,7 @@ class UpdateStrategyMixin(object): # invoke the backend method for the CLI. # Returns a tuple of field descriptions, and a second tuple of values - fields, results = self.call(self.create_command) + _, results = self.call(self.create_command) # results is a tuple of expected length self.assertEqual(len(results), self.results_length) @@ -55,14 +54,16 @@ class UpdateStrategyMixin(object): # prepare mocked results manager_to_test = self.sw_update_manager expected_strategy_type = manager_to_test.update_type - expected_apply_type = 'parallel' - strategy = utils.make_strategy(strategy_type=expected_strategy_type, - subcloud_apply_type=expected_apply_type) + expected_apply_type = "parallel" + strategy = utils.make_strategy( + strategy_type=expected_strategy_type, + subcloud_apply_type=expected_apply_type, + ) manager_to_test.update_sw_strategy_detail.return_value = strategy # invoke the backend method for the CLI. # Returns a tuple of field descriptions, and a second tuple of values - fields, results = self.call(self.show_command) + _, results = self.call(self.show_command) # results is a tuple of expected length self.assertEqual(len(results), self.results_length) # common result tuple values are @@ -80,14 +81,16 @@ class UpdateStrategyMixin(object): # prepare mocked results manager_to_test = self.sw_update_manager expected_strategy_type = manager_to_test.update_type - expected_apply_type = 'parallel' - strategy = utils.make_strategy(strategy_type=expected_strategy_type, - subcloud_apply_type=expected_apply_type) + expected_apply_type = "parallel" + strategy = utils.make_strategy( + strategy_type=expected_strategy_type, + subcloud_apply_type=expected_apply_type, + ) manager_to_test.apply_sw_update_strategy.return_value = strategy # invoke the backend method for the CLI. # Returns a tuple of field descriptions, and a second tuple of values - fields, results = self.call(self.apply_command) + _, results = self.call(self.apply_command) # results is a tuple of expected length self.assertEqual(len(results), self.results_length) # common result tuple values are @@ -105,14 +108,16 @@ class UpdateStrategyMixin(object): # prepare mocked results manager_to_test = self.sw_update_manager expected_strategy_type = manager_to_test.update_type - expected_apply_type = 'parallel' - strategy = utils.make_strategy(strategy_type=expected_strategy_type, - subcloud_apply_type=expected_apply_type) + expected_apply_type = "parallel" + strategy = utils.make_strategy( + strategy_type=expected_strategy_type, + subcloud_apply_type=expected_apply_type, + ) manager_to_test.abort_sw_update_strategy.return_value = strategy # invoke the backend method for the CLI. 
# Returns a tuple of field descriptions, and a second tuple of values - fields, results = self.call(self.abort_command) + _, results = self.call(self.abort_command) # results is a tuple of expected length self.assertEqual(len(results), self.results_length) # common result tuple values are @@ -130,14 +135,16 @@ class UpdateStrategyMixin(object): # prepare mocked results manager_to_test = self.sw_update_manager expected_strategy_type = manager_to_test.update_type - expected_apply_type = 'parallel' - strategy = utils.make_strategy(strategy_type=expected_strategy_type, - subcloud_apply_type=expected_apply_type) + expected_apply_type = "parallel" + strategy = utils.make_strategy( + strategy_type=expected_strategy_type, + subcloud_apply_type=expected_apply_type, + ) manager_to_test.delete_sw_update_strategy.return_value = strategy # invoke the backend method for the CLI. # Returns a tuple of field descriptions, and a second tuple of values - fields, results = self.call(self.delete_command) + _, results = self.call(self.delete_command) # results is a tuple of expected length self.assertEqual(len(results), self.results_length) # common result tuple values are diff --git a/distributedcloud-client/dcmanagerclient/tests/v1/test_patch_update_strategy.py b/distributedcloud-client/dcmanagerclient/tests/v1/test_patch_update_strategy.py index f1fe1f0..e940f5b 100644 --- a/distributedcloud-client/dcmanagerclient/tests/v1/test_patch_update_strategy.py +++ b/distributedcloud-client/dcmanagerclient/tests/v1/test_patch_update_strategy.py @@ -1,13 +1,13 @@ # -# Copyright (c) 2023 Wind River Systems, Inc. +# Copyright (c) 2023-2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # from dcmanagerclient.commands.v1 import sw_patch_manager as cli_cmd from dcmanagerclient.tests import base -from dcmanagerclient.tests.v1.mixins import UpdateStrategyMixin from dcmanagerclient.tests.v1 import utils +from dcmanagerclient.tests.v1.mixins import UpdateStrategyMixin class TestPatchUpdateStrategy(UpdateStrategyMixin, base.BaseCommandTest): diff --git a/distributedcloud-client/dcmanagerclient/tests/v1/test_phased_subcloud_deploy.py b/distributedcloud-client/dcmanagerclient/tests/v1/test_phased_subcloud_deploy.py index 43b1b41..c640a1e 100644 --- a/distributedcloud-client/dcmanagerclient/tests/v1/test_phased_subcloud_deploy.py +++ b/distributedcloud-client/dcmanagerclient/tests/v1/test_phased_subcloud_deploy.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Wind River Systems, Inc. +# Copyright (c) 2023-2024 Wind River Systems, Inc. 
# # SPDX-License-Identifier: Apache-2.0 # @@ -28,9 +28,9 @@ class TestCLIPhasedSubcloudDeployManagerV1(base.BaseCommandTest): self.client.subcloud_deploy_create.return_value = [ base.SUBCLOUD_RESOURCE] - with tempfile.NamedTemporaryFile(mode='w') as bootstrap_file,\ - tempfile.NamedTemporaryFile(mode='w') as config_file,\ - tempfile.NamedTemporaryFile(mode='w') as install_file: + with tempfile.NamedTemporaryFile(mode='w') as bootstrap_file, \ + tempfile.NamedTemporaryFile(mode='w') as config_file, \ + tempfile.NamedTemporaryFile(mode='w') as install_file: bootstrap_file_path = os.path.abspath(bootstrap_file.name) config_file_path = os.path.abspath(config_file.name) @@ -174,9 +174,9 @@ class TestCLIPhasedSubcloudDeployManagerV1(base.BaseCommandTest): self.client.subcloud_deploy_resume.return_value = [ base.SUBCLOUD_RESOURCE] - with tempfile.NamedTemporaryFile(mode='w') as bootstrap_file,\ - tempfile.NamedTemporaryFile(mode='w') as config_file,\ - tempfile.NamedTemporaryFile(mode='w') as install_file: + with tempfile.NamedTemporaryFile(mode='w') as bootstrap_file, \ + tempfile.NamedTemporaryFile(mode='w') as config_file, \ + tempfile.NamedTemporaryFile(mode='w') as install_file: bootstrap_file_path = os.path.abspath(bootstrap_file.name) config_file_path = os.path.abspath(config_file.name) @@ -199,9 +199,9 @@ class TestCLIPhasedSubcloudDeployManagerV1(base.BaseCommandTest): self.client.subcloud_deploy_resume.return_value = [ base.SUBCLOUD_RESOURCE] - with tempfile.NamedTemporaryFile(mode='w') as bootstrap_file,\ - tempfile.NamedTemporaryFile(mode='w') as config_file,\ - tempfile.NamedTemporaryFile(mode='w') as install_file: + with tempfile.NamedTemporaryFile(mode='w') as bootstrap_file, \ + tempfile.NamedTemporaryFile(mode='w') as config_file, \ + tempfile.NamedTemporaryFile(mode='w') as install_file: bootstrap_file_path = os.path.abspath(bootstrap_file.name) config_file_path = os.path.abspath(config_file.name) diff --git a/distributedcloud-client/dcmanagerclient/tests/v1/test_step_manager.py b/distributedcloud-client/dcmanagerclient/tests/v1/test_step_manager.py index bb7e297..21acae7 100644 --- a/distributedcloud-client/dcmanagerclient/tests/v1/test_step_manager.py +++ b/distributedcloud-client/dcmanagerclient/tests/v1/test_step_manager.py @@ -1,5 +1,5 @@ # Copyright (c) 2017 Ericsson AB. -# Copyright (c) 2020-2021 Wind River Systems, Inc. +# Copyright (c) 2020-2021, 2024 Wind River Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,7 +15,6 @@ # import mock - from oslo_utils import timeutils from dcmanagerclient.api.v1.strategy_step_manager import StrategyStep @@ -24,8 +23,8 @@ from dcmanagerclient.tests import base TEST_CLOUD_ID = 1 TEST_STAGE = 1 -TEST_STATE = 'initializing' -TEST_DETAILS = 'some details' +TEST_STATE = "initializing" +TEST_DETAILS = "some details" TIME_NOW = timeutils.utcnow().isoformat() TEST_STARTED_AT = TIME_NOW TEST_FINISHED_AT = TIME_NOW @@ -35,23 +34,24 @@ TEST_UPDATED_AT = TIME_NOW class TestCLI(base.BaseCommandTest): - def setUp(self): - super(TestCLI, self).setUp() - def test_list_strategy_steps(self): - sample_step = StrategyStep(mock, - TEST_CLOUD_ID, - TEST_STAGE, - TEST_STATE, - TEST_DETAILS, - TEST_STARTED_AT, - TEST_FINISHED_AT, - TEST_CREATED_AT, - TEST_UPDATED_AT) + sample_step = StrategyStep( + mock, + TEST_CLOUD_ID, + TEST_STAGE, + TEST_STATE, + TEST_DETAILS, + TEST_STARTED_AT, + TEST_FINISHED_AT, + TEST_CREATED_AT, + TEST_UPDATED_AT, + ) results = [] results.append(sample_step) - self.app.client_manager.strategy_step_manager.strategy_step_manager.\ - list_strategy_steps.return_value = results + step_manager = ( + self.app.client_manager.strategy_step_manager.strategy_step_manager + ) + step_manager.list_strategy_steps.return_value = results actual_call = self.call(cli_cmd.ListSwUpdateStrategyStep) # ListStrategyStep returns a tuple, want the second field of the tuple diff --git a/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_backup_manager.py b/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_backup_manager.py index d9fa8d4..7ea25b1 100644 --- a/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_backup_manager.py +++ b/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_backup_manager.py @@ -1,19 +1,20 @@ # -# Copyright (c) 2022-2023 Wind River Systems, Inc. +# Copyright (c) 2022-2024 Wind River Systems, Inc. 
# # SPDX-License-Identifier: Apache-2.0 # import base64 -import mock import os -from dcmanagerclient.commands.v1 \ - import subcloud_backup_manager as subcloud_backup_cmd +import mock + +from dcmanagerclient.commands.v1 import ( + subcloud_backup_manager as subcloud_backup_cmd, +) from dcmanagerclient.exceptions import DCManagerClientException from dcmanagerclient.tests import base - OVERRIDE_VALUES = """--- platform_backup_filename_prefix: test openstack_app_name: test @@ -24,450 +25,641 @@ OVERRIDE_VALUES = """--- class TestCLISubcloudBackUpManagerV1(base.BaseCommandTest): def setUp(self): - super(TestCLISubcloudBackUpManagerV1, self).setUp() + super().setUp() self.client = self.app.client_manager.subcloud_backup_manager def test_backup_create_subcloud(self): - self.client.subcloud_backup_manager.backup_subcloud_create.\ - return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_backup_manager.backup_subcloud_create.return_value = [ + base.SUBCLOUD_RESOURCE + ] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) actual_call = self.call( subcloud_backup_cmd.CreateSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--local-only', - '--registry-images', - '--backup-values', backupPath, - '--sysadmin-password', 'testpassword']) + app_args=[ + "--subcloud", + "subcloud1", + "--local-only", + "--registry-images", + "--backup-values", + backup_path, + "--sysadmin-password", + "testpassword", + ], + ) self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST, actual_call[1]) def test_backup_create_group(self): - self.client.subcloud_backup_manager.backup_subcloud_create.\ - return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_backup_manager.backup_subcloud_create.return_value = [ + base.SUBCLOUD_RESOURCE + ] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) actual_call = self.call( subcloud_backup_cmd.CreateSubcloudBackup, - app_args=['--group', 'test', - '--backup-values', backupPath, - '--sysadmin-password', 'testpassword']) + app_args=[ + "--group", + "test", + "--backup-values", + backup_path, + "--sysadmin-password", + "testpassword", + ], + ) self.assertEqual([base.SUBCLOUD_FIELD_RESULT_LIST], actual_call[1]) def test_backup_create_group_subcloud(self): - self.client.subcloud_backup_manager.backup_subcloud_create.\ - return_value = [] + self.client.subcloud_backup_manager.backup_subcloud_create.return_value = [] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_backup_cmd.CreateSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--group', 'test', - '--local-only', - '--backup-values', backupPath, - '--sysadmin-password', 'testpassword']) - self.assertTrue(('The command only applies to a single subcloud or a' - ' subcloud group, not both.') in str(e)) + e = self.assertRaises( + DCManagerClientException, + self.call, + 
subcloud_backup_cmd.CreateSubcloudBackup, + app_args=[ + "--subcloud", + "subcloud1", + "--group", + "test", + "--local-only", + "--backup-values", + backup_path, + "--sysadmin-password", + "testpassword", + ], + ) + self.assertTrue( + ( + "The command only applies to a single subcloud or a" + " subcloud group, not both." + ) + in str(e) + ) def test_backup_create_no_group_no_subcloud(self): - self.client.subcloud_backup_manager.backup_subcloud_create.\ - return_value = [] + self.client.subcloud_backup_manager.backup_subcloud_create.return_value = [] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_backup_cmd.CreateSubcloudBackup, - app_args=['--local-only', - '--backup-values', backupPath, - '--sysadmin-password', 'testpassword']) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_backup_cmd.CreateSubcloudBackup, + app_args=[ + "--local-only", + "--backup-values", + backup_path, + "--sysadmin-password", + "testpassword", + ], + ) - self.assertTrue(('Please provide the subcloud or subcloud group name' - ' or id.') in str(e)) + self.assertTrue( + ("Please provide the subcloud or subcloud group name" " or id.") + in str(e) + ) def test_backup_create_backup_value_not_a_file(self): - self.client.subcloud_backup_manager.backup_subcloud_create.\ - return_value = [] + self.client.subcloud_backup_manager.backup_subcloud_create.return_value = [] - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_backup_cmd.CreateSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--local-only', - '--backup-values', 'notADirectory', - '--sysadmin-password', 'testpassword']) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_backup_cmd.CreateSubcloudBackup, + app_args=[ + "--subcloud", + "subcloud1", + "--local-only", + "--backup-values", + "notADirectory", + "--sysadmin-password", + "testpassword", + ], + ) - self.assertTrue('Backup-values file does not exist' in str(e)) + self.assertTrue("Backup-values file does not exist" in str(e)) - @mock.patch('getpass.getpass', return_value='testpassword') - def test_backup_create_prompt_ask_for_password(self, getpass): + @mock.patch("getpass.getpass", return_value="testpassword") + def test_backup_create_prompt_ask_for_password(self, _mock_getpass): - self.client.subcloud_backup_manager.backup_subcloud_create.\ - return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_backup_manager.backup_subcloud_create.return_value = [ + base.SUBCLOUD_RESOURCE + ] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) actual_call = self.call( subcloud_backup_cmd.CreateSubcloudBackup, - app_args=['--group', 'test', - '--local-only', - '--backup-values', backupPath]) + app_args=[ + "--group", + "test", + "--local-only", + "--backup-values", + backup_path, + ], + ) self.assertEqual([base.SUBCLOUD_FIELD_RESULT_LIST], actual_call[1]) def test_backup_create_local_only_registry_images(self): - self.client.subcloud_backup_manager.backup_subcloud_create.\ - return_value = [] + 
self.client.subcloud_backup_manager.backup_subcloud_create.return_value = [] - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_backup_cmd.CreateSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--registry-images', - '--backup-values', 'notADirectory', - '--sysadmin-password', 'testpassword']) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_backup_cmd.CreateSubcloudBackup, + app_args=[ + "--subcloud", + "subcloud1", + "--registry-images", + "--backup-values", + "notADirectory", + "--sysadmin-password", + "testpassword", + ], + ) - self.assertTrue(('Option --registry-images can not be used without ' - '--local-only option.') in str(e)) + self.assertTrue( + ( + "Option --registry-images can not be used without " + "--local-only option." + ) + in str(e) + ) def test_backup_delete_no_group_no_subcloud(self): - self.client.subcloud_backup_manager.backup_subcloud_delete.\ - return_value = [] + self.client.subcloud_backup_manager.backup_subcloud_delete.return_value = [] - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_backup_cmd.DeleteSubcloudBackup, - app_args=['release', - '--local-only', - '--sysadmin-password', 'testpassword']) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_backup_cmd.DeleteSubcloudBackup, + app_args=[ + "release", + "--local-only", + "--sysadmin-password", + "testpassword", + ], + ) - self.assertTrue(('Please provide the subcloud or subcloud group' - ' name or id.') in str(e)) + self.assertTrue( + ("Please provide the subcloud or subcloud group" " name or id.") + in str(e) + ) def test_backup_delete_group_subcloud(self): - self.client.subcloud_backup_manager.backup_subcloud_delete.\ - return_value = [] + self.client.subcloud_backup_manager.backup_subcloud_delete.return_value = [] - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_backup_cmd.DeleteSubcloudBackup, - app_args=['release', - '--subcloud', 'subcloud1', - '--group', 'group1', - '--local-only', - '--sysadmin-password', 'testpassword']) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_backup_cmd.DeleteSubcloudBackup, + app_args=[ + "release", + "--subcloud", + "subcloud1", + "--group", + "group1", + "--local-only", + "--sysadmin-password", + "testpassword", + ], + ) - self.assertTrue(('This command only applies to a single subcloud ' - 'or a subcloud group, not both.') in str(e)) + self.assertTrue( + ( + "This command only applies to a single subcloud " + "or a subcloud group, not both." 
+ ) + in str(e) + ) def test_backup_delete_group(self): - groupName = 'test_group_1' - releaseVersion = 'release_version_2' - password = 'testpassword' - encodedPassword = base64.b64encode(password.encode("utf-8")).\ - decode("utf-8") + group_name = "test_group_1" + release_version = "release_version_2" + password = "testpassword" + encoded_password = base64.b64encode(password.encode("utf-8")).decode("utf-8") - payload = {'release': releaseVersion, 'group': groupName, - 'local_only': 'true', 'sysadmin_password': encodedPassword} + payload = { + "release": release_version, + "group": group_name, + "local_only": "true", + "sysadmin_password": encoded_password, + } - app_args = [releaseVersion, - '--group', groupName, - '--local-only', - '--sysadmin-password', password] + app_args = [ + release_version, + "--group", + group_name, + "--local-only", + "--sysadmin-password", + password, + ] self.call(subcloud_backup_cmd.DeleteSubcloudBackup, app_args=app_args) - self.client.subcloud_backup_manager.backup_subcloud_delete.\ - assert_called_once_with(data=payload, - release_version=releaseVersion, - subcloud_ref=None) + subcloud_delete = self.client.subcloud_backup_manager.backup_subcloud_delete + subcloud_delete.assert_called_once_with( + data=payload, release_version=release_version, subcloud_ref=None + ) def test_backup_delete_subcloud(self): - subcloudName = 'subcloud1' - releaseVersion = 'release_version_2' - password = 'testpassword' - encodedPassword = base64.b64encode(password.encode("utf-8")).\ - decode("utf-8") + subcloud_name = "subcloud1" + release_version = "release_version_2" + password = "testpassword" + encoded_password = base64.b64encode(password.encode("utf-8")).decode("utf-8") - payload = {'release': releaseVersion, 'subcloud': subcloudName, - 'local_only': 'true', 'sysadmin_password': encodedPassword} + payload = { + "release": release_version, + "subcloud": subcloud_name, + "local_only": "true", + "sysadmin_password": encoded_password, + } - app_args = [releaseVersion, - '--subcloud', subcloudName, - '--local-only', - '--sysadmin-password', password] + app_args = [ + release_version, + "--subcloud", + subcloud_name, + "--local-only", + "--sysadmin-password", + password, + ] self.call(subcloud_backup_cmd.DeleteSubcloudBackup, app_args=app_args) - self.client.subcloud_backup_manager.backup_subcloud_delete.\ - assert_called_once_with(data=payload, - release_version=releaseVersion, - subcloud_ref=subcloudName) + subcloud_delete = self.client.subcloud_backup_manager.backup_subcloud_delete + subcloud_delete.assert_called_once_with( + data=payload, release_version=release_version, subcloud_ref=subcloud_name + ) def test_backup_delete_no_local_only(self): - groupName = 'test_group_1' - releaseVersion = 'release_version_2' - password = 'testpassword' - encodedPassword = base64.b64encode(password.encode("utf-8")).\ - decode("utf-8") + group_name = "test_group_1" + release_version = "release_version_2" + password = "testpassword" + encoded_password = base64.b64encode(password.encode("utf-8")).decode("utf-8") - payload = {'release': releaseVersion, 'group': groupName, - 'local_only': 'false', 'sysadmin_password': encodedPassword} + payload = { + "release": release_version, + "group": group_name, + "local_only": "false", + "sysadmin_password": encoded_password, + } - app_args = [releaseVersion, - '--group', groupName, - '--sysadmin-password', password] + app_args = [ + release_version, + "--group", + group_name, + "--sysadmin-password", + password, + ] 
self.call(subcloud_backup_cmd.DeleteSubcloudBackup, app_args=app_args) - self.client.subcloud_backup_manager.backup_subcloud_delete.\ - assert_called_once_with(data=payload, - release_version=releaseVersion, - subcloud_ref=None) + subcloud_delete = self.client.subcloud_backup_manager.backup_subcloud_delete + subcloud_delete.assert_called_once_with( + data=payload, release_version=release_version, subcloud_ref=None + ) - @mock.patch('getpass.getpass', return_value='testpassword') - def test_backup_delete_prompt_ask_for_password(self, getpass): + @mock.patch("getpass.getpass", return_value="testpassword") + def test_backup_delete_prompt_ask_for_password(self, _mock_getpass): - groupName = 'test_group_1' - releaseVersion = 'release_version_2' - password = 'testpassword' - encodedPassword = base64.b64encode(password.encode("utf-8")).\ - decode("utf-8") + group_name = "test_group_1" + release_version = "release_version_2" + password = "testpassword" + encoded_password = base64.b64encode(password.encode("utf-8")).decode("utf-8") - payload = {'release': releaseVersion, 'group': groupName, - 'local_only': 'true', 'sysadmin_password': encodedPassword} + payload = { + "release": release_version, + "group": group_name, + "local_only": "true", + "sysadmin_password": encoded_password, + } - app_args = [releaseVersion, - '--group', groupName, - '--local-only'] + app_args = [release_version, "--group", group_name, "--local-only"] self.call(subcloud_backup_cmd.DeleteSubcloudBackup, app_args=app_args) - self.client.subcloud_backup_manager.backup_subcloud_delete.\ - assert_called_once_with(data=payload, - release_version=releaseVersion, - subcloud_ref=None) + subcloud_delete = self.client.subcloud_backup_manager.backup_subcloud_delete + subcloud_delete.assert_called_once_with( + data=payload, release_version=release_version, subcloud_ref=None + ) def test_backup_delete_subcloud_no_release_version(self): - subcloudName = 'subcloud1' - password = 'testpassword' + subcloud_name = "subcloud1" + password = "testpassword" - app_args = ['--subcloud', subcloudName, - '--local-only', - '--sysadmin-password', password] + app_args = [ + "--subcloud", + subcloud_name, + "--local-only", + "--sysadmin-password", + password, + ] - self.assertRaises(SystemExit, self.call, - subcloud_backup_cmd.DeleteSubcloudBackup, - app_args=app_args) + self.assertRaises( + SystemExit, + self.call, + subcloud_backup_cmd.DeleteSubcloudBackup, + app_args=app_args, + ) def test_backup_restore(self): - self.client.subcloud_backup_manager.backup_subcloud_restore.\ - return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_backup_manager.backup_subcloud_restore.return_value = [ + base.SUBCLOUD_RESOURCE + ] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) actual_call = self.call( subcloud_backup_cmd.RestoreSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--local-only', - '--registry-images', - '--restore-values', backupPath, - '--sysadmin-password', 'testpassword']) + app_args=[ + "--subcloud", + "subcloud1", + "--local-only", + "--registry-images", + "--restore-values", + backup_path, + "--sysadmin-password", + "testpassword", + ], + ) self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST, actual_call[1]) def test_backup_restore_no_restore_values(self): - self.client.subcloud_backup_manager.backup_subcloud_restore.\ - 
return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_backup_manager.backup_subcloud_restore.return_value = [ + base.SUBCLOUD_RESOURCE + ] actual_call = self.call( subcloud_backup_cmd.RestoreSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--local-only', - '--registry-images', - '--sysadmin-password', 'testpassword']) + app_args=[ + "--subcloud", + "subcloud1", + "--local-only", + "--registry-images", + "--sysadmin-password", + "testpassword", + ], + ) self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST, actual_call[1]) def test_backup_restore_with_group(self): - self.client.subcloud_backup_manager.backup_subcloud_restore.\ - return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_backup_manager.backup_subcloud_restore.return_value = [ + base.SUBCLOUD_RESOURCE + ] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) actual_call = self.call( subcloud_backup_cmd.RestoreSubcloudBackup, - app_args=['--group', 'test', - '--with-install', - '--restore-values', backupPath, - '--sysadmin-password', 'testpassword']) + app_args=[ + "--group", + "test", + "--with-install", + "--restore-values", + backup_path, + "--sysadmin-password", + "testpassword", + ], + ) self.assertEqual([base.SUBCLOUD_FIELD_RESULT_LIST], actual_call[1]) def test_backup_restore_group_and_subcloud(self): - self.client.subcloud_backup_manager.backup_subcloud_restore.\ - return_value = [] + self.client.subcloud_backup_manager.backup_subcloud_restore.return_value = [] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_backup_cmd.RestoreSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--group', 'test', - '--local-only', - '--restore-values', backupPath, - '--sysadmin-password', 'testpassword']) - self.assertTrue(('The command only applies to a single subcloud or a' - ' subcloud group, not both.') in str(e)) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_backup_cmd.RestoreSubcloudBackup, + app_args=[ + "--subcloud", + "subcloud1", + "--group", + "test", + "--local-only", + "--restore-values", + backup_path, + "--sysadmin-password", + "testpassword", + ], + ) + self.assertTrue( + ( + "The command only applies to a single subcloud or a" + " subcloud group, not both." 
+ ) + in str(e) + ) def test_backup_restore_no_group_and_no_subcloud(self): - self.client.subcloud_backup_manager.backup_subcloud_restore.\ - return_value = [] + self.client.subcloud_backup_manager.backup_subcloud_restore.return_value = [] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_backup_cmd.RestoreSubcloudBackup, - app_args=['--local-only', - '--restore-values', backupPath, - '--sysadmin-password', 'testpassword']) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_backup_cmd.RestoreSubcloudBackup, + app_args=[ + "--local-only", + "--restore-values", + backup_path, + "--sysadmin-password", + "testpassword", + ], + ) - self.assertTrue(('Please provide the subcloud or subcloud group name' - ' or id.') in str(e)) + self.assertTrue( + ("Please provide the subcloud or subcloud group name" " or id.") + in str(e) + ) def test_backup_restore_backup_value_not_a_file(self): - self.client.subcloud_backup_manager.backup_subcloud_restore.\ - return_value = [] + self.client.subcloud_backup_manager.backup_subcloud_restore.return_value = [] - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_backup_cmd.RestoreSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--local-only', - '--restore-values', 'notADirectory', - '--sysadmin-password', 'testpassword']) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_backup_cmd.RestoreSubcloudBackup, + app_args=[ + "--subcloud", + "subcloud1", + "--local-only", + "--restore-values", + "notADirectory", + "--sysadmin-password", + "testpassword", + ], + ) - self.assertTrue('Restore_values file does not exist' in str(e)) + self.assertTrue("restore_values file does not exist" in str(e)) - @mock.patch('getpass.getpass', return_value='testpassword') - def test_backup_restore_prompt_ask_for_password(self, getpass): + @mock.patch("getpass.getpass", return_value="testpassword") + def test_backup_restore_prompt_ask_for_password(self, _mock_getpass): - self.client.subcloud_backup_manager.backup_subcloud_restore.\ - return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_backup_manager.backup_subcloud_restore.return_value = [ + base.SUBCLOUD_RESOURCE + ] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) actual_call = self.call( subcloud_backup_cmd.RestoreSubcloudBackup, - app_args=['--group', 'test', - '--local-only', - '--restore-values', backupPath]) + app_args=[ + "--group", + "test", + "--local-only", + "--restore-values", + backup_path, + ], + ) self.assertEqual([base.SUBCLOUD_FIELD_RESULT_LIST], actual_call[1]) def test_backup_restore_local_only_registry_images(self): - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_backup_cmd.RestoreSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--registry-images', - '--restore-values', 'notADirectory', - '--sysadmin-password', 'testpassword']) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_backup_cmd.RestoreSubcloudBackup, + app_args=[ + "--subcloud", + "subcloud1", + 
"--registry-images", + "--restore-values", + "notADirectory", + "--sysadmin-password", + "testpassword", + ], + ) - self.assertTrue(('Option --registry-images cannot be used without ' - '--local-only option.') in str(e)) + self.assertTrue( + ( + "Option --registry-images cannot be used without " + "--local-only option." + ) + in str(e) + ) def test_backup_restore_with_install_no_release(self): - self.client.subcloud_backup_manager.backup_subcloud_restore.\ - return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_backup_manager.backup_subcloud_restore.return_value = [ + base.SUBCLOUD_RESOURCE + ] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) actual_call = self.call( subcloud_backup_cmd.RestoreSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--with-install', - '--local-only', - '--registry-images', - '--restore-values', backupPath, - '--sysadmin-password', 'testpassword']) + app_args=[ + "--subcloud", + "subcloud1", + "--with-install", + "--local-only", + "--registry-images", + "--restore-values", + backup_path, + "--sysadmin-password", + "testpassword", + ], + ) self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST, actual_call[1]) def test_backup_restore_with_install_with_release(self): - self.client.subcloud_backup_manager.backup_subcloud_restore.\ - return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_backup_manager.backup_subcloud_restore.return_value = [ + base.SUBCLOUD_RESOURCE + ] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) actual_call = self.call( subcloud_backup_cmd.RestoreSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--with-install', - '--release', base.SOFTWARE_VERSION, - '--local-only', - '--registry-images', - '--restore-values', backupPath, - '--sysadmin-password', 'testpassword']) + app_args=[ + "--subcloud", + "subcloud1", + "--with-install", + "--release", + base.SOFTWARE_VERSION, + "--local-only", + "--registry-images", + "--restore-values", + backup_path, + "--sysadmin-password", + "testpassword", + ], + ) self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST, actual_call[1]) def test_backup_restore_no_install_with_release(self): - self.client.subcloud_backup_manager.backup_subcloud_restore.\ - return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_backup_manager.backup_subcloud_restore.return_value = [ + base.SUBCLOUD_RESOURCE + ] - backupPath = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) - with open(backupPath, mode='w') as f: + backup_path = os.path.normpath(os.path.join(os.getcwd(), "test.yaml")) + with open(backup_path, mode="w", encoding="UTF-8") as f: f.write(OVERRIDE_VALUES) - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_backup_cmd.RestoreSubcloudBackup, - app_args=['--subcloud', 'subcloud1', - '--release', base.SOFTWARE_VERSION, - '--local-only', - '--registry-images', - '--restore-values', backupPath, - '--sysadmin-password', 'testpassword']) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_backup_cmd.RestoreSubcloudBackup, + app_args=[ + "--subcloud", + "subcloud1", + "--release", + base.SOFTWARE_VERSION, + 
"--local-only", + "--registry-images", + "--restore-values", + backup_path, + "--sysadmin-password", + "testpassword", + ], + ) - self.assertTrue(('Option --release cannot be used without ' - '--with-install option.') in str(e)) + self.assertTrue( + ("Option --release cannot be used without " "--with-install option.") + in str(e) + ) diff --git a/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_deploy_manager.py b/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_deploy_manager.py index 4e0c7cb..7ae8b57 100644 --- a/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_deploy_manager.py +++ b/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_deploy_manager.py @@ -1,4 +1,4 @@ -# Copyright (c) 2020-2023 Wind River Systems, Inc. +# Copyright (c) 2020-2024 Wind River Systems, Inc. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -12,67 +12,67 @@ # limitations under the License. # -import mock import os import tempfile +import mock + from dcmanagerclient.api.v1 import subcloud_deploy_manager as sdm -from dcmanagerclient.commands.v1 \ - import subcloud_deploy_manager as subcloud_deploy_cmd +from dcmanagerclient.commands.v1 import subcloud_deploy_manager as \ + subcloud_deploy_cmd from dcmanagerclient.exceptions import DCManagerClientException from dcmanagerclient.tests import base - -DEPLOY_PLAYBOOK = 'deployment-manager-playbook.yaml' -DEPLOY_OVERRIDES = 'deployment-manager-overrides-subcloud.yaml' -DEPLOY_CHART = 'deployment-manager.tgz' -DEPLOY_PRESTAGE_IMAGES = 'prebuilt-images.lst' +DEPLOY_PLAYBOOK = "deployment-manager-playbook.yaml" +DEPLOY_OVERRIDES = "deployment-manager-overrides-subcloud.yaml" +DEPLOY_CHART = "deployment-manager.tgz" +DEPLOY_PRESTAGE_IMAGES = "prebuilt-images.lst" SUBCLOUD_DEPLOY_DICT = { - 'DEPLOY_PLAYBOOK': DEPLOY_PLAYBOOK, - 'DEPLOY_OVERRIDES': DEPLOY_OVERRIDES, - 'DEPLOY_CHART': DEPLOY_CHART, - 'DEPLOY_PRESTAGE_IMAGES': DEPLOY_PRESTAGE_IMAGES, - 'SOFTWARE_VERSION': base.SOFTWARE_VERSION + "DEPLOY_PLAYBOOK": DEPLOY_PLAYBOOK, + "DEPLOY_OVERRIDES": DEPLOY_OVERRIDES, + "DEPLOY_CHART": DEPLOY_CHART, + "DEPLOY_PRESTAGE_IMAGES": DEPLOY_PRESTAGE_IMAGES, + "SOFTWARE_VERSION": base.SOFTWARE_VERSION, } SUBCLOUD_DEPLOY_ALL = sdm.SubcloudDeploy( - deploy_playbook=SUBCLOUD_DEPLOY_DICT['DEPLOY_PLAYBOOK'], - deploy_overrides=SUBCLOUD_DEPLOY_DICT['DEPLOY_OVERRIDES'], - deploy_chart=SUBCLOUD_DEPLOY_DICT['DEPLOY_CHART'], - prestage_images=SUBCLOUD_DEPLOY_DICT['DEPLOY_PRESTAGE_IMAGES'], - software_version=SUBCLOUD_DEPLOY_DICT['SOFTWARE_VERSION'] + deploy_playbook=SUBCLOUD_DEPLOY_DICT["DEPLOY_PLAYBOOK"], + deploy_overrides=SUBCLOUD_DEPLOY_DICT["DEPLOY_OVERRIDES"], + deploy_chart=SUBCLOUD_DEPLOY_DICT["DEPLOY_CHART"], + prestage_images=SUBCLOUD_DEPLOY_DICT["DEPLOY_PRESTAGE_IMAGES"], + software_version=SUBCLOUD_DEPLOY_DICT["SOFTWARE_VERSION"], ) SUBCLOUD_DEPLOY_PRESTAGE = sdm.SubcloudDeploy( - prestage_images=SUBCLOUD_DEPLOY_DICT['DEPLOY_PRESTAGE_IMAGES'], - software_version=SUBCLOUD_DEPLOY_DICT['SOFTWARE_VERSION'] + prestage_images=SUBCLOUD_DEPLOY_DICT["DEPLOY_PRESTAGE_IMAGES"], + software_version=SUBCLOUD_DEPLOY_DICT["SOFTWARE_VERSION"], ) SUBCLOUD_DEPLOY_NO_PRESTAGE = sdm.SubcloudDeploy( - deploy_playbook=SUBCLOUD_DEPLOY_DICT['DEPLOY_PLAYBOOK'], - deploy_overrides=SUBCLOUD_DEPLOY_DICT['DEPLOY_OVERRIDES'], - deploy_chart=SUBCLOUD_DEPLOY_DICT['DEPLOY_CHART'], - 
software_version=SUBCLOUD_DEPLOY_DICT['SOFTWARE_VERSION'] + deploy_playbook=SUBCLOUD_DEPLOY_DICT["DEPLOY_PLAYBOOK"], + deploy_overrides=SUBCLOUD_DEPLOY_DICT["DEPLOY_OVERRIDES"], + deploy_chart=SUBCLOUD_DEPLOY_DICT["DEPLOY_CHART"], + software_version=SUBCLOUD_DEPLOY_DICT["SOFTWARE_VERSION"], ) SUBCLOUD_DEPLOY_NO_PLAYBOOK = sdm.SubcloudDeploy( - deploy_overrides=SUBCLOUD_DEPLOY_DICT['DEPLOY_OVERRIDES'], - deploy_chart=SUBCLOUD_DEPLOY_DICT['DEPLOY_CHART'], - prestage_images=SUBCLOUD_DEPLOY_DICT['DEPLOY_PRESTAGE_IMAGES'], - software_version=SUBCLOUD_DEPLOY_DICT['SOFTWARE_VERSION'] + deploy_overrides=SUBCLOUD_DEPLOY_DICT["DEPLOY_OVERRIDES"], + deploy_chart=SUBCLOUD_DEPLOY_DICT["DEPLOY_CHART"], + prestage_images=SUBCLOUD_DEPLOY_DICT["DEPLOY_PRESTAGE_IMAGES"], + software_version=SUBCLOUD_DEPLOY_DICT["SOFTWARE_VERSION"], ) SUBCLOUD_DEPLOY_NO_PLAYBOOK_OVERRIDES = sdm.SubcloudDeploy( - deploy_chart=SUBCLOUD_DEPLOY_DICT['DEPLOY_CHART'], - prestage_images=SUBCLOUD_DEPLOY_DICT['DEPLOY_PRESTAGE_IMAGES'], - software_version=SUBCLOUD_DEPLOY_DICT['SOFTWARE_VERSION'] + deploy_chart=SUBCLOUD_DEPLOY_DICT["DEPLOY_CHART"], + prestage_images=SUBCLOUD_DEPLOY_DICT["DEPLOY_PRESTAGE_IMAGES"], + software_version=SUBCLOUD_DEPLOY_DICT["SOFTWARE_VERSION"], ) SUBCLOUD_DEPLOY_NO_OVERRIDES_CHART = sdm.SubcloudDeploy( - deploy_playbook=SUBCLOUD_DEPLOY_DICT['DEPLOY_PLAYBOOK'], - prestage_images=SUBCLOUD_DEPLOY_DICT['DEPLOY_PRESTAGE_IMAGES'], - software_version=SUBCLOUD_DEPLOY_DICT['SOFTWARE_VERSION'] + deploy_playbook=SUBCLOUD_DEPLOY_DICT["DEPLOY_PLAYBOOK"], + prestage_images=SUBCLOUD_DEPLOY_DICT["DEPLOY_PRESTAGE_IMAGES"], + software_version=SUBCLOUD_DEPLOY_DICT["SOFTWARE_VERSION"], ) @@ -84,38 +84,49 @@ class TestCLISubcloudDeployManagerV1(base.BaseCommandTest): self.client = self.app.client_manager.subcloud_deploy_manager def test_subcloud_deploy_show(self): - self.client.subcloud_deploy_manager.subcloud_deploy_show.\ - return_value = [SUBCLOUD_DEPLOY_ALL] + self.client.subcloud_deploy_manager.subcloud_deploy_show.return_value = [ + SUBCLOUD_DEPLOY_ALL + ] # Without "--release" parameter actual_call1 = self.call(subcloud_deploy_cmd.SubcloudDeployShow) - self.assertEqual((DEPLOY_PLAYBOOK, - DEPLOY_OVERRIDES, - DEPLOY_CHART, - DEPLOY_PRESTAGE_IMAGES, - base.SOFTWARE_VERSION), - actual_call1[1]) + self.assertEqual( + ( + DEPLOY_PLAYBOOK, + DEPLOY_OVERRIDES, + DEPLOY_CHART, + DEPLOY_PRESTAGE_IMAGES, + base.SOFTWARE_VERSION, + ), + actual_call1[1], + ) # With "--release" parameter actual_call2 = self.call( subcloud_deploy_cmd.SubcloudDeployShow, - app_args=['--release', base.SOFTWARE_VERSION]) + app_args=["--release", base.SOFTWARE_VERSION], + ) - self.assertEqual((DEPLOY_PLAYBOOK, - DEPLOY_OVERRIDES, - DEPLOY_CHART, - DEPLOY_PRESTAGE_IMAGES, - base.SOFTWARE_VERSION), - actual_call2[1]) + self.assertEqual( + ( + DEPLOY_PLAYBOOK, + DEPLOY_OVERRIDES, + DEPLOY_CHART, + DEPLOY_PRESTAGE_IMAGES, + base.SOFTWARE_VERSION, + ), + actual_call2[1], + ) def test_subcloud_deploy_upload_all(self): - self.client.subcloud_deploy_manager.subcloud_deploy_upload.\ - return_value = [SUBCLOUD_DEPLOY_ALL] + self.client.subcloud_deploy_manager.subcloud_deploy_upload.return_value = [ + SUBCLOUD_DEPLOY_ALL + ] - with tempfile.NamedTemporaryFile() as f1,\ - tempfile.NamedTemporaryFile() as f2,\ - tempfile.NamedTemporaryFile() as f3,\ + with tempfile.NamedTemporaryFile() as f1, \ + tempfile.NamedTemporaryFile() as f2, \ + tempfile.NamedTemporaryFile() as f3, \ tempfile.NamedTemporaryFile() as f4: file_path_1 = os.path.abspath(f1.name) file_path_2 = 
os.path.abspath(f2.name) @@ -124,24 +135,35 @@ class TestCLISubcloudDeployManagerV1(base.BaseCommandTest): actual_call = self.call( subcloud_deploy_cmd.SubcloudDeployUpload, app_args=[ - '--deploy-playbook', file_path_1, - '--deploy-overrides', file_path_2, - '--deploy-chart', file_path_3, - '--prestage-images', file_path_4]) + "--deploy-playbook", + file_path_1, + "--deploy-overrides", + file_path_2, + "--deploy-chart", + file_path_3, + "--prestage-images", + file_path_4, + ], + ) - self.assertEqual((DEPLOY_PLAYBOOK, - DEPLOY_OVERRIDES, - DEPLOY_CHART, - DEPLOY_PRESTAGE_IMAGES, - base.SOFTWARE_VERSION), - actual_call[1]) + self.assertEqual( + ( + DEPLOY_PLAYBOOK, + DEPLOY_OVERRIDES, + DEPLOY_CHART, + DEPLOY_PRESTAGE_IMAGES, + base.SOFTWARE_VERSION, + ), + actual_call[1], + ) def test_subcloud_deploy_upload_no_prestage(self): - self.client.subcloud_deploy_manager.subcloud_deploy_upload.\ - return_value = [SUBCLOUD_DEPLOY_NO_PRESTAGE] + self.client.subcloud_deploy_manager.subcloud_deploy_upload.return_value = [ + SUBCLOUD_DEPLOY_NO_PRESTAGE + ] - with tempfile.NamedTemporaryFile() as f1,\ - tempfile.NamedTemporaryFile() as f2,\ + with tempfile.NamedTemporaryFile() as f1, \ + tempfile.NamedTemporaryFile() as f2, \ tempfile.NamedTemporaryFile() as f3: file_path_1 = os.path.abspath(f1.name) file_path_2 = os.path.abspath(f2.name) @@ -149,40 +171,49 @@ class TestCLISubcloudDeployManagerV1(base.BaseCommandTest): actual_call = self.call( subcloud_deploy_cmd.SubcloudDeployUpload, app_args=[ - '--deploy-playbook', file_path_1, - '--deploy-overrides', file_path_2, - '--deploy-chart', file_path_3]) + "--deploy-playbook", + file_path_1, + "--deploy-overrides", + file_path_2, + "--deploy-chart", + file_path_3, + ], + ) - self.assertEqual((DEPLOY_PLAYBOOK, - DEPLOY_OVERRIDES, - DEPLOY_CHART, - None, - base.SOFTWARE_VERSION), - actual_call[1]) + self.assertEqual( + ( + DEPLOY_PLAYBOOK, + DEPLOY_OVERRIDES, + DEPLOY_CHART, + None, + base.SOFTWARE_VERSION, + ), + actual_call[1], + ) def test_subcloud_deploy_upload_prestage(self): - self.client.subcloud_deploy_manager.subcloud_deploy_upload.\ - return_value = [SUBCLOUD_DEPLOY_PRESTAGE] + self.client.subcloud_deploy_manager.subcloud_deploy_upload.return_value = [ + SUBCLOUD_DEPLOY_PRESTAGE + ] with tempfile.NamedTemporaryFile() as f1: file_path_1 = os.path.abspath(f1.name) actual_call = self.call( subcloud_deploy_cmd.SubcloudDeployUpload, - app_args=[ - '--prestage-images', file_path_1]) - self.assertEqual((None, - None, - None, - DEPLOY_PRESTAGE_IMAGES, - base.SOFTWARE_VERSION), - actual_call[1]) + app_args=["--prestage-images", file_path_1], + ) + self.assertEqual( + (None, None, None, DEPLOY_PRESTAGE_IMAGES, base.SOFTWARE_VERSION), + actual_call[1], + ) def test_subcloud_deploy_upload_no_playbook(self): - self.client.subcloud_deploy_manager.subcloud_deploy_upload.\ - return_value = [SUBCLOUD_DEPLOY_NO_PLAYBOOK] + self.client.subcloud_deploy_manager.subcloud_deploy_upload.return_value = [ + SUBCLOUD_DEPLOY_NO_PLAYBOOK + ] - with tempfile.NamedTemporaryFile() as f1,\ - tempfile.NamedTemporaryFile() as f2,\ + with tempfile.NamedTemporaryFile() as f1, \ + tempfile.NamedTemporaryFile() as f2, \ tempfile.NamedTemporaryFile() as f3: file_path_1 = os.path.abspath(f1.name) file_path_2 = os.path.abspath(f2.name) @@ -190,82 +221,116 @@ class TestCLISubcloudDeployManagerV1(base.BaseCommandTest): actual_call = self.call( subcloud_deploy_cmd.SubcloudDeployUpload, app_args=[ - '--deploy-overrides', file_path_1, - '--deploy-chart', file_path_2, - '--prestage-images', 
file_path_3]) - self.assertEqual((None, - DEPLOY_OVERRIDES, - DEPLOY_CHART, - DEPLOY_PRESTAGE_IMAGES, - base.SOFTWARE_VERSION), - actual_call[1]) + "--deploy-overrides", + file_path_1, + "--deploy-chart", + file_path_2, + "--prestage-images", + file_path_3, + ], + ) + self.assertEqual( + ( + None, + DEPLOY_OVERRIDES, + DEPLOY_CHART, + DEPLOY_PRESTAGE_IMAGES, + base.SOFTWARE_VERSION, + ), + actual_call[1], + ) def test_subcloud_deploy_upload_no_playbook_overrides(self): - self.client.subcloud_deploy_manager.subcloud_deploy_upload.\ - return_value = [SUBCLOUD_DEPLOY_NO_PLAYBOOK_OVERRIDES] + self.client.subcloud_deploy_manager.subcloud_deploy_upload.return_value = [ + SUBCLOUD_DEPLOY_NO_PLAYBOOK_OVERRIDES + ] - with tempfile.NamedTemporaryFile() as f1,\ + with tempfile.NamedTemporaryFile() as f1, \ tempfile.NamedTemporaryFile() as f2: file_path_1 = os.path.abspath(f1.name) file_path_2 = os.path.abspath(f2.name) actual_call = self.call( subcloud_deploy_cmd.SubcloudDeployUpload, app_args=[ - '--deploy-chart', file_path_1, - '--prestage-images', file_path_2]) - self.assertEqual((None, - None, - DEPLOY_CHART, - DEPLOY_PRESTAGE_IMAGES, - base.SOFTWARE_VERSION), - actual_call[1]) + "--deploy-chart", + file_path_1, + "--prestage-images", + file_path_2, + ], + ) + self.assertEqual(( + None, None, + DEPLOY_CHART, + DEPLOY_PRESTAGE_IMAGES, + base.SOFTWARE_VERSION + ), actual_call[1],) def test_subcloud_deploy_upload_no_overrides_chart(self): - self.client.subcloud_deploy_manager.subcloud_deploy_upload.\ - return_value = [SUBCLOUD_DEPLOY_NO_OVERRIDES_CHART] + self.client.subcloud_deploy_manager.subcloud_deploy_upload.return_value = [ + SUBCLOUD_DEPLOY_NO_OVERRIDES_CHART + ] - with tempfile.NamedTemporaryFile() as f1,\ + with tempfile.NamedTemporaryFile() as f1, \ tempfile.NamedTemporaryFile() as f2: file_path_1 = os.path.abspath(f1.name) file_path_2 = os.path.abspath(f2.name) actual_call = self.call( subcloud_deploy_cmd.SubcloudDeployUpload, app_args=[ - '--deploy-playbook', file_path_1, - '--prestage-images', file_path_2, - '--release', base.SOFTWARE_VERSION]) - self.assertEqual((DEPLOY_PLAYBOOK, - None, - None, - DEPLOY_PRESTAGE_IMAGES, - base.SOFTWARE_VERSION), - actual_call[1]) + "--deploy-playbook", + file_path_1, + "--prestage-images", + file_path_2, + "--release", + base.SOFTWARE_VERSION, + ], + ) + self.assertEqual( + ( + DEPLOY_PLAYBOOK, + None, + None, + DEPLOY_PRESTAGE_IMAGES, + base.SOFTWARE_VERSION, + ), + actual_call[1], + ) - @mock.patch('builtins.print') + @mock.patch("builtins.print") def test_subcloud_deploy_upload_invalid_path(self, mock_print): - self.client.subcloud_deploy_manager.subcloud_deploy_upload.\ - return_value = [SUBCLOUD_DEPLOY_NO_PRESTAGE] - file_path_1 = 'not_a_valid_path' - with tempfile.NamedTemporaryFile() as f2,\ + self.client.subcloud_deploy_manager.subcloud_deploy_upload.return_value = [ + SUBCLOUD_DEPLOY_NO_PRESTAGE + ] + mock_print.return_value = mock.ANY + file_path_1 = "not_a_valid_path" + with tempfile.NamedTemporaryFile() as f2, \ tempfile.NamedTemporaryFile() as f3: file_path_2 = os.path.abspath(f2.name) file_path_3 = os.path.abspath(f3.name) - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_deploy_cmd.SubcloudDeployUpload, - app_args=['--deploy-playbook', file_path_1, - '--deploy-overrides', file_path_2, - '--deploy-chart', file_path_3]) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_deploy_cmd.SubcloudDeployUpload, + app_args=[ + "--deploy-playbook", + file_path_1, + "--deploy-overrides", + 
file_path_2, + "--deploy-chart", + file_path_3, + ], + ) - self.assertTrue('deploy_playbook file does not exist: not_a_valid_path' - in str(e)) + self.assertTrue( + "deploy_playbook file does not exist: not_a_valid_path" in str(e) + ) def test_subcloud_deploy_delete_with_release(self): release_version = base.SOFTWARE_VERSION - data = {'prestage_images': 'False', 'deployment_files': 'False'} - app_args = ['--release', release_version] + data = {"prestage_images": "False", "deployment_files": "False"} + app_args = ["--release", release_version] self.call(subcloud_deploy_cmd.SubcloudDeployDelete, app_args=app_args) @@ -275,6 +340,6 @@ class TestCLISubcloudDeployManagerV1(base.BaseCommandTest): def test_subcloud_deploy_delete_without_release(self): self.call(subcloud_deploy_cmd.SubcloudDeployDelete) - data = {'prestage_images': 'False', 'deployment_files': 'False'} + data = {"prestage_images": "False", "deployment_files": "False"} self.client.subcloud_deploy_manager.subcloud_deploy_delete.\ assert_called_once_with(None, data=data) diff --git a/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_manager.py b/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_manager.py index 8e76d3b..d628f8c 100644 --- a/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_manager.py +++ b/distributedcloud-client/dcmanagerclient/tests/v1/test_subcloud_manager.py @@ -15,9 +15,10 @@ # import copy -import mock import os import tempfile + +import mock import yaml from dcmanagerclient.commands.v1 import subcloud_manager as subcloud_cmd @@ -28,388 +29,458 @@ from dcmanagerclient.tests import base class TestCLISubcloudManagerV1(base.BaseCommandTest): def test_list_subclouds(self): - self.client.subcloud_manager.list_subclouds.return_value = \ - [base.SUBCLOUD_RESOURCE] + self.client.subcloud_manager.list_subclouds.return_value = [ + base.SUBCLOUD_RESOURCE + ] actual_call = self.call(subcloud_cmd.ListSubcloud) - self.assertEqual([base.SUBCLOUD_LIST_RESULT], - actual_call[1]) + self.assertEqual([base.SUBCLOUD_LIST_RESULT], actual_call[1]) def test_negative_list_subclouds(self): self.client.subcloud_manager.list_subclouds.return_value = [] actual_call = self.call(subcloud_cmd.ListSubcloud) - self.assertEqual(base.EMPTY_SUBCLOUD_LIST_RESULT, - actual_call[1]) + self.assertEqual(base.EMPTY_SUBCLOUD_LIST_RESULT, actual_call[1]) def test_list_subclouds_with_all_fields(self): - self.client.subcloud_manager.list_subclouds.return_value = \ - [base.SUBCLOUD_RESOURCE_WITH_ALL_LIST_FIELDS] - actual_call = self.call(subcloud_cmd.ListSubcloud, app_args=['-d']) - self.assertEqual([base.SUBCLOUD_ALL_FIELDS_RESULT_LIST], - actual_call[1]) + self.client.subcloud_manager.list_subclouds.return_value = [ + base.SUBCLOUD_RESOURCE_WITH_ALL_LIST_FIELDS + ] + actual_call = self.call(subcloud_cmd.ListSubcloud, app_args=["-d"]) + self.assertEqual([base.SUBCLOUD_ALL_FIELDS_RESULT_LIST], actual_call[1]) def test_list_subclouds_with_all_empty_fields(self): self.client.subcloud_manager.list_subclouds.return_value = [] - actual_call = self.call(subcloud_cmd.ListSubcloud, - app_args=['--detail']) - self.assertEqual(base.EMPTY_SUBCLOUD_ALL_FIELDS_RESULT, - actual_call[1]) + actual_call = self.call(subcloud_cmd.ListSubcloud, app_args=["--detail"]) + self.assertEqual(base.EMPTY_SUBCLOUD_ALL_FIELDS_RESULT, actual_call[1]) def test_list_subclouds_with_specified_columns(self): - self.client.subcloud_manager.list_subclouds.return_value = \ - [base.SUBCLOUD_RESOURCE_WITH_ALL_LIST_FIELDS] - self.call(subcloud_cmd.ListSubcloud, 
- app_args=['-c', 'name', - '-c', 'prestage_status', - '-c', 'prestage_versions']) - self.assertEqual(self.parsed_args.columns, - ['name', 'prestage_status', 'prestage_versions']) + self.client.subcloud_manager.list_subclouds.return_value = [ + base.SUBCLOUD_RESOURCE_WITH_ALL_LIST_FIELDS + ] + self.call( + subcloud_cmd.ListSubcloud, + app_args=[ + "-c", + "name", + "-c", + "prestage_status", + "-c", + "prestage_versions", + ], + ) + self.assertEqual( + self.parsed_args.columns, + ["name", "prestage_status", "prestage_versions"], + ) def test_delete_subcloud_with_subcloud_id(self): self.call(subcloud_cmd.DeleteSubcloud, app_args=[base.ID]) - self.client.subcloud_manager.delete_subcloud.\ - assert_called_once_with(base.ID) + self.client.subcloud_manager.delete_subcloud.assert_called_once_with(base.ID) def test_delete_subcloud_without_subcloud_id(self): - self.assertRaises(SystemExit, self.call, - subcloud_cmd.DeleteSubcloud, app_args=[]) + self.assertRaises( + SystemExit, self.call, subcloud_cmd.DeleteSubcloud, app_args=[] + ) def test_show_subcloud_with_subcloud_id(self): - self.client.subcloud_manager.subcloud_detail.\ - return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_manager.subcloud_detail.return_value = [ + base.SUBCLOUD_RESOURCE + ] actual_call = self.call(subcloud_cmd.ShowSubcloud, app_args=[base.ID]) self.assertEqual( base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID + (base.REGION_NAME,), - actual_call[1] + actual_call[1], ) def test_show_subcloud_with_additional_detail(self): subcloud_with_additional_detail = copy.copy(base.SUBCLOUD_RESOURCE) - subcloud_with_additional_detail.oam_floating_ip = \ + subcloud_with_additional_detail.oam_floating_ip = ( base.EXTERNAL_OAM_FLOATING_ADDRESS - subcloud_with_additional_detail.deploy_config_sync_status = \ + ) + subcloud_with_additional_detail.deploy_config_sync_status = ( base.DEPLOY_CONFIG_SYNC_STATUS - subcloud_with_additional_detail.region_name = \ - base.REGION_NAME - self.client.subcloud_manager.subcloud_additional_details.\ - return_value = [subcloud_with_additional_detail] - actual_call = self.call(subcloud_cmd.ShowSubcloud, - app_args=[base.ID, '--detail']) + ) + subcloud_with_additional_detail.region_name = base.REGION_NAME + self.client.subcloud_manager.subcloud_additional_details.return_value = [ + subcloud_with_additional_detail + ] + actual_call = self.call( + subcloud_cmd.ShowSubcloud, app_args=[base.ID, "--detail"] + ) self.assertEqual( - base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID + - (base.EXTERNAL_OAM_FLOATING_ADDRESS, - base.DEPLOY_CONFIG_SYNC_STATUS, base.REGION_NAME), - actual_call[1]) + base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID + + ( + base.EXTERNAL_OAM_FLOATING_ADDRESS, + base.DEPLOY_CONFIG_SYNC_STATUS, + base.REGION_NAME, + ), + actual_call[1], + ) def test_show_subcloud_negative(self): self.client.subcloud_manager.subcloud_detail.return_value = [] actual_call = self.call(subcloud_cmd.ShowSubcloud, app_args=[base.ID]) self.assertEqual( - base.EMPTY_SUBCLOUD_FIELD_RESULT_WITH_PEERID_REHOME_DATA, - actual_call[1]) + base.EMPTY_SUBCLOUD_FIELD_RESULT_WITH_PEERID_REHOME_DATA, actual_call[1] + ) - @mock.patch('getpass.getpass', return_value='testpassword') - def test_add_subcloud(self, getpass): - self.client.subcloud_manager.add_subcloud.\ - return_value = [base.SUBCLOUD_RESOURCE] + @mock.patch("getpass.getpass", return_value="testpassword") + def test_add_subcloud(self, _mock_getpass): + self.client.subcloud_manager.add_subcloud.return_value = [ + base.SUBCLOUD_RESOURCE + ] - with 
tempfile.NamedTemporaryFile(mode='w') as f: + with tempfile.NamedTemporaryFile(mode="w") as f: yaml.dump(base.FAKE_BOOTSTRAP_VALUES, f) file_path = os.path.abspath(f.name) # Without "--release" parameter actual_call1 = self.call( - subcloud_cmd.AddSubcloud, app_args=[ - '--bootstrap-address', base.BOOTSTRAP_ADDRESS, - '--bootstrap-values', file_path, - ]) + subcloud_cmd.AddSubcloud, + app_args=[ + "--bootstrap-address", + base.BOOTSTRAP_ADDRESS, + "--bootstrap-values", + file_path, + ], + ) # With "--release" parameter actual_call2 = self.call( - subcloud_cmd.AddSubcloud, app_args=[ - '--bootstrap-address', base.BOOTSTRAP_ADDRESS, - '--bootstrap-values', file_path, - '--release', base.SOFTWARE_VERSION, - ]) + subcloud_cmd.AddSubcloud, + app_args=[ + "--bootstrap-address", + base.BOOTSTRAP_ADDRESS, + "--bootstrap-values", + file_path, + "--release", + base.SOFTWARE_VERSION, + ], + ) self.assertEqual( - base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, - actual_call1[1]) + base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, actual_call1[1] + ) self.assertEqual( - base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, - actual_call2[1]) + base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, actual_call2[1] + ) - @mock.patch('getpass.getpass', return_value='testpassword') - def test_add_migrate_subcloud(self, getpass): - self.client.subcloud_manager.add_subcloud.\ - return_value = [base.SUBCLOUD_RESOURCE] + @mock.patch("getpass.getpass", return_value="testpassword") + def test_add_migrate_subcloud(self, _mock_getpass): + self.client.subcloud_manager.add_subcloud.return_value = [ + base.SUBCLOUD_RESOURCE + ] - with tempfile.NamedTemporaryFile(mode='w') as f: + with tempfile.NamedTemporaryFile(mode="w") as f: yaml.dump(base.FAKE_BOOTSTRAP_VALUES, f) file_path = os.path.abspath(f.name) actual_call = self.call( - subcloud_cmd.AddSubcloud, app_args=[ - '--bootstrap-address', base.BOOTSTRAP_ADDRESS, - '--bootstrap-values', file_path, - '--migrate', - ]) - self.assertEqual( - base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, - actual_call[1]) + subcloud_cmd.AddSubcloud, + app_args=[ + "--bootstrap-address", + base.BOOTSTRAP_ADDRESS, + "--bootstrap-values", + file_path, + "--migrate", + ], + ) + self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, actual_call[1]) - @mock.patch('getpass.getpass', return_value='testpassword') - def test_add_migrate_subcloud_with_deploy_config(self, getpass): - self.client.subcloud_manager.add_subcloud.\ - return_value = [base.SUBCLOUD_RESOURCE] + @mock.patch("getpass.getpass", return_value="testpassword") + def test_add_migrate_subcloud_with_deploy_config(self, _mock_getpass): + self.client.subcloud_manager.add_subcloud.return_value = [ + base.SUBCLOUD_RESOURCE + ] - with tempfile.NamedTemporaryFile(mode='w') as f_bootstrap: + with tempfile.NamedTemporaryFile(mode="w") as f_bootstrap: bootstrap_file_path = os.path.abspath(f_bootstrap.name) with tempfile.NamedTemporaryFile() as f_config: config_file_path = os.path.abspath(f_config.name) self.assertRaises( - DCManagerClientException, self.call, - subcloud_cmd.AddSubcloud, app_args=[ - '--bootstrap-address', base.BOOTSTRAP_ADDRESS, - '--bootstrap-values', bootstrap_file_path, - '--deploy-config', config_file_path, - '--migrate', - ]) + DCManagerClientException, + self.call, + subcloud_cmd.AddSubcloud, + app_args=[ + "--bootstrap-address", + base.BOOTSTRAP_ADDRESS, + "--bootstrap-values", + bootstrap_file_path, + "--deploy-config", + config_file_path, + "--migrate", + ], + ) - @mock.patch('getpass.getpass', return_value='testpassword') - def 
test_add_migrate_subcloud_with_name_change(self, getpass): - SUBCLOUD_RESOURCE = copy.\ - copy(base.SUBCLOUD_RESOURCE_WITH_PEERID) - SUBCLOUD_RESOURCE.name = base.NAME_SC2 - self.client.subcloud_manager.add_subcloud.\ - return_value = [SUBCLOUD_RESOURCE] + @mock.patch("getpass.getpass", return_value="testpassword") + def test_add_migrate_subcloud_with_name_change(self, _mock_getpass): + subcloud_resource = copy.copy(base.SUBCLOUD_RESOURCE_WITH_PEERID) + subcloud_resource.name = base.NAME_SC2 + self.client.subcloud_manager.add_subcloud.return_value = [subcloud_resource] - with tempfile.NamedTemporaryFile(mode='w') as f: + with tempfile.NamedTemporaryFile(mode="w") as f: yaml.dump(base.FAKE_BOOTSTRAP_VALUES, f) file_path = os.path.abspath(f.name) actual_call = self.call( - subcloud_cmd.AddSubcloud, app_args=[ - '--bootstrap-address', base.BOOTSTRAP_ADDRESS, - '--bootstrap-values', file_path, - '--migrate', - '--name', base.NAME_SC2 - ]) - SUBCLOUD_FIELD_RESULT = base.\ - SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID - RESULT_LIST = list(SUBCLOUD_FIELD_RESULT) - RESULT_LIST[1] = base.NAME_SC2 - self.assertEqual(tuple(RESULT_LIST), actual_call[1]) + subcloud_cmd.AddSubcloud, + app_args=[ + "--bootstrap-address", + base.BOOTSTRAP_ADDRESS, + "--bootstrap-values", + file_path, + "--migrate", + "--name", + base.NAME_SC2, + ], + ) + subcloud_field_result = base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID + result_list = list(subcloud_field_result) + result_list[1] = base.NAME_SC2 + self.assertEqual(tuple(result_list), actual_call[1]) def test_rename_subcloud(self): - SUBCLOUD_RENAMED = copy.\ - copy(base.SUBCLOUD_RESOURCE_WITH_PEERID) - SUBCLOUD_RENAMED.name = base.NAME_SC2 - self.client.subcloud_manager.update_subcloud.\ - return_value = [SUBCLOUD_RENAMED] + subcloud_renamed = copy.copy(base.SUBCLOUD_RESOURCE_WITH_PEERID) + subcloud_renamed.name = base.NAME_SC2 + self.client.subcloud_manager.update_subcloud.return_value = [ + subcloud_renamed + ] # Rename by id actual_call1 = self.call( - subcloud_cmd.UpdateSubcloud, - app_args=[base.ID, '--name', base.NAME_SC2]) - results_by_id = \ - list(base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID) + subcloud_cmd.UpdateSubcloud, app_args=[base.ID, "--name", base.NAME_SC2] + ) + results_by_id = list(base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID) results_by_id[1] = base.NAME_SC2 # Rename by name actual_call2 = self.call( subcloud_cmd.UpdateSubcloud, - app_args=[base.NAME, '--name', base.NAME_SC2]) + app_args=[base.NAME, "--name", base.NAME_SC2], + ) - SUBCLOUD_FIELD_RESULT = base.\ - SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID - results_by_name = list(SUBCLOUD_FIELD_RESULT) + subcloud_field_result = base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID + results_by_name = list(subcloud_field_result) results_by_name[1] = base.NAME_SC2 self.assertEqual(tuple(results_by_id), actual_call1[1]) self.assertEqual(tuple(results_by_name), actual_call2[1]) def test_update_fields_values(self): - SUBCLOUD_WITH_REGION_DETAIL = copy.copy(base.SUBCLOUD_RESOURCE) - SUBCLOUD_WITH_REGION_DETAIL.region_name = base.REGION_NAME + subcloud_with_region_detail = copy.copy(base.SUBCLOUD_RESOURCE) + subcloud_with_region_detail.region_name = base.REGION_NAME - SUBCLOUD_WITH_REGION_NONE = copy.copy(base.SUBCLOUD_RESOURCE) - SUBCLOUD_WITH_REGION_NONE.region_name = None + subcloud_with_region_none = copy.copy(base.SUBCLOUD_RESOURCE) + subcloud_with_region_none.region_name = None - subcloud_cmd.update_fields_values([SUBCLOUD_WITH_REGION_DETAIL]) + subcloud_cmd.update_fields_values([subcloud_with_region_detail]) - 
self.assertEqual(SUBCLOUD_WITH_REGION_DETAIL.region_name, - SUBCLOUD_WITH_REGION_NONE.region_name) + self.assertEqual( + subcloud_with_region_detail.region_name, + subcloud_with_region_none.region_name, + ) def test_unmanage_subcloud(self): - self.client.subcloud_manager.update_subcloud.\ - return_value = [base.SUBCLOUD_RESOURCE] - actual_call = self.call( - subcloud_cmd.UnmanageSubcloud, app_args=[base.ID]) - self.assertEqual( - base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, - actual_call[1]) + self.client.subcloud_manager.update_subcloud.return_value = [ + base.SUBCLOUD_RESOURCE + ] + actual_call = self.call(subcloud_cmd.UnmanageSubcloud, app_args=[base.ID]) + self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, actual_call[1]) def test_unmanage_subcloud_with_migrate(self): - self.client.subcloud_manager.update_subcloud.\ - return_value = [base.SUBCLOUD_RESOURCE] + self.client.subcloud_manager.update_subcloud.return_value = [ + base.SUBCLOUD_RESOURCE + ] actual_call = self.call( - subcloud_cmd.UnmanageSubcloud, app_args=[base.ID, '--migrate']) - self.assertEqual( - base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, - actual_call[1]) + subcloud_cmd.UnmanageSubcloud, app_args=[base.ID, "--migrate"] + ) + self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, actual_call[1]) def test_unmanage_subcloud_without_subcloud_id(self): - self.assertRaises(SystemExit, self.call, - subcloud_cmd.UnmanageSubcloud, app_args=[]) + self.assertRaises( + SystemExit, self.call, subcloud_cmd.UnmanageSubcloud, app_args=[] + ) def test_manage_subcloud(self): - self.client.subcloud_manager.update_subcloud.\ - return_value = [base.SUBCLOUD_RESOURCE] - actual_call = self.call( - subcloud_cmd.ManageSubcloud, app_args=[base.ID]) - self.assertEqual( - base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, - actual_call[1]) + self.client.subcloud_manager.update_subcloud.return_value = [ + base.SUBCLOUD_RESOURCE + ] + actual_call = self.call(subcloud_cmd.ManageSubcloud, app_args=[base.ID]) + self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, actual_call[1]) def test_manage_subcloud_without_subcloud_id(self): - self.assertRaises(SystemExit, self.call, - subcloud_cmd.ManageSubcloud, app_args=[]) + self.assertRaises( + SystemExit, self.call, subcloud_cmd.ManageSubcloud, app_args=[] + ) def test_update_subcloud(self): - self.client.subcloud_manager.update_subcloud.\ - return_value = [base.SUBCLOUD_RESOURCE] - with tempfile.NamedTemporaryFile(mode='w') as f_bootstrap: + self.client.subcloud_manager.update_subcloud.return_value = [ + base.SUBCLOUD_RESOURCE + ] + with tempfile.NamedTemporaryFile(mode="w") as f_bootstrap: bootstrap_file_path = os.path.abspath(f_bootstrap.name) actual_call = self.call( subcloud_cmd.UpdateSubcloud, app_args=[ base.ID, - '--description', 'subcloud description', - '--location', 'subcloud location', - '--sysadmin-password', 'testpassword', - '--management-subnet', 'subcloud network subnet', - '--management-gateway-ip', 'subcloud network gateway ip', - '--management-start-ip', 'sc network start ip', - '--management-end-ip', 'subcloud network end ip', - '--bootstrap-address', 'subcloud bootstrap address', - '--bootstrap-values', bootstrap_file_path, - '--peer-group', 'peer group']) - self.assertEqual( - base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, - actual_call[1]) + "--description", + "subcloud description", + "--location", + "subcloud location", + "--sysadmin-password", + "testpassword", + "--management-subnet", + "subcloud network subnet", + "--management-gateway-ip", + "subcloud network 
gateway ip", + "--management-start-ip", + "sc network start ip", + "--management-end-ip", + "subcloud network end ip", + "--bootstrap-address", + "subcloud bootstrap address", + "--bootstrap-values", + bootstrap_file_path, + "--peer-group", + "peer group", + ], + ) + self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, actual_call[1]) - @mock.patch('getpass.getpass', return_value='testpassword') - @mock.patch('six.moves.input', return_value='redeploy') - def test_redeploy_subcloud(self, mock_input, getpass): - self.client.subcloud_manager.redeploy_subcloud. \ - return_value = [base.SUBCLOUD_RESOURCE] + @mock.patch("getpass.getpass", return_value="testpassword") + @mock.patch("six.moves.input", return_value="redeploy") + def test_redeploy_subcloud(self, _mock_input, _mock_getpass): + self.client.subcloud_manager.redeploy_subcloud.return_value = [ + base.SUBCLOUD_RESOURCE + ] - with tempfile.NamedTemporaryFile(mode='w') as bootstrap_file,\ - tempfile.NamedTemporaryFile(mode='w') as config_file,\ - tempfile.NamedTemporaryFile(mode='w') as install_file: + with tempfile.NamedTemporaryFile( + mode="w" + ) as bootstrap_file, tempfile.NamedTemporaryFile( + mode="w" + ) as config_file, tempfile.NamedTemporaryFile( + mode="w" + ) as install_file: bootstrap_file_path = os.path.abspath(bootstrap_file.name) config_file_path = os.path.abspath(config_file.name) install_file_path = os.path.abspath(install_file.name) actual_call = self.call( - subcloud_cmd.RedeploySubcloud, app_args=[ + subcloud_cmd.RedeploySubcloud, + app_args=[ base.NAME, - '--bootstrap-values', bootstrap_file_path, - '--install-values', install_file_path, - '--deploy-config', config_file_path, - '--release', base.SOFTWARE_VERSION, - ]) - self.assertEqual( - base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, - actual_call[1]) + "--bootstrap-values", + bootstrap_file_path, + "--install-values", + install_file_path, + "--deploy-config", + config_file_path, + "--release", + base.SOFTWARE_VERSION, + ], + ) + self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, actual_call[1]) - @mock.patch('getpass.getpass', return_value='testpassword') - @mock.patch('six.moves.input', return_value='redeploy') - def test_redeploy_subcloud_no_parameters(self, mock_input, getpass): - self.client.subcloud_manager.redeploy_subcloud.\ - return_value = [base.SUBCLOUD_RESOURCE] - actual_call = self.call( - subcloud_cmd.RedeploySubcloud, - app_args=[base.ID]) - self.assertEqual( - base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, - actual_call[1]) + @mock.patch("getpass.getpass", return_value="testpassword") + @mock.patch("six.moves.input", return_value="redeploy") + def test_redeploy_subcloud_no_parameters(self, _mock_input, _mock_getpass): + self.client.subcloud_manager.redeploy_subcloud.return_value = [ + base.SUBCLOUD_RESOURCE + ] + actual_call = self.call(subcloud_cmd.RedeploySubcloud, app_args=[base.ID]) + self.assertEqual(base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, actual_call[1]) - @mock.patch('getpass.getpass', return_value='testpassword') - @mock.patch('six.moves.input', return_value='redeploy') + @mock.patch("getpass.getpass", return_value="testpassword") + @mock.patch("six.moves.input", return_value="redeploy") def test_redeploy_bootstrap_files_does_not_exists( - self, mock_input, getpass): - self.client.subcloud_manager.redeploy_subcloud.\ - return_value = [base.SUBCLOUD_RESOURCE] - with tempfile.NamedTemporaryFile(mode='w') as bootstrap_file,\ - tempfile.NamedTemporaryFile(mode='w') as config_file,\ - tempfile.NamedTemporaryFile(mode='w') as 
install_file: - + self, _mock_input, _mock_getpass + ): + self.client.subcloud_manager.redeploy_subcloud.return_value = [ + base.SUBCLOUD_RESOURCE + ] + with tempfile.NamedTemporaryFile( + mode="w" + ) as bootstrap_file, tempfile.NamedTemporaryFile( + mode="w" + ) as config_file, tempfile.NamedTemporaryFile( + mode="w" + ) as install_file: bootstrap_file_path = os.path.abspath(bootstrap_file.name) config_file_path = os.path.abspath(config_file.name) install_file_path = os.path.abspath(install_file.name) - app_args_install = [base.NAME, - '--install-values', install_file_path] - app_args_bootstrap = [base.NAME, - '--bootstrap-values', bootstrap_file_path] - app_args_config = [base.NAME, '--deploy-config', config_file_path] - args_dict = {'install-values': app_args_install, - 'bootstrap-values': app_args_bootstrap, - 'deploy-config': app_args_config} + app_args_install = [base.NAME, "--install-values", install_file_path] + app_args_bootstrap = [base.NAME, "--bootstrap-values", bootstrap_file_path] + app_args_config = [base.NAME, "--deploy-config", config_file_path] + args_dict = { + "install-values": app_args_install, + "bootstrap-values": app_args_bootstrap, + "deploy-config": app_args_config, + } - for file in ['install-values', 'bootstrap-values', - 'deploy-config']: - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_cmd.RedeploySubcloud, - app_args=args_dict[file]) - self.assertTrue(f'{file} does not exist' in str(e)) + for file in ["install-values", "bootstrap-values", "deploy-config"]: + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_cmd.RedeploySubcloud, + app_args=args_dict[file], + ) + self.assertTrue(f"{file} does not exist" in str(e)) - @mock.patch('getpass.getpass', return_value='testpassword') - def test_restore_subcloud(self, getpass): + @mock.patch("getpass.getpass", return_value="testpassword") + def test_restore_subcloud(self, _mock_getpass): with tempfile.NamedTemporaryFile() as f: file_path = os.path.abspath(f.name) - e = self.assertRaises(DCManagerClientException, - self.call, - subcloud_cmd.RestoreSubcloud, - app_args=[base.ID, - '--restore-values', - file_path]) + e = self.assertRaises( + DCManagerClientException, + self.call, + subcloud_cmd.RestoreSubcloud, + app_args=[base.ID, "--restore-values", file_path], + ) - deprecation_msg = ('This command has been deprecated. Please use ' - 'subcloud-backup restore instead.') + deprecation_msg = ( + "This command has been deprecated. Please use " + "subcloud-backup restore instead." 
+ ) self.assertTrue(deprecation_msg in str(e)) - def test_prestage_with_subcloudID(self): - self.client.subcloud_manager.prestage_subcloud.\ - return_value = [base.SUBCLOUD_RESOURCE] + def test_prestage_with_subcloud_id(self): + self.client.subcloud_manager.prestage_subcloud.return_value = [ + base.SUBCLOUD_RESOURCE + ] actual_call_without_release = self.call( subcloud_cmd.PrestageSubcloud, - app_args=[base.ID, - '--sysadmin-password', 'testpassword', - '--force']) + app_args=[base.ID, "--sysadmin-password", "testpassword", "--force"], + ) self.assertEqual( base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID, - actual_call_without_release[1]) + actual_call_without_release[1], + ) - def test_prestage_without_subcloudID(self): - self.assertRaises(SystemExit, self.call, - subcloud_cmd.PrestageSubcloud, app_args=[]) + def test_prestage_without_subcloud_id(self): + self.assertRaises( + SystemExit, self.call, subcloud_cmd.PrestageSubcloud, app_args=[] + ) def test_prestage_with_release(self): - SUBCLOUD_WITH_ADDITIONAL_DETAIL = copy.copy(base.SUBCLOUD_RESOURCE) - SUBCLOUD_WITH_ADDITIONAL_DETAIL.prestage_software_version = \ + subcloud_with_additional_detail = copy.copy(base.SUBCLOUD_RESOURCE) + subcloud_with_additional_detail.prestage_software_version = ( base.SOFTWARE_VERSION - self.client.subcloud_manager.prestage_subcloud.return_value = \ - [SUBCLOUD_WITH_ADDITIONAL_DETAIL] + ) + self.client.subcloud_manager.prestage_subcloud.return_value = [ + subcloud_with_additional_detail + ] actual_call_with_release = self.call( subcloud_cmd.PrestageSubcloud, - app_args=[base.ID, - '--sysadmin-password', 'testpassword', - '--force', - '--release', base.SOFTWARE_VERSION]) + app_args=[ + base.ID, + "--sysadmin-password", + "testpassword", + "--force", + "--release", + base.SOFTWARE_VERSION, + ], + ) self.assertEqual( - base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID + - (base.SOFTWARE_VERSION,), - actual_call_with_release[1]) + base.SUBCLOUD_FIELD_RESULT_LIST_WITH_PEERID + (base.SOFTWARE_VERSION,), + actual_call_with_release[1], + ) diff --git a/distributedcloud-client/dcmanagerclient/tests/v1/test_sw_deploy_manager.py b/distributedcloud-client/dcmanagerclient/tests/v1/test_sw_deploy_manager.py index a4820f1..ff87e19 100644 --- a/distributedcloud-client/dcmanagerclient/tests/v1/test_sw_deploy_manager.py +++ b/distributedcloud-client/dcmanagerclient/tests/v1/test_sw_deploy_manager.py @@ -36,8 +36,8 @@ class TestSwDeployStrategy(UpdateStrategyMixin, base.BaseCommandTest): # mock the result of the API call strategy = utils.make_strategy( - strategy_type=expected_strategy_type, extra_args={ - "release_id": "stx-24.09.1"} + strategy_type=expected_strategy_type, + extra_args={"release_id": "stx-24.09.1"}, ) # mock that there is no pre-existing strategy @@ -46,7 +46,8 @@ class TestSwDeployStrategy(UpdateStrategyMixin, base.BaseCommandTest): # invoke the backend method for the CLI. 
# Returns a tuple of field descriptions, and a second tuple of values fields, results = self.call( - self.create_command, ["--release-id", "stx-24.09.1"]) + self.create_command, ["--release-id", "stx-24.09.1"] + ) # results is a tuple of expected length self.assertEqual(len(results), self.results_length) @@ -59,10 +60,10 @@ class TestSwDeployStrategy(UpdateStrategyMixin, base.BaseCommandTest): # - state # - created_at # - updated_at - + failure_index = fields.index("stop on failure") self.assertEqual(results[0], expected_strategy_type) - self.assertEqual(fields[-4], "release_id") - self.assertEqual(results[-4], "stx-24.09.1") + self.assertEqual(fields[failure_index + 1], "release_id") + self.assertEqual(results[failure_index + 1], "stx-24.09.1") def test_create_strategy_without_release_id(self): """Test deploy strategy should not be created without --release-id""" diff --git a/distributedcloud-client/dcmanagerclient/utils.py b/distributedcloud-client/dcmanagerclient/utils.py index 6312238..f2306b1 100644 --- a/distributedcloud-client/dcmanagerclient/utils.py +++ b/distributedcloud-client/dcmanagerclient/utils.py @@ -19,10 +19,9 @@ import getpass import json import os -import yaml +from urllib import parse, request -from six.moves.urllib import parse -from six.moves.urllib import request +import yaml from dcmanagerclient import exceptions @@ -44,7 +43,7 @@ def do_action_on_many(action, resources, success_msg, error_msg): def load_content(content): - if content is None or content == '': + if content is None or content == "": return dict() try: @@ -64,7 +63,7 @@ def get_contents_if_file(contents_or_file_name): """ if os.path.isdir(contents_or_file_name): - error_msg = "Error: %s is a directory." % contents_or_file_name + error_msg = f"Error: {contents_or_file_name} is a directory." raise exceptions.DCManagerClientException(error_msg) try: @@ -72,61 +71,61 @@ def get_contents_if_file(contents_or_file_name): definition_url = contents_or_file_name else: path = os.path.abspath(contents_or_file_name) - definition_url = parse.urljoin( - 'file:', - request.pathname2url(path) - ) - return request.urlopen(definition_url).read().decode('utf8') + definition_url = parse.urljoin("file:", request.pathname2url(path)) + return request.urlopen(definition_url).read().decode("utf8") except Exception as e: raise exceptions.DCManagerClientException( - "Error: Could not open file %s: %s" % (contents_or_file_name, e)) + f"Error: Could not open file {contents_or_file_name}: {e}" + ) -def prompt_for_password(password_type='sysadmin', item_type='subcloud'): +def prompt_for_password(password_type="sysadmin", item_type="subcloud"): while True: try: password = getpass.getpass( - f"Enter the {password_type} password for the {item_type}: ") + f"Enter the {password_type} password for the {item_type}: " + ) if len(password) < 1: print("Password cannot be empty") continue confirm = getpass.getpass( - f"Re-enter {password_type} password to confirm: ") + f"Re-enter {password_type} password to confirm: " + ) if password != confirm: print("Passwords did not match") continue break - except KeyboardInterrupt: + except KeyboardInterrupt as e: raise exceptions.DCManagerClientException( "\nPassword prompt interrupted." 
- ) + ) from e return password def subcloud_detail_format(subcloud=None): columns = ( - 'id', - 'name', - 'description', - 'location', - 'software_version', - 'management', - 'availability', - 'deploy_status', - 'management_subnet', - 'management_start_ip', - 'management_end_ip', - 'management_gateway_ip', - 'systemcontroller_gateway_ip', - 'group_id', - 'peer_group_id', - 'created_at', - 'updated_at', - 'backup_status', - 'backup_datetime', - 'prestage_status', - 'prestage_versions' + "id", + "name", + "description", + "location", + "software_version", + "management", + "availability", + "deploy_status", + "management_subnet", + "management_start_ip", + "management_end_ip", + "management_gateway_ip", + "systemcontroller_gateway_ip", + "group_id", + "peer_group_id", + "created_at", + "updated_at", + "backup_status", + "backup_datetime", + "prestage_status", + "prestage_versions", ) if subcloud: @@ -151,20 +150,19 @@ def subcloud_detail_format(subcloud=None): subcloud.backup_status, subcloud.backup_datetime, subcloud.prestage_status, - subcloud.prestage_versions + subcloud.prestage_versions, ) - for _listitem, sync_status in enumerate(subcloud.endpoint_sync_status): - added_field = (sync_status['endpoint_type'] + - "_sync_status",) - added_value = (sync_status['sync_status'],) + for _, sync_status in enumerate(subcloud.endpoint_sync_status): + added_field = (sync_status["endpoint_type"] + "_sync_status",) + added_value = (sync_status["sync_status"],) columns += tuple(added_field) data += tuple(added_value) if subcloud.oam_floating_ip != "unavailable": - columns += ('oam_floating_ip',) + columns += ("oam_floating_ip",) data += (subcloud.oam_floating_ip,) else: - data = (('',) * len(columns),) + data = (("",) * len(columns),) return columns, data diff --git a/distributedcloud-client/requirements.txt b/distributedcloud-client/requirements.txt index ad7dbe6..04217e5 100644 --- a/distributedcloud-client/requirements.txt +++ b/distributedcloud-client/requirements.txt @@ -1,6 +1,8 @@ # The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. + +beautifulsoup4 # MIT cliff>=2.3.0 # Apache-2.0 osc-lib>=1.2.0 # Apache-2.0 osprofiler>=1.4.0 # Apache-2.0 @@ -8,6 +10,5 @@ pbr>=2.0.0 # Apache-2.0 python-keystoneclient>=3.8.0 # Apache-2.0 PyYAML>=3.10.0 # MIT requests!=2.12.2,!=2.13.0,>=2.10.0 # Apache-2.0 +requests-toolbelt # Apache-2.0 six>=1.9.0 # MIT -beautifulsoup4 -requests-toolbelt diff --git a/distributedcloud-client/test-requirements-debian.txt b/distributedcloud-client/test-requirements-debian.txt deleted file mode 100644 index c33906f..0000000 --- a/distributedcloud-client/test-requirements-debian.txt +++ /dev/null @@ -1,20 +0,0 @@ -# The order of packages is significant, because pip processes them in the order -# of appearance. Changing the order has an impact on the overall integration -# process, which may cause wedges in the gate later. 
-hacking>=1.1.0,<=2.0.0 # Apache-2.0 -astroid==2.11.5;python_version>"3.7" # GPLv2 -isort<5;python_version>="3.5" -pylint==2.14.1;python_version>"3.7" # GPLv2 -python-openstackclient>=3.3.0 # Apache-2.0 -sphinx>=1.5.1;python_version<="3.6" # BSD -sphinx>=1.6.2;python_version>="3.6" # BSD -fixtures>=3.0.0 # Apache-2.0/BSD -mock>=2.0 # BSD -tempest>=14.0.0 # Apache-2.0 -testtools>=1.4.0 # MIT -PyYAML>=3.1.0 -yamllint<1.26.1;python_version>="3.0" # GPLv2 -python-dev-tools;python_version>="3.9" -beautifulsoup4;python_version>="3.9" -osprofiler;python_version>="3.8" -requests_toolbelt;python_version>="3.0" diff --git a/distributedcloud-client/test-requirements.txt b/distributedcloud-client/test-requirements.txt index 7369ef0..24d0d6e 100644 --- a/distributedcloud-client/test-requirements.txt +++ b/distributedcloud-client/test-requirements.txt @@ -1,18 +1,11 @@ # The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. -hacking!=0.13.0,<0.14,>=0.12.0 -astroid<= 2.2.5;python_version>="3.0" # GPLv2 -isort<5;python_version>="3.5" -pylint<2.1.0;python_version<"3.0" # GPLv2 -pylint<2.3.0;python_version>="3.0" # GPLv2 -python-openstackclient>=3.3.0 # Apache-2.0 -sphinx>=1.5.1;python_version<="3.6" # BSD -sphinx>=1.6.2;python_version>="3.6" # BSD -fixtures>=3.0.0 # Apache-2.0/BSD + +coverage!=4.4,>=4.0 # Apache-2.0 +flake8 # MIT mock>=2.0 # BSD +pylint==2.14.1 # GPLv2 +python-dev-tools # Apache-2.0 tempest>=14.0.0 # Apache-2.0 testtools>=1.4.0 # MIT -PyYAML>=3.1.0 -yamllint<1.26.1;python_version>="3.0" # GPLv2 -python-dev-tools;python_version>="3.9" \ No newline at end of file diff --git a/distributedcloud-client/tox.ini b/distributedcloud-client/tox.ini index f018b19..2747084 100644 --- a/distributedcloud-client/tox.ini +++ b/distributedcloud-client/tox.ini @@ -1,6 +1,6 @@ [tox] -minversion = 2.3 envlist = py39,pep8,pylint +minversion = 2.3 skipsdist = True toxworkdir = /tmp/{env:USER}_dc_client_tox @@ -19,12 +19,12 @@ setenv = OS_STDERR_CAPTURE=1 OS_TEST_TIMEOUT=60 -deps = -r{toxinidir}/test-requirements-debian.txt +deps = -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt + allowlist_externals = rm find - reno [testenv:py39] skipdist = False @@ -33,24 +33,28 @@ commands = stestr --test-path={[dcclient]client_base_dir}/dcmanagerclient/tests run '{posargs}' [testenv:pep8] -deps = -r{toxinidir}/test-requirements-debian.txt commands = flake8 -[testenv:pylint] -deps = -r{toxinidir}/test-requirements-debian.txt -commands = - pylint {posargs} dcmanagerclient --rcfile=./pylint.rc +[flake8] +# E203 whitespace before ':' +# W503 line break before binary operator +# W504 line break after binary operator +# W605 invalid escape sequence +# E203 and W503 are not compatible with Black formatter +show-source = True +max-line-length = 85 +ignore = E203, W503, W504, W605 +builtins = _ +exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build -[testenv:venv] -basepython = python3 -commands = {posargs} +[testenv:pylint] +commands = + pylint {posargs} dcmanagerclient --rcfile=./.pylintrc [testenv:cover] setenv = PYTHON=coverage run --parallel-mode PYTHONDONTWRITEBYTECODE=True -deps = -r{toxinidir}/test-requirements-debian.txt - coverage commands = find {toxinidir} -not -path '{toxinidir}/.tox/*' -name '*.py[c|o]' -delete coverage erase @@ -61,51 +65,4 @@ commands = coverage report [testenv:debug] -basepython = python3 commands = oslo_debug_helper {posargs} 
- -[flake8] -# E123, E125 skipped as they are invalid PEP-8. -# W504 line break after binary operator -# W605 invalid escape sequence -show-source = True -ignore = E123,E125,W504,W605,H102 -builtins = _ -exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build - -[testenv:linters] -basepython = python3 -# bashate ignore: -# E006 - accept long lines -# E040 - false positive on |& syntax (new in bash 4) -allowlist_externals = bash -commands = - bash -c "find {toxinidir} \ - \( -name .tox -prune \) \ - -o -type f -name '*.yaml' \ - -print0 | xargs -0 yamllint" - -[testenv:docs] -basepython = python3 -deps = -r{toxinidir}/doc/requirements.txt -commands = - rm -rf doc/build - sphinx-build -a -E -W -d doc/build/doctrees -b html doc/source doc/build/html -allowlist_externals = rm - -[testenv:releasenotes] -basepython = python3 -deps = -r{toxinidir}/doc/requirements.txt -commands = - rm -rf releasenotes/build - sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html -allowlist_externals = - rm - reno - -[testenv:newnote] -basepython = python3 -# Re-use the releasenotes venv -envdir = {toxworkdir}/releasenotes -deps = -r{toxinidir}/doc/requirements.txt -commands = reno new {posargs} diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index e47d169..0000000 --- a/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT -stevedore>=1.20.0 # Apache-2.0 diff --git a/test-requirements.txt b/test-requirements.txt deleted file mode 100644 index 18d4eaa..0000000 --- a/test-requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -PyYAML>=3.1.0 -yamllint>=0.5.2 -mock>=2.0 # BSD -isort<5;python_version>="3.5" -bandit;python_version>="3.5" diff --git a/tox.ini b/tox.ini index e4ebc17..4d7da7e 100644 --- a/tox.ini +++ b/tox.ini @@ -4,8 +4,9 @@ minversion = 2.3 skipsdist = True [testenv] +basepython = python3 install_command = pip install \ - -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/stable/stein/upper-constraints.txt} \ + -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/starlingx/root/raw/branch/master/build-tools/requirements/debian/upper-constraints.txt} \ {opts} {packages} setenv = VIRTUAL_ENV={envdir} @@ -15,84 +16,51 @@ setenv = CURRENT_CFG_FILE={toxinidir}/.current.cfg commands = find {toxinidir} -type f -not -path '{toxinidir}/.tox/*' -not -path '*/__pycache__/*' -name '*.py[c|o]' -delete -deps = -r{toxinidir}/test-requirements.txt - -r{toxinidir}/requirements.txt - keyring - allowlist_externals = rm find reno -[testenv:venv] -basepython = python3 -commands = {posargs} - -[testenv:flake8] -basepython = python3 -description = Dummy environment to allow flake8 to be run in subdir tox - -[testenv:pep8_Debian] -basepython = python3 -description = Dummy environment to allow pep8 to be run in subdir tox - [testenv:pep8] -basepython = python3 description = Dummy environment to allow pep8 to be run in subdir tox [testenv:pylint] -basepython = python3 description = Dummy environment to allow pylint to be run in subdir tox -[testenv:pylint_Debian] -basepython = python3 -description = Dummy environment to allow pylint to be run in subdir tox - -[testenv:linters] -basepython = python3 -allowlist_externals = bash -commands = - bash -c "find {toxinidir} \ - \( -name .tox -prune \) \ - -o -type f -name '*.yaml' \ - -print0 | xargs -0 yamllint" - [testenv:docs] -basepython = python3 deps = -r{toxinidir}/doc/requirements.txt commands = rm -rf doc/build sphinx-build -a -E 
-W -d doc/build/doctrees -b html doc/source doc/build/html -allowlist_externals = rm [testenv:releasenotes] -basepython = python3 deps = -r{toxinidir}/doc/requirements.txt commands = rm -rf releasenotes/build sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html -allowlist_externals = - rm - reno [testenv:newnote] -basepython = python3 # Re-use the releasenotes venv envdir = {toxworkdir}/releasenotes deps = -r{toxinidir}/doc/requirements.txt commands = reno --rel-notes-dir {toxinidir}/releasenotes new {posargs} [testenv:api-ref] -basepython = python3 -deps = - -r{toxinidir}/doc/requirements.txt +deps = -r{toxinidir}/doc/requirements.txt commands = rm -rf api-ref/build sphinx-build -W -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html -allowlist_externals = rm [testenv:bandit] -basepython = python3 description = Bandit code scan for *.py files under config folder -deps = -r{toxinidir}/test-requirements.txt +deps = bandit commands = bandit -r {toxinidir}/ -x '**/.tox/**,**/.eggs/**' -lll + +[testenv:linters] +allowlist_externals = bash +deps = yamllint +commands = + bash -c "find {toxinidir} \ + \( -name .tox -prune \) \ + -o -type f -name '*.yaml' \ + -print0 | xargs -0 yamllint"