From 7f3827f24d2fb3cb546d3caf71d505d23187b0dc Mon Sep 17 00:00:00 2001
From: Tao Liu
Date: Thu, 12 Mar 2020 09:46:29 -0400
Subject: [PATCH] Keystone token and resource caching

Add the following misc. changes to the dcorch and dcmanager components:

- Cache the master resource in the dcorch audit
- Consolidate the openstack drivers into a common module and combine the
  dcmanager and dcorch sysinv clients. (Note: the sdk driver used by nova,
  neutron and cinder will be cleaned up as part of story 2006588.)
- Update the common sdk driver to:
  . avoid creating a new keystone client multiple times
  . add an option for caching region clients, in addition to the keystone
    client
  . randomize the token early renewal duration
- Change the subcloud audit manager, patch audit manager, and sw update
  manager to use the sdk driver, which caches the keystone client and token

Test cases:
1. Manage/unmanage subclouds
2. Platform resources sync and audit
3. Verify the keystone token is cached until it expires
4. Add/delete subclouds
5. Managed subcloud goes offline/online (power off/on)
6. Managed subcloud goes offline/online (delete/add a static route)
7. Apply a patch to all subclouds via patch orchestration

Story: 2007267
Task: 38865
Change-Id: I75e0cf66a797a65faf75e7c64dafb07f54c2df06
Signed-off-by: Tao Liu
---
 centos_iso_image.inc | 1 +
 distributedcloud/.testr.conf | 1 +
 distributedcloud/centos/distributedcloud.spec | 13 +
 distributedcloud/dccommon/LICENSE | 176 ++++++++++++++
 distributedcloud/dccommon/__init__.py | 25 ++
 distributedcloud/dccommon/consts.py | 38 +++
 .../drivers/README.rst | 0
 .../drivers/__init__.py | 0
 .../{dcmanager => dccommon}/drivers/base.py | 8 +-
 .../drivers/openstack/__init__.py | 0
 .../drivers/openstack/fm.py | 17 +-
 .../drivers/openstack/keystone_v3.py | 12 +-
 .../drivers/openstack/patching_v1.py | 4 +-
 .../drivers/openstack/sdk_platform.py | 222 +++++++++++++++++
 .../drivers/openstack/sysinv_v1.py | 225 +++++++++++++-----
 .../drivers/openstack/vim.py | 9 +-
 .../common => dccommon}/endpoint_cache.py | 9 +-
 distributedcloud/dccommon/exceptions.py | 105 ++++++++
 .../drivers => dccommon/tests}/__init__.py | 0
 distributedcloud/dccommon/tests/base.py | 32 +++
 .../dccommon/tests/unit/__init__.py | 0
 .../dccommon/tests/unit/drivers/__init__.py | 0
 .../tests/unit/drivers/test_keystone_v3.py | 11 +-
 .../tests/unit/drivers/test_sdk_platform.py | 45 ++++
 .../tests/unit/drivers/test_sysinv_v1.py | 15 +-
 .../tests/unit}/test_endpoint_cache.py | 25 +-
 distributedcloud/dccommon/tests/utils.py | 59 +++++
 .../dcmanager/api/controllers/v1/subclouds.py | 9 +-
 .../api/controllers/v1/sw_update_options.py | 3 +-
 distributedcloud/dcmanager/common/consts.py | 2 -
 .../dcmanager/common/exceptions.py | 4 -
 distributedcloud/dcmanager/common/utils.py | 8 +-
 .../versions/001_first_version.py | 4 +-
 .../dcmanager/drivers/openstack/sysinv_v1.py | 163 -------------
 .../dcmanager/manager/patch_audit_manager.py | 29 ++-
 .../manager/subcloud_audit_manager.py | 20 +-
 .../dcmanager/manager/subcloud_install.py | 4 +-
 .../dcmanager/manager/subcloud_manager.py | 5 +-
 .../dcmanager/manager/sw_update_manager.py | 21 +-
 .../unit/manager/test_patch_audit_manager.py | 22 +-
 .../manager/test_subcloud_audit_manager.py | 8 +-
 distributedcloud/dcmanager/tests/utils.py | 25 --
 .../dcorch/api/proxy/apps/filter.py | 4 +-
 .../dcorch/api/proxy/common/utils.py | 11 +-
 distributedcloud/dcorch/common/consts.py | 15 --
 distributedcloud/dcorch/common/exceptions.py | 24 --
 .../dcorch/drivers/openstack/sdk.py | 15 +-
.../dcorch/drivers/openstack/sdk_platform.py | 181 -------------- .../dcorch/engine/alarm_aggregate_manager.py | 14 +- .../dcorch/engine/fernet_key_manager.py | 18 +- .../dcorch/engine/quota_manager.py | 14 +- distributedcloud/dcorch/engine/service.py | 5 +- distributedcloud/dcorch/engine/subcloud.py | 3 +- .../dcorch/engine/sync_services/compute.py | 7 +- .../dcorch/engine/sync_services/identity.py | 10 +- .../dcorch/engine/sync_services/network.py | 7 +- .../dcorch/engine/sync_services/sysinv.py | 23 +- .../dcorch/engine/sync_services/volume.py | 7 +- distributedcloud/dcorch/engine/sync_thread.py | 49 ++-- distributedcloud/dcorch/snmp/controller.py | 4 +- distributedcloud/setup.cfg | 1 + distributedcloud/tox.ini | 2 +- 62 files changed, 1147 insertions(+), 646 deletions(-) create mode 100644 distributedcloud/dccommon/LICENSE create mode 100644 distributedcloud/dccommon/__init__.py create mode 100644 distributedcloud/dccommon/consts.py rename distributedcloud/{dcmanager => dccommon}/drivers/README.rst (100%) rename distributedcloud/{dcmanager => dccommon}/drivers/__init__.py (100%) rename distributedcloud/{dcmanager => dccommon}/drivers/base.py (78%) rename distributedcloud/{dcmanager => dccommon}/drivers/openstack/__init__.py (100%) rename distributedcloud/{dcorch => dccommon}/drivers/openstack/fm.py (76%) rename distributedcloud/{dcorch => dccommon}/drivers/openstack/keystone_v3.py (93%) rename distributedcloud/{dcmanager => dccommon}/drivers/openstack/patching_v1.py (98%) create mode 100644 distributedcloud/dccommon/drivers/openstack/sdk_platform.py rename distributedcloud/{dcorch => dccommon}/drivers/openstack/sysinv_v1.py (68%) rename distributedcloud/{dcmanager => dccommon}/drivers/openstack/vim.py (96%) rename distributedcloud/{dcorch/common => dccommon}/endpoint_cache.py (96%) create mode 100644 distributedcloud/dccommon/exceptions.py rename distributedcloud/{dcmanager/tests/unit/drivers => dccommon/tests}/__init__.py (100%) create mode 100644 distributedcloud/dccommon/tests/base.py create mode 100644 distributedcloud/dccommon/tests/unit/__init__.py create mode 100644 distributedcloud/dccommon/tests/unit/drivers/__init__.py rename distributedcloud/{dcmanager => dccommon}/tests/unit/drivers/test_keystone_v3.py (95%) create mode 100644 distributedcloud/dccommon/tests/unit/drivers/test_sdk_platform.py rename distributedcloud/{dcmanager => dccommon}/tests/unit/drivers/test_sysinv_v1.py (93%) rename distributedcloud/{dcmanager/tests/unit/common => dccommon/tests/unit}/test_endpoint_cache.py (87%) create mode 100644 distributedcloud/dccommon/tests/utils.py delete mode 100644 distributedcloud/dcmanager/drivers/openstack/sysinv_v1.py delete mode 100644 distributedcloud/dcorch/drivers/openstack/sdk_platform.py diff --git a/centos_iso_image.inc b/centos_iso_image.inc index 45ace887c..afe3256c0 100644 --- a/centos_iso_image.inc +++ b/centos_iso_image.inc @@ -1,4 +1,5 @@ # distributedcloud +distributedcloud-dccommon distributedcloud-dcmanager distributedcloud-dcorch distributedcloud-dcdbsync diff --git a/distributedcloud/.testr.conf b/distributedcloud/.testr.conf index 7c167d762..f0138c0c7 100644 --- a/distributedcloud/.testr.conf +++ b/distributedcloud/.testr.conf @@ -6,6 +6,7 @@ test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} PYTHON=$(echo ${PYTHON:-python} | sed 's/--source distributedcloud//g') + ${PYTHON} -m subunit.run discover -s dccommon $LISTOPT $IDOPTION ${PYTHON} -m subunit.run discover -s 
dcmanager $LISTOPT $IDOPTION ${PYTHON} -m subunit.run discover -s dcorch $LISTOPT $IDOPTION test_id_option=--load-list $IDFILE diff --git a/distributedcloud/centos/distributedcloud.spec b/distributedcloud/centos/distributedcloud.spec index ab57ca268..51b5e7ccb 100644 --- a/distributedcloud/centos/distributedcloud.spec +++ b/distributedcloud/centos/distributedcloud.spec @@ -70,6 +70,13 @@ BuildRequires: python-babel %description Distributed Cloud provides configuration and management of distributed clouds +# DC Common +%package dccommon +Summary: DC common module + +%description dccommon +Distributed Cloud Common Module + # DC Manager %package dcmanager Summary: DC Manager @@ -163,6 +170,12 @@ install -p -D -m 640 %{_builddir}/%{pypi_name}-%{version}%{_sysconfdir}/dcdbsync # install ansible overrides dir install -d -m 600 ${RPM_BUILD_ROOT}/opt/dc/ansible +%files dccommon +%license LICENSE +%{python2_sitelib}/dccommon* +%{python2_sitelib}/distributedcloud-*.egg-info +%exclude %{python2_sitelib}/dccommon/tests + %files dcmanager %license LICENSE %{python2_sitelib}/dcmanager* diff --git a/distributedcloud/dccommon/LICENSE b/distributedcloud/dccommon/LICENSE new file mode 100644 index 000000000..68c771a09 --- /dev/null +++ b/distributedcloud/dccommon/LICENSE @@ -0,0 +1,176 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + diff --git a/distributedcloud/dccommon/__init__.py b/distributedcloud/dccommon/__init__.py new file mode 100644 index 000000000..8b398cff5 --- /dev/null +++ b/distributedcloud/dccommon/__init__.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2020 Wind River Systems, Inc. +# +# The right to copy, distribute, modify, or otherwise make use +# of this software may be licensed only pursuant to the terms +# of an applicable Wind River license agreement. +# + +import pbr.version + + +__version__ = pbr.version.VersionInfo('distributedcloud').version_string() diff --git a/distributedcloud/dccommon/consts.py b/distributedcloud/dccommon/consts.py new file mode 100644 index 000000000..5bc016bff --- /dev/null +++ b/distributedcloud/dccommon/consts.py @@ -0,0 +1,38 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2020 Wind River Systems, Inc. +# +# The right to copy, distribute, modify, or otherwise make use +# of this software may be licensed only pursuant to the terms +# of an applicable Wind River license agreement. 
+# + +SECONDS_IN_HOUR = 3600 + +KS_ENDPOINT_ADMIN = "admin" +KS_ENDPOINT_INTERNAL = "internal" +KS_ENDPOINT_DEFAULT = KS_ENDPOINT_INTERNAL + +ENDPOINT_TYPE_IDENTITY_OS = "identity_openstack" + +# openstack endpoint types +ENDPOINT_TYPES_LIST_OS = [ENDPOINT_TYPE_IDENTITY_OS] + +# distributed Cloud constants +CLOUD_0 = "RegionOne" +VIRTUAL_MASTER_CLOUD = "SystemController" + +SW_UPDATE_DEFAULT_TITLE = "all clouds default" + +USER_HEADER_VALUE = "distcloud" +USER_HEADER = {'User-Header': USER_HEADER_VALUE} diff --git a/distributedcloud/dcmanager/drivers/README.rst b/distributedcloud/dccommon/drivers/README.rst similarity index 100% rename from distributedcloud/dcmanager/drivers/README.rst rename to distributedcloud/dccommon/drivers/README.rst diff --git a/distributedcloud/dcmanager/drivers/__init__.py b/distributedcloud/dccommon/drivers/__init__.py similarity index 100% rename from distributedcloud/dcmanager/drivers/__init__.py rename to distributedcloud/dccommon/drivers/__init__.py diff --git a/distributedcloud/dcmanager/drivers/base.py b/distributedcloud/dccommon/drivers/base.py similarity index 78% rename from distributedcloud/dcmanager/drivers/base.py rename to distributedcloud/dccommon/drivers/base.py index f6b501e1d..d97aff3b9 100644 --- a/distributedcloud/dcmanager/drivers/base.py +++ b/distributedcloud/dccommon/drivers/base.py @@ -9,7 +9,13 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. - +# +# Copyright (c) 2017-2020 Wind River Systems, Inc. +# +# The right to copy, distribute, modify, or otherwise make use +# of this software may be licensed only pursuant to the terms +# of an applicable Wind River license agreement. +# """ Base class for all drivers. """ diff --git a/distributedcloud/dcmanager/drivers/openstack/__init__.py b/distributedcloud/dccommon/drivers/openstack/__init__.py similarity index 100% rename from distributedcloud/dcmanager/drivers/openstack/__init__.py rename to distributedcloud/dccommon/drivers/openstack/__init__.py diff --git a/distributedcloud/dcorch/drivers/openstack/fm.py b/distributedcloud/dccommon/drivers/openstack/fm.py similarity index 76% rename from distributedcloud/dcorch/drivers/openstack/fm.py rename to distributedcloud/dccommon/drivers/openstack/fm.py index 11dd5783d..ac97160d0 100644 --- a/distributedcloud/dcorch/drivers/openstack/fm.py +++ b/distributedcloud/dccommon/drivers/openstack/fm.py @@ -10,16 +10,22 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2018 Wind River Systems, Inc. # - +# Copyright (c) 2018-2020 Wind River Systems, Inc. +# +# The right to copy, distribute, modify, or otherwise make use +# of this software may be licensed only pursuant to the terms +# of an applicable Wind River license agreement. 
+# from oslo_log import log import fmclient -from dcorch.common import exceptions -from dcorch.drivers import base +from dccommon import consts as dccommon_consts +from dccommon.drivers import base +from dccommon import exceptions + LOG = log.getLogger(__name__) API_VERSION = '1' @@ -28,7 +34,8 @@ API_VERSION = '1' class FmClient(base.DriverBase): """Fault Management driver.""" - def __init__(self, region, session, endpoint_type): + def __init__(self, region, session, + endpoint_type=dccommon_consts.KS_ENDPOINT_DEFAULT): self.region_name = region try: self.fm = fmclient.Client(API_VERSION, diff --git a/distributedcloud/dcorch/drivers/openstack/keystone_v3.py b/distributedcloud/dccommon/drivers/openstack/keystone_v3.py similarity index 93% rename from distributedcloud/dcorch/drivers/openstack/keystone_v3.py rename to distributedcloud/dccommon/drivers/openstack/keystone_v3.py index 1204c3d9c..a33d53a87 100644 --- a/distributedcloud/dcorch/drivers/openstack/keystone_v3.py +++ b/distributedcloud/dccommon/drivers/openstack/keystone_v3.py @@ -12,14 +12,20 @@ # License for the specific language governing permissions and limitations # under the License. # +# Copyright (c) 2017-2020 Wind River Systems, Inc. +# +# The right to copy, distribute, modify, or otherwise make use +# of this software may be licensed only pursuant to the terms +# of an applicable Wind River license agreement. +# from keystoneauth1 import exceptions as keystone_exceptions from keystoneclient.v3.contrib import endpoint_filter from oslo_utils import importutils -from dcorch.common.endpoint_cache import EndpointCache -from dcorch.common import exceptions -from dcorch.drivers import base +from dccommon.drivers import base +from dccommon.endpoint_cache import EndpointCache +from dccommon import exceptions # Ensure keystonemiddleware options are imported importutils.import_module('keystonemiddleware.auth_token') diff --git a/distributedcloud/dcmanager/drivers/openstack/patching_v1.py b/distributedcloud/dccommon/drivers/openstack/patching_v1.py similarity index 98% rename from distributedcloud/dcmanager/drivers/openstack/patching_v1.py rename to distributedcloud/dccommon/drivers/openstack/patching_v1.py index 073afbe5f..26598773a 100644 --- a/distributedcloud/dcmanager/drivers/openstack/patching_v1.py +++ b/distributedcloud/dccommon/drivers/openstack/patching_v1.py @@ -12,7 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2017 Wind River Systems, Inc. +# Copyright (c) 2017-2020 Wind River Systems, Inc. # # The right to copy, distribute, modify, or otherwise make use # of this software may be licensed only pursuant to the terms @@ -23,7 +23,7 @@ from oslo_log import log import requests from requests_toolbelt import MultipartEncoder -from dcmanager.drivers import base +from dccommon.drivers import base LOG = log.getLogger(__name__) diff --git a/distributedcloud/dccommon/drivers/openstack/sdk_platform.py b/distributedcloud/dccommon/drivers/openstack/sdk_platform.py new file mode 100644 index 000000000..9bbb8010c --- /dev/null +++ b/distributedcloud/dccommon/drivers/openstack/sdk_platform.py @@ -0,0 +1,222 @@ +# Copyright 2017-2020 Wind River Inc + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +OpenStack Driver +""" +import collections +import random + +from oslo_concurrency import lockutils +from oslo_log import log +from oslo_utils import timeutils + +from dccommon import consts +from dccommon.drivers.openstack.fm import FmClient +from dccommon.drivers.openstack.keystone_v3 import KeystoneClient +from dccommon.drivers.openstack.sysinv_v1 import SysinvClient +from dccommon import exceptions + + +# Gap, in seconds, to determine whether the given token is about to expire +# These values are used to randomize the token early renewal duration and +# to distribute the new keystone creation to different audit cycles +STALE_TOKEN_DURATION_MIN = 40 +STALE_TOKEN_DURATION_MAX = 120 +STALE_TOKEN_DURATION_STEP = 20 + +KEYSTONE_CLIENT_NAME = 'keystone' +SYSINV_CLIENT_NAME = 'sysinv' +FM_CLIENT_NAME = 'fm' + +LOG = log.getLogger(__name__) + +LOCK_NAME = 'dc-openstackdriver-platform' + +SUPPORTED_REGION_CLIENTS = [ + SYSINV_CLIENT_NAME, + FM_CLIENT_NAME +] + +# region client type and class mappings +region_client_class_map = { + SYSINV_CLIENT_NAME: SysinvClient, + FM_CLIENT_NAME: FmClient, +} + + +class OpenStackDriver(object): + + os_clients_dict = collections.defaultdict(dict) + _identity_tokens = {} + + def __init__(self, region_name=consts.CLOUD_0, thread_name='dcorch', + auth_url=None, region_clients=SUPPORTED_REGION_CLIENTS): + # Check if objects are cached and try to use those + self.region_name = region_name + self.keystone_client = None + self.sysinv_client = None + self.fm_client = None + + if region_clients: + # check if the requested clients are in the supported client list + result = all(c in SUPPORTED_REGION_CLIENTS for c in region_clients) + if not result: + message = ("Requested clients are not supported: %s" % + ' '.join(region_clients)) + LOG.error(message) + raise exceptions.InvalidInputError + + self.get_cached_keystone_client(region_name) + if self.keystone_client is None: + LOG.info("get new keystone client for subcloud %s", region_name) + try: + self.keystone_client = KeystoneClient(region_name, auth_url) + OpenStackDriver.update_region_clients(region_name, + KEYSTONE_CLIENT_NAME, + self.keystone_client) + except Exception as exception: + LOG.error('keystone_client region %s error: %s' % + (region_name, exception.message)) + raise exception + + if region_clients: + self.get_cached_region_clients_for_thread(region_name, + thread_name, + region_clients) + for client_name in region_clients: + client_obj_name = client_name + '_client' + if getattr(self, client_obj_name) is None: + # Create new client object and cache it + try: + client_object = region_client_class_map[client_name]( + region_name, self.keystone_client.session) + setattr(self, client_obj_name, client_object) + OpenStackDriver.update_region_clients(region_name, + client_name, + client_object, + thread_name) + except Exception as exception: + LOG.error('Region %s client %s thread %s error: %s' % + (region_name, client_name, thread_name, + exception.message)) + raise exception + + @lockutils.synchronized(LOCK_NAME) + def get_cached_keystone_client(self, region_name): + if ((region_name in 
OpenStackDriver.os_clients_dict) and + (KEYSTONE_CLIENT_NAME in + OpenStackDriver.os_clients_dict[region_name]) and + self._is_token_valid(region_name)): + self.keystone_client = (OpenStackDriver.os_clients_dict + [region_name][KEYSTONE_CLIENT_NAME]) + + @lockutils.synchronized(LOCK_NAME) + def get_cached_region_clients_for_thread(self, region_name, thread_name, + clients): + if ((region_name in OpenStackDriver.os_clients_dict) and + (thread_name in OpenStackDriver.os_clients_dict[ + region_name])): + for client in clients: + if client in (OpenStackDriver.os_clients_dict[region_name] + [thread_name]): + LOG.debug('Using cached OS %s client objects %s %s' % + (client, region_name, thread_name)) + client_obj = (OpenStackDriver.os_clients_dict[region_name] + [thread_name][client]) + setattr(self, client + '_client', client_obj) + else: + OpenStackDriver.os_clients_dict[region_name][thread_name] = {} + + @classmethod + @lockutils.synchronized(LOCK_NAME) + def update_region_clients(cls, region_name, client_name, client_object, + thread_name=None): + if thread_name is not None: + cls.os_clients_dict[region_name][thread_name][client_name] = \ + client_object + else: + cls.os_clients_dict[region_name][client_name] = client_object + + @classmethod + @lockutils.synchronized(LOCK_NAME) + def delete_region_clients(cls, region_name, clear_token=False): + LOG.warn("delete_region_clients=%s, clear_token=%s" % + (region_name, clear_token)) + if region_name in cls.os_clients_dict: + del cls.os_clients_dict[region_name] + if clear_token: + cls._identity_tokens[region_name] = None + + @classmethod + @lockutils.synchronized(LOCK_NAME) + def delete_region_clients_for_thread(cls, region_name, thread_name): + LOG.debug("delete_region_clients=%s, thread_name=%s" % + (region_name, thread_name)) + if (region_name in cls.os_clients_dict and + thread_name in cls.os_clients_dict[region_name]): + del cls.os_clients_dict[region_name][thread_name] + + def _is_token_valid(self, region_name): + try: + keystone = \ + OpenStackDriver.os_clients_dict[region_name]['keystone'].\ + keystone_client + if (not OpenStackDriver._identity_tokens + or region_name not in OpenStackDriver._identity_tokens + or not OpenStackDriver._identity_tokens[region_name]): + OpenStackDriver._identity_tokens[region_name] = \ + keystone.tokens.validate(keystone.session.get_token(), + include_catalog=False) + LOG.info("Token for subcloud %s expires_at=%s" % + (region_name, + OpenStackDriver._identity_tokens[region_name] + ['expires_at'])) + else: + token = keystone.tokens.validate( + OpenStackDriver._identity_tokens[region_name], + include_catalog=False) + if token != OpenStackDriver._identity_tokens[region_name]: + LOG.debug("%s: updating token %s to %s" % + (region_name, + OpenStackDriver._identity_tokens[region_name], + token)) + OpenStackDriver._identity_tokens[region_name] = token + + except Exception as exception: + LOG.info('_is_token_valid handle: %s', exception.message) + # Reset the cached dictionary + OpenStackDriver.os_clients_dict[region_name] = \ + collections.defaultdict(dict) + OpenStackDriver._identity_tokens[region_name] = None + return False + + expiry_time = timeutils.normalize_time(timeutils.parse_isotime( + self._identity_tokens[region_name]['expires_at'])) + duration = random.randrange(STALE_TOKEN_DURATION_MIN, + STALE_TOKEN_DURATION_MAX, + STALE_TOKEN_DURATION_STEP) + if timeutils.is_soon(expiry_time, duration): + LOG.info("The cached keystone token for subcloud %s " + "will expire soon %s" % + (region_name, + 
OpenStackDriver._identity_tokens[region_name] + ['expires_at'])) + # Reset the cached dictionary + OpenStackDriver.os_clients_dict[region_name] = \ + collections.defaultdict(dict) + OpenStackDriver._identity_tokens[region_name] = None + return False + else: + return True diff --git a/distributedcloud/dcorch/drivers/openstack/sysinv_v1.py b/distributedcloud/dccommon/drivers/openstack/sysinv_v1.py similarity index 68% rename from distributedcloud/dcorch/drivers/openstack/sysinv_v1.py rename to distributedcloud/dccommon/drivers/openstack/sysinv_v1.py index 3ed09d04a..af1b69110 100644 --- a/distributedcloud/dcorch/drivers/openstack/sysinv_v1.py +++ b/distributedcloud/dccommon/drivers/openstack/sysinv_v1.py @@ -1,3 +1,5 @@ +# Copyright 2016 Ericsson AB + # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at @@ -9,10 +11,16 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +# +# Copyright (c) 2017-2020 Wind River Systems, Inc. +# +# The right to copy, distribute, modify, or otherwise make use +# of this software may be licensed only pursuant to the terms +# of an applicable Wind River license agreement. +# import hashlib -from cgtsclient import client as cgts_client from cgtsclient.exc import HTTPConflict from cgtsclient.exc import HTTPNotFound from cgtsclient.v1.icommunity import CREATION_ATTRIBUTES \ @@ -20,14 +28,14 @@ from cgtsclient.v1.icommunity import CREATION_ATTRIBUTES \ from cgtsclient.v1.itrapdest import CREATION_ATTRIBUTES \ as SNMP_TRAPDEST_CREATION_ATTRIBUTES from oslo_log import log + from sysinv.common import constants as sysinv_constants -from dcorch.common import consts -from dcorch.common import exceptions -from dcorch.drivers import base +from dccommon.drivers import base +from dccommon import exceptions + LOG = log.getLogger(__name__) - API_VERSION = '1' @@ -49,43 +57,141 @@ def make_sysinv_patch(update_dict): class SysinvClient(base.DriverBase): """Sysinv V1 driver.""" - def __init__(self, region_name, session): - self._expired = False - self.api_version = API_VERSION - self.region_name = region_name - self.session = session - - self.client = self.update_client( - self.api_version, self.region_name, self.session) - - def update_client(self, api_version, region_name, session): + def __init__(self, region, session): try: - endpoint = self.session.get_endpoint( - service_type=consts.ENDPOINT_TYPE_PLATFORM, - interface=consts.KS_ENDPOINT_INTERNAL, - region_name=region_name) + # TOX cannot import cgts_client and all the dependencies therefore + # the client is being lazy loaded since TOX doesn't actually + # require the cgtsclient module. + from cgtsclient import client + + # The sysinv client doesn't support a session, so we need to + # get an endpoint and token. 
+ endpoint = session.get_endpoint(service_type='platform', + region_name=region, + interface='internal') token = session.get_token() - client = cgts_client.Client( - api_version, - username=session.auth._username, - password=session.auth._password, - tenant_name=session.auth._project_name, - auth_url=session.auth.auth_url, - endpoint=endpoint, - token=token) + + self.sysinv_client = client.Client(API_VERSION, + endpoint=endpoint, + token=token) + self.region_name = region except exceptions.ServiceUnavailable: raise - self._expired = False + def get_controller_hosts(self): + """Get a list of controller hosts.""" + return self.sysinv_client.ihost.list_personality( + sysinv_constants.CONTROLLER) - return client + def get_management_interface(self, hostname): + """Get the management interface for a host.""" + interfaces = self.sysinv_client.iinterface.list(hostname) + for interface in interfaces: + interface_networks = self.sysinv_client.interface_network.\ + list_by_interface(interface.uuid) + for if_net in interface_networks: + if if_net.network_type == sysinv_constants.NETWORK_TYPE_MGMT: + return interface + + # This can happen if the host is still being installed and has not + # yet created its management interface. + LOG.warning("Management interface on host %s not found" % hostname) + return None + + def get_management_address_pool(self): + """Get the management address pool for a host.""" + networks = self.sysinv_client.network.list() + for network in networks: + if network.type == sysinv_constants.NETWORK_TYPE_MGMT: + address_pool_uuid = network.pool_uuid + break + else: + LOG.error("Management address pool not found") + raise exceptions.InternalError() + + return self.sysinv_client.address_pool.get(address_pool_uuid) + + def get_oam_addresses(self): + """Get the oam address pool for a host.""" + iextoam_object = self.sysinv_client.iextoam.list() + if iextoam_object is not None and len(iextoam_object) != 0: + return iextoam_object[0] + else: + LOG.error("OAM address not found") + raise exceptions.OAMAddressesNotFound() + + def create_route(self, interface_uuid, network, prefix, gateway, metric): + """Create a static route on an interface.""" + + LOG.info("Creating route: interface: %s dest: %s/%s " + "gateway: %s metric %s" % (interface_uuid, network, + prefix, gateway, metric)) + self.sysinv_client.route.create(interface_uuid=interface_uuid, + network=network, + prefix=prefix, + gateway=gateway, + metric=metric) + + def delete_route(self, interface_uuid, network, prefix, gateway, metric): + """Delete a static route.""" + + # Get the routes for this interface + routes = self.sysinv_client.route.list_by_interface(interface_uuid) + for route in routes: + if (route.network == network and route.prefix == prefix and + route.gateway == gateway and route.metric == metric): + LOG.info("Deleting route: interface: %s dest: %s/%s " + "gateway: %s metric %s" % (interface_uuid, network, + prefix, gateway, metric)) + self.sysinv_client.route.delete(route.uuid) + return + + LOG.warning("Route not found: interface: %s dest: %s/%s gateway: %s " + "metric %s" % (interface_uuid, network, prefix, gateway, + metric)) + + def get_service_groups(self): + """Get a list of service groups.""" + return self.sysinv_client.sm_servicegroup.list() + + def get_loads(self): + """Get a list of loads.""" + return self.sysinv_client.load.list() + + def get_applications(self): + """Get a list of containerized applications""" + + # Get a list of containerized applications the system knows of + return 
self.sysinv_client.app.list() + + def get_system(self): + """Get the system.""" + systems = self.sysinv_client.isystem.list() + return systems[0] + + def get_service_parameters(self, name, value): + """Get service parameters for a given name.""" + opts = [] + opt = dict() + opt['field'] = name + opt['value'] = value + opt['op'] = 'eq' + opt['type'] = '' + opts.append(opt) + parameters = self.sysinv_client.service_parameter.list(q=opts) + return parameters + + def get_registry_image_tags(self, image_name): + """Get the image tags for an image from the local registry""" + image_tags = self.sysinv_client.registry_image.tags(image_name) + return image_tags def get_dns(self): """Get the dns nameservers for this region :return: dns """ - idnss = self.client.idns.list() + idnss = self.sysinv_client.idns.list() if not idnss: LOG.info("dns is None for region: %s" % self.region_name) return None @@ -115,14 +221,14 @@ class SysinvClient(base.DriverBase): 'action': 'apply'}) LOG.info("region={} dns update uuid={} patch={}".format( self.region_name, idns.uuid, patch)) - idns = self.client.idns.update(idns.uuid, patch) + idns = self.sysinv_client.idns.update(idns.uuid, patch) else: LOG.info("update_dns no changes, skip dns region={} " "update uuid={} nameservers={}".format( self.region_name, idns.uuid, nameservers)) except Exception as e: LOG.error("update_dns exception={}".format(e)) - raise exceptions.SyncRequestFailedRetry() + raise e return idns @@ -131,7 +237,7 @@ class SysinvClient(base.DriverBase): :return: itrapdests list of itrapdest """ - itrapdests = self.client.itrapdest.list() + itrapdests = self.sysinv_client.itrapdest.list() return itrapdests def snmp_trapdest_create(self, trapdest_dict): @@ -153,7 +259,8 @@ class SysinvClient(base.DriverBase): "trapdest_create_dict={}".format( self.region_name, trapdest_create_dict)) try: - itrapdest = self.client.itrapdest.create(**trapdest_create_dict) + itrapdest = self.sysinv_client.itrapdest.create( + **trapdest_create_dict) except HTTPConflict: LOG.info("snmp_trapdest_create exists region={}" "trapdest_dict={}".format( @@ -169,7 +276,7 @@ class SysinvClient(base.DriverBase): break except Exception as e: LOG.error("snmp_trapdest_create exception={}".format(e)) - raise exceptions.SyncRequestFailedRetry() + raise e return itrapdest @@ -181,7 +288,7 @@ class SysinvClient(base.DriverBase): try: LOG.info("snmp_trapdest_delete region {} ip_address: {}".format( self.region_name, trapdest_ip_address)) - self.client.itrapdest.delete(trapdest_ip_address) + self.sysinv_client.itrapdest.delete(trapdest_ip_address) except HTTPNotFound: LOG.info("snmp_trapdest_delete NotFound {} for region: {}".format( trapdest_ip_address, self.region_name)) @@ -189,14 +296,14 @@ class SysinvClient(base.DriverBase): ip_address=trapdest_ip_address) except Exception as e: LOG.error("snmp_trapdest_delete exception={}".format(e)) - raise exceptions.SyncRequestFailedRetry() + raise e def snmp_community_list(self): """Get the community list for this region :return: icommunitys list of icommunity """ - icommunitys = self.client.icommunity.list() + icommunitys = self.sysinv_client.icommunity.list() return icommunitys def snmp_community_create(self, community_dict): @@ -217,7 +324,8 @@ class SysinvClient(base.DriverBase): "community_create_dict={}".format( self.region_name, community_create_dict)) try: - icommunity = self.client.icommunity.create(**community_create_dict) + icommunity = self.sysinv_client.icommunity.create( + **community_create_dict) except HTTPConflict: 
LOG.info("snmp_community_create exists region={}" "community_dict={}".format( @@ -233,7 +341,7 @@ class SysinvClient(base.DriverBase): break except Exception as e: LOG.error("snmp_community_create exception={}".format(e)) - raise exceptions.SyncRequestFailedRetry() + raise e return icommunity @@ -245,7 +353,7 @@ class SysinvClient(base.DriverBase): try: LOG.info("snmp_community_delete region {} community: {}".format( self.region_name, community)) - self.client.icommunity.delete(community) + self.sysinv_client.icommunity.delete(community) except HTTPNotFound: LOG.info("snmp_community_delete NotFound {} for region: {}".format( community, self.region_name)) @@ -253,7 +361,7 @@ class SysinvClient(base.DriverBase): community=community) except Exception as e: LOG.error("snmp_community_delete exception={}".format(e)) - raise exceptions.SyncRequestFailedRetry() + raise e def get_certificates(self): """Get the certificates for this region @@ -262,11 +370,11 @@ class SysinvClient(base.DriverBase): """ try: - certificates = self.client.certificate.list() + certificates = self.sysinv_client.certificate.list() except Exception as e: LOG.error("get_certificates region={} " "exception={}".format(self.region_name, e)) - raise exceptions.SyncRequestFailedRetry() + raise e if not certificates: LOG.info("No certificates in region: {}".format( @@ -347,10 +455,10 @@ class SysinvClient(base.DriverBase): (signature.startswith(sysinv_constants.CERT_MODE_SSL) or (signature.startswith(sysinv_constants.CERT_MODE_TPM)))): # ensure https is enabled - isystem = self.client.isystem.list()[0] + isystem = self.sysinv_client.isystem.list()[0] https_enabled = isystem.capabilities.get('https_enabled', False) if not https_enabled: - isystem = self.client.isystem.update( + isystem = self.sysinv_client.isystem.update( isystem.uuid, [{"path": "/https_enabled", "value": "true", @@ -359,14 +467,14 @@ class SysinvClient(base.DriverBase): self.region_name, isystem.uuid)) try: - icertificate = self.client.certificate.certificate_install( + icertificate = self.sysinv_client.certificate.certificate_install( certificate, data) LOG.info("update_certificate region={} signature={}".format( self.region_name, signature)) except Exception as e: LOG.error("update_certificate exception={}".format(e)) - raise exceptions.SyncRequestFailedRetry() + raise e return icertificate @@ -378,7 +486,8 @@ class SysinvClient(base.DriverBase): try: LOG.info(" delete_certificate region {} certificate: {}".format( self.region_name, certificate.signature)) - self.client.certificate.certificate_uninstall(certificate.uuid) + self.sysinv_client.certificate.certificate_uninstall( + certificate.uuid) except HTTPNotFound: LOG.info("delete_certificate NotFound {} for region: {}".format( certificate.signature, self.region_name)) @@ -390,7 +499,7 @@ class SysinvClient(base.DriverBase): :return: iuser """ - iusers = self.client.iuser.list() + iusers = self.sysinv_client.iuser.list() if not iusers: LOG.info("user is None for region: %s" % self.region_name) return None @@ -423,14 +532,14 @@ class SysinvClient(base.DriverBase): }) LOG.info("region={} user update uuid={} patch={}".format( self.region_name, iuser.uuid, patch)) - iuser = self.client.iuser.update(iuser.uuid, patch) + iuser = self.sysinv_client.iuser.update(iuser.uuid, patch) else: LOG.info("update_user no changes, skip user region={} " "update uuid={} passwd_hash={}".format( self.region_name, iuser.uuid, passwd_hash)) except Exception as e: LOG.error("update_user exception={}".format(e)) - raise 
exceptions.SyncRequestFailedRetry() + raise e return iuser @@ -448,10 +557,10 @@ class SysinvClient(base.DriverBase): LOG.info("post_fernet_repo driver region={} " "fernet_repo_list={}".format(self.region_name, key_list)) try: - self.client.fernet.create(key_list) + self.sysinv_client.fernet.create(key_list) except Exception as e: LOG.error("post_fernet_repo exception={}".format(e)) - raise exceptions.SyncRequestFailedRetry() + raise e def put_fernet_repo(self, key_list): """Update the fernet keys for this region @@ -462,10 +571,10 @@ class SysinvClient(base.DriverBase): LOG.info("put_fernet_repo driver region={} " "fernet_repo_list={}".format(self.region_name, key_list)) try: - self.client.fernet.put(key_list) + self.sysinv_client.fernet.put(key_list) except Exception as e: LOG.error("put_fernet_repo exception={}".format(e)) - raise exceptions.SyncRequestFailedRetry() + raise e def get_fernet_keys(self): """Retrieve the fernet keys for this region @@ -474,9 +583,9 @@ class SysinvClient(base.DriverBase): """ try: - keys = self.client.fernet.list() + keys = self.sysinv_client.fernet.list() except Exception as e: LOG.error("get_fernet_keys exception={}".format(e)) - raise exceptions.SyncRequestFailedRetry() + raise e return keys diff --git a/distributedcloud/dcmanager/drivers/openstack/vim.py b/distributedcloud/dccommon/drivers/openstack/vim.py similarity index 96% rename from distributedcloud/dcmanager/drivers/openstack/vim.py rename to distributedcloud/dccommon/drivers/openstack/vim.py index 594fc887d..41b9d2bf2 100644 --- a/distributedcloud/dcmanager/drivers/openstack/vim.py +++ b/distributedcloud/dccommon/drivers/openstack/vim.py @@ -12,7 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2017 Wind River Systems, Inc. +# Copyright (c) 2017-2020 Wind River Systems, Inc. # # The right to copy, distribute, modify, or otherwise make use # of this software may be licensed only pursuant to the terms @@ -23,9 +23,10 @@ from oslo_log import log from nfv_client.openstack import sw_update -from dcmanager.common import consts -from dcmanager.common import exceptions -from dcmanager.drivers import base +from dccommon import consts +from dccommon.drivers import base +from dccommon import exceptions + LOG = log.getLogger(__name__) diff --git a/distributedcloud/dcorch/common/endpoint_cache.py b/distributedcloud/dccommon/endpoint_cache.py similarity index 96% rename from distributedcloud/dcorch/common/endpoint_cache.py rename to distributedcloud/dccommon/endpoint_cache.py index 1de57518e..eb1ebc002 100644 --- a/distributedcloud/dcorch/common/endpoint_cache.py +++ b/distributedcloud/dccommon/endpoint_cache.py @@ -12,6 +12,13 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +# +# Copyright (c) 2018-2020 Wind River Systems, Inc. +# +# The right to copy, distribute, modify, or otherwise make use +# of this software may be licensed only pursuant to the terms +# of an applicable Wind River license agreement. 
+# import collections import threading @@ -24,7 +31,7 @@ from keystoneclient.v3 import client as keystone_client from oslo_config import cfg from oslo_log import log as logging -from dcorch.common import consts +from dccommon import consts LOG = logging.getLogger(__name__) diff --git a/distributedcloud/dccommon/exceptions.py b/distributedcloud/dccommon/exceptions.py new file mode 100644 index 000000000..0e09c41d1 --- /dev/null +++ b/distributedcloud/dccommon/exceptions.py @@ -0,0 +1,105 @@ +# Copyright 2015 Huawei Technologies Co., Ltd. +# Copyright 2015 Ericsson AB. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2020 Wind River Systems, Inc. +# + +""" +DC Orchestrator base exception handling. +""" +import six + +from oslo_utils import encodeutils +from oslo_utils import excutils + +from dcorch.common.i18n import _ + + +class DCCommonException(Exception): + """Base Commond Driver Exception. + + To correctly use this class, inherit from it and define + a 'message' property. That message will get printf'd + with the keyword arguments provided to the constructor. + """ + + message = _("An unknown exception occurred.") + + def __init__(self, **kwargs): + try: + super(DCCommonException, self).__init__(self.message % kwargs) + self.msg = self.message % kwargs + except Exception: + with excutils.save_and_reraise_exception() as ctxt: + if not self.use_fatal_exceptions(): + ctxt.reraise = False + # at least get the core message out if something happened + super(DCCommonException, self).__init__(self.message) + + if six.PY2: + def __unicode__(self): + return encodeutils.exception_to_unicode(self.msg) + + def use_fatal_exceptions(self): + return False + + +class NotFound(DCCommonException): + pass + + +class Conflict(DCCommonException): + pass + + +class ServiceUnavailable(DCCommonException): + message = _("The service is unavailable") + + +class InvalidInputError(DCCommonException): + message = _("An invalid value was provided") + + +class InternalError(DCCommonException): + message = _("Error when performing operation") + + +class OAMAddressesNotFound(NotFound): + message = _("OAM Addresses Not Found") + + +class TrapDestAlreadyExists(Conflict): + message = _("TrapDest in region=%(region_name)s ip_address=%(ip_address)s " + "community=%(community)s already exists") + + +class TrapDestNotFound(NotFound): + message = _("Trapdest in region=%(region_name)s with ip_address " + "%(ip_address)s not found") + + +class CommunityAlreadyExists(Conflict): + message = _("Community %(community)s in region=%(region_name)s " + "already exists") + + +class CommunityNotFound(NotFound): + message = _("Community %(community)s not found in region=%(region_name)s") + + +class CertificateNotFound(NotFound): + message = _("Certificate in region=%(region_name)s with signature " + "%(signature)s not found") diff --git a/distributedcloud/dcmanager/tests/unit/drivers/__init__.py b/distributedcloud/dccommon/tests/__init__.py similarity index 100% rename from 
distributedcloud/dcmanager/tests/unit/drivers/__init__.py rename to distributedcloud/dccommon/tests/__init__.py diff --git a/distributedcloud/dccommon/tests/base.py b/distributedcloud/dccommon/tests/base.py new file mode 100644 index 000000000..d3e6cede0 --- /dev/null +++ b/distributedcloud/dccommon/tests/base.py @@ -0,0 +1,32 @@ +# Copyright (c) 2015 Ericsson AB +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2020 Wind River Systems, Inc. +# +# The right to copy, distribute, modify, or otherwise make use +# of this software may be licensed only pursuant to the terms +# of an applicable Wind River license agreement. +# + +from dccommon.tests import utils +from oslotest import base + + +class DCCommonTestCase(base.BaseTestCase): + """Test case base class for all unit tests.""" + + def setUp(self): + super(DCCommonTestCase, self).setUp() + self.ctx = utils.dummy_context() diff --git a/distributedcloud/dccommon/tests/unit/__init__.py b/distributedcloud/dccommon/tests/unit/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/distributedcloud/dccommon/tests/unit/drivers/__init__.py b/distributedcloud/dccommon/tests/unit/drivers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/distributedcloud/dcmanager/tests/unit/drivers/test_keystone_v3.py b/distributedcloud/dccommon/tests/unit/drivers/test_keystone_v3.py similarity index 95% rename from distributedcloud/dcmanager/tests/unit/drivers/test_keystone_v3.py rename to distributedcloud/dccommon/tests/unit/drivers/test_keystone_v3.py index 5c71967b3..aaa720d5b 100644 --- a/distributedcloud/dcmanager/tests/unit/drivers/test_keystone_v3.py +++ b/distributedcloud/dccommon/tests/unit/drivers/test_keystone_v3.py @@ -10,7 +10,7 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2017 Wind River Systems, Inc. +# Copyright (c) 2017-2020 Wind River Systems, Inc. 
# # The right to copy, distribute, modify, or otherwise make use # of this software may be licensed only pursuant to the terms @@ -19,10 +19,9 @@ import mock -from dcmanager.tests import base -from dcmanager.tests import utils - -from dcorch.drivers.openstack import keystone_v3 +from dccommon.drivers.openstack import keystone_v3 +from dccommon.tests import base +from dccommon.tests import utils FAKE_SERVICE = [ 'endpoint_volume', @@ -50,7 +49,7 @@ class FakeEndpoint(object): self.region = region -class TestKeystoneClient(base.DCManagerTestCase): +class TestKeystoneClient(base.DCCommonTestCase): def setUp(self): super(TestKeystoneClient, self).setUp() self.ctx = utils.dummy_context() diff --git a/distributedcloud/dccommon/tests/unit/drivers/test_sdk_platform.py b/distributedcloud/dccommon/tests/unit/drivers/test_sdk_platform.py new file mode 100644 index 000000000..a6f8dc165 --- /dev/null +++ b/distributedcloud/dccommon/tests/unit/drivers/test_sdk_platform.py @@ -0,0 +1,45 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copyright (c) 2020 Wind River Systems, Inc. +# +# The right to copy, distribute, modify, or otherwise make use +# of this software may be licensed only pursuant to the terms +# of an applicable Wind River license agreement. +# + +import mock + +from dccommon.drivers.openstack import sdk_platform as sdk +from dccommon import exceptions +from dccommon.tests import base + + +class TestOpenStackDriver(base.DCCommonTestCase): + + @mock.patch.object(sdk, 'KeystoneClient') + @mock.patch.object(sdk.OpenStackDriver, '_is_token_valid') + def test_init(self, mock_keystone_client, mock_is_token_valid): + region_name = 'subcloud1' + + os_client = sdk.OpenStackDriver(region_name, region_clients=None) + self.assertIsNotNone(os_client) + new_keystone_client = os_client.keystone_client + self.assertIsNotNone(new_keystone_client) + mock_is_token_valid(region_name).return_value = True + cached_keystone_client = sdk.OpenStackDriver( + region_name, region_clients=None).keystone_client + self.assertEqual(new_keystone_client, cached_keystone_client) + + self.assertRaises(exceptions.InvalidInputError, + sdk.OpenStackDriver, + region_name, region_clients=['fake_client']) diff --git a/distributedcloud/dcmanager/tests/unit/drivers/test_sysinv_v1.py b/distributedcloud/dccommon/tests/unit/drivers/test_sysinv_v1.py similarity index 93% rename from distributedcloud/dcmanager/tests/unit/drivers/test_sysinv_v1.py rename to distributedcloud/dccommon/tests/unit/drivers/test_sysinv_v1.py index b1bdd2da6..a2c67dbf0 100644 --- a/distributedcloud/dcmanager/tests/unit/drivers/test_sysinv_v1.py +++ b/distributedcloud/dccommon/tests/unit/drivers/test_sysinv_v1.py @@ -10,7 +10,7 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2017 Wind River Systems, Inc. +# Copyright (c) 2017-2020 Wind River Systems, Inc. 
# # The right to copy, distribute, modify, or otherwise make use # of this software may be licensed only pursuant to the terms @@ -19,10 +19,11 @@ import mock +from dccommon.drivers.openstack import sysinv_v1 +from dccommon.tests import base +from dccommon.tests import utils from dcmanager.common import consts -from dcmanager.drivers.openstack import sysinv_v1 -from dcmanager.tests import base -from dcmanager.tests import utils +from dcmanager.tests import utils as dcmanager_utils from ddt import ddt from ddt import file_data @@ -61,7 +62,7 @@ class FakeRoute(object): @ddt -class TestSysinvClient(base.DCManagerTestCase): +class TestSysinvClient(base.DCCommonTestCase): def setUp(self): super(TestSysinvClient, self).setUp() self.ctx = utils.dummy_context() @@ -110,7 +111,7 @@ class TestSysinvClient(base.DCManagerTestCase): management_pool = sysinv_client.get_management_address_pool() self.assertEqual(pool, management_pool) - @file_data(utils.get_data_filepath('sysinv', 'routes')) + @file_data(dcmanager_utils.get_data_filepath('sysinv', 'routes')) @mock.patch.object(sysinv_v1.SysinvClient, '__init__') def test_create_route(self, value, mock_sysinvclient_init): fake_route = utils.create_route_dict(value) @@ -129,7 +130,7 @@ class TestSysinvClient(base.DCManagerTestCase): network=fake_route['network'], prefix=fake_route['prefix'], gateway=fake_route['gateway'], metric=fake_route['metric']) - @file_data(utils.get_data_filepath('sysinv', 'routes')) + @file_data(dcmanager_utils.get_data_filepath('sysinv', 'routes')) @mock.patch.object(sysinv_v1.SysinvClient, '__init__') def test_delete_route(self, value, mock_sysinvclient_init): # fake_route = utils.create_route_dict(value) diff --git a/distributedcloud/dcmanager/tests/unit/common/test_endpoint_cache.py b/distributedcloud/dccommon/tests/unit/test_endpoint_cache.py similarity index 87% rename from distributedcloud/dcmanager/tests/unit/common/test_endpoint_cache.py rename to distributedcloud/dccommon/tests/unit/test_endpoint_cache.py index eaad2ecf4..a7933ceec 100644 --- a/distributedcloud/dcmanager/tests/unit/common/test_endpoint_cache.py +++ b/distributedcloud/dccommon/tests/unit/test_endpoint_cache.py @@ -13,7 +13,7 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2017 Wind River Systems, Inc. +# Copyright (c) 2017-2020 Wind River Systems, Inc. 
# # The right to copy, distribute, modify, or otherwise make use # of this software may be licensed only pursuant to the terms @@ -22,13 +22,16 @@ from mock import patch -from dcmanager.tests import base -from dcmanager.tests import utils +from oslo_config import cfg + +from dccommon import endpoint_cache +from dccommon.tests import base +from dccommon.tests import utils +from dcmanager.tests import utils as dcmanager_utils from ddt import ddt from ddt import file_data -from dcorch.common import endpoint_cache FAKE_REGION = 'fake_region' FAKE_SERVICE = 'fake_service' @@ -45,11 +48,15 @@ FAKE_NEUTRON_URL_1 = 'fake_url_neutron_1' @ddt -class EndpointCacheTest(base.DCManagerTestCase): +class EndpointCacheTest(base.DCCommonTestCase): def setUp(self): super(EndpointCacheTest, self).setUp() + auth_uri_opts = [ + cfg.StrOpt('auth_uri', + default="fake_auth_uri")] + cfg.CONF.register_opts(auth_uri_opts, 'cache') - @file_data(utils.get_data_filepath('keystone', 'endpoint')) + @file_data(dcmanager_utils.get_data_filepath('keystone', 'endpoint')) @patch.object(endpoint_cache.EndpointCache, '_initialize_keystone_client') @patch.object(endpoint_cache.EndpointCache, '_get_endpoint_from_keystone') def test_get_endpoint(self, value, mock_method, mock_init): @@ -62,7 +69,7 @@ class EndpointCacheTest(base.DCManagerTestCase): endpoint_dict['service_id']), endpoint_dict['url']) - @file_data(utils.get_data_filepath('keystone', 'endpoint')) + @file_data(dcmanager_utils.get_data_filepath('keystone', 'endpoint')) @patch.object(endpoint_cache.EndpointCache, '_initialize_keystone_client') @patch.object(endpoint_cache.EndpointCache, '_get_endpoint_from_keystone') def test_get_endpoint_not_found(self, value, mock_method, mock_init): @@ -76,7 +83,7 @@ class EndpointCacheTest(base.DCManagerTestCase): self.assertEqual(cache.get_endpoint(endpoint_dict['region_id'], 'another_fake_service'), '') - @file_data(utils.get_data_filepath('keystone', 'endpoint')) + @file_data(dcmanager_utils.get_data_filepath('keystone', 'endpoint')) @patch.object(endpoint_cache.EndpointCache, '_initialize_keystone_client') @patch.object(endpoint_cache.EndpointCache, '_get_endpoint_from_keystone') def test_get_endpoint_retry(self, value, mock_method, mock_init): @@ -89,7 +96,7 @@ class EndpointCacheTest(base.DCManagerTestCase): endpoint_dict['service_id']), 'another_fake_url') - @file_data(utils.get_data_filepath('keystone', 'endpoint')) + @file_data(dcmanager_utils.get_data_filepath('keystone', 'endpoint')) @patch.object(endpoint_cache.EndpointCache, '_initialize_keystone_client') @patch.object(endpoint_cache.EndpointCache, '_get_endpoint_from_keystone') def test_update_endpoint(self, value, mock_method, mock_init): diff --git a/distributedcloud/dccommon/tests/utils.py b/distributedcloud/dccommon/tests/utils.py new file mode 100644 index 000000000..bc59080ba --- /dev/null +++ b/distributedcloud/dccommon/tests/utils.py @@ -0,0 +1,59 @@ +# Copyright (c) 2015 Ericsson AB +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +# Copyright (c) 2020 Wind River Systems, Inc. +# +# The right to copy, distribute, modify, or otherwise make use +# of this software may be licensed only pursuant to the terms +# of an applicable Wind River license agreement. +# + +from oslo_context import context + + +def create_route_dict(data_list): + return {'created-at': data_list[0], + 'updated-at': data_list[1], + 'deleted-at': data_list[2], + 'id': data_list[3], + 'uuid': data_list[4], + 'family': data_list[5], + 'network': data_list[6], + 'prefix': data_list[7], + 'gateway': data_list[8], + 'metric': data_list[9], + 'interface-id': data_list[10]} + + +def create_endpoint_dict(data_list): + return {'id': data_list[0], + 'legacy_endpoint_id': data_list[1], + 'interface': data_list[2], + 'service_id': data_list[3], + 'url': data_list[4], + 'extra': data_list[5], + 'enabled': data_list[6], + 'region_id': data_list[7]} + + +def dummy_context(user='test_username', tenant='test_project_id', + region_name=None): + return context.RequestContext.from_dict({ + 'auth_token': 'abcd1234', + 'user': user, + 'project': tenant, + 'is_admin': True, + 'region_name': region_name + }) diff --git a/distributedcloud/dcmanager/api/controllers/v1/subclouds.py b/distributedcloud/dcmanager/api/controllers/v1/subclouds.py index fc4b173c5..0bebabb15 100644 --- a/distributedcloud/dcmanager/api/controllers/v1/subclouds.py +++ b/distributedcloud/dcmanager/api/controllers/v1/subclouds.py @@ -36,7 +36,10 @@ from controllerconfig.common.exceptions import ValidateFail from controllerconfig.utils import validate_address_str from controllerconfig.utils import validate_network_str -from dcorch.drivers.openstack.keystone_v3 import KeystoneClient +from dccommon.drivers.openstack.keystone_v3 import KeystoneClient +from dccommon.drivers.openstack.sysinv_v1 import SysinvClient +from dccommon import exceptions as dccommon_exceptions + from keystoneauth1 import exceptions as keystone_exceptions from dcmanager.api.controllers import restcomm @@ -46,7 +49,7 @@ from dcmanager.common.i18n import _ from dcmanager.common import install_consts from dcmanager.common import utils from dcmanager.db import api as db_api -from dcmanager.drivers.openstack.sysinv_v1 import SysinvClient + from dcmanager.rpc import client as rpc_client CONF = cfg.CONF @@ -356,7 +359,7 @@ class SubcloudsController(object): message = ("Identity endpoint for subcloud: %s not found. %s" % (subcloud_name, e)) LOG.error(message) - except exceptions.OAMAddressesNotFound: + except dccommon_exceptions.OAMAddressesNotFound: message = ("OAM addresses for subcloud: %s not found." % (subcloud_name)) LOG.error(message) diff --git a/distributedcloud/dcmanager/api/controllers/v1/sw_update_options.py b/distributedcloud/dcmanager/api/controllers/v1/sw_update_options.py index d91b56297..918e4e150 100644 --- a/distributedcloud/dcmanager/api/controllers/v1/sw_update_options.py +++ b/distributedcloud/dcmanager/api/controllers/v1/sw_update_options.py @@ -27,6 +27,7 @@ import pecan from pecan import expose from pecan import request +from dccommon import consts as dccommon_consts from dcmanager.api.controllers import restcomm from dcmanager.common import consts from dcmanager.common import exceptions @@ -128,7 +129,7 @@ class SwUpdateOptionsController(object): if subcloud_ref == consts.DEFAULT_REGION_NAME: # update default options - subcloud_name = consts.SW_UPDATE_DEFAULT_TITLE + subcloud_name = dccommon_consts.SW_UPDATE_DEFAULT_TITLE if db_api.sw_update_opts_default_get(context): # entry already in db, update it. 
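Between the relocated dccommon test helpers above and the dcmanager changes that follow, a minimal sketch of how a unit test is expected to combine base.DCCommonTestCase with the new dccommon.tests.utils helpers; the test class name and sample data below are illustrative assumptions, not part of this patch.

from dccommon.tests import base
from dccommon.tests import utils


class ExampleDCCommonTest(base.DCCommonTestCase):
    """Illustrative only; exercises the relocated test helpers."""

    def test_create_endpoint_dict(self):
        # utils.create_endpoint_dict() maps a positional data list onto the
        # endpoint fields consumed by the endpoint cache tests.
        data = ['id0', 'legacy0', 'internal', 'svc0', 'http://fake',
                '{}', True, 'RegionOne']
        endpoint = utils.create_endpoint_dict(data)
        self.assertEqual('svc0', endpoint['service_id'])
        self.assertEqual('http://fake', endpoint['url'])
        # self.ctx is populated by DCCommonTestCase.setUp() via
        # utils.dummy_context().
        self.assertIsNotNone(self.ctx)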
diff --git a/distributedcloud/dcmanager/common/consts.py b/distributedcloud/dcmanager/common/consts.py index 84c7f0dc9..cf4aca1b6 100644 --- a/distributedcloud/dcmanager/common/consts.py +++ b/distributedcloud/dcmanager/common/consts.py @@ -87,8 +87,6 @@ STRATEGY_STATE_COMPLETE = "complete" STRATEGY_STATE_ABORTED = "aborted" STRATEGY_STATE_FAILED = "failed" -SW_UPDATE_DEFAULT_TITLE = "all clouds default" - # Subcloud deploy status states DEPLOY_STATE_NONE = 'not-deployed' DEPLOY_STATE_PRE_INSTALL = 'pre-install' diff --git a/distributedcloud/dcmanager/common/exceptions.py b/distributedcloud/dcmanager/common/exceptions.py index 1bde8edd8..fb603943d 100644 --- a/distributedcloud/dcmanager/common/exceptions.py +++ b/distributedcloud/dcmanager/common/exceptions.py @@ -136,10 +136,6 @@ class InternalError(DCManagerException): message = _("Error when performing operation") -class OAMAddressesNotFound(NotFound): - message = _("OAM Addresses Not Found") - - class InvalidInputError(DCManagerException): message = _("An invalid value was provided") diff --git a/distributedcloud/dcmanager/common/utils.py b/distributedcloud/dcmanager/common/utils.py index 7baf04881..1ac04c25c 100644 --- a/distributedcloud/dcmanager/common/utils.py +++ b/distributedcloud/dcmanager/common/utils.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Copyright (c) 2017 Wind River Systems, Inc. +# Copyright (c) 2017-2020 Wind River Systems, Inc. # # The right to copy, distribute, modify, or otherwise make use # of this software may be licensed only pursuant to the terms @@ -31,10 +31,10 @@ from oslo_concurrency import lockutils from oslo_config import cfg from oslo_log import log as logging -from dcmanager.common import consts +from dccommon import consts as dccommon_consts +from dccommon.drivers.openstack import vim from dcmanager.common import exceptions from dcmanager.db import api as db_api -from dcmanager.drivers.openstack import vim from dcorch.common import consts as dcorch_consts LOG = logging.getLogger(__name__) @@ -103,7 +103,7 @@ def get_sw_update_opts(context, subcloud_id=subcloud_id) return db_api.sw_update_opts_w_name_db_model_to_dict( - sw_update_opts_ref, consts.SW_UPDATE_DEFAULT_TITLE) + sw_update_opts_ref, dccommon_consts.SW_UPDATE_DEFAULT_TITLE) def ensure_lock_path(): diff --git a/distributedcloud/dcmanager/db/sqlalchemy/migrate_repo/versions/001_first_version.py b/distributedcloud/dcmanager/db/sqlalchemy/migrate_repo/versions/001_first_version.py index 8f1cc8044..a70cc7c8c 100644 --- a/distributedcloud/dcmanager/db/sqlalchemy/migrate_repo/versions/001_first_version.py +++ b/distributedcloud/dcmanager/db/sqlalchemy/migrate_repo/versions/001_first_version.py @@ -13,14 +13,14 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2017 Wind River Systems, Inc. +# Copyright (c) 2017-2020 Wind River Systems, Inc. # # The right to copy, distribute, modify, or otherwise make use # of this software may be licensed only pursuant to the terms # of an applicable Wind River license agreement. 
# -from dcmanager.drivers.openstack import vim +from dccommon.drivers.openstack import vim import sqlalchemy diff --git a/distributedcloud/dcmanager/drivers/openstack/sysinv_v1.py b/distributedcloud/dcmanager/drivers/openstack/sysinv_v1.py deleted file mode 100644 index 5909c3092..000000000 --- a/distributedcloud/dcmanager/drivers/openstack/sysinv_v1.py +++ /dev/null @@ -1,163 +0,0 @@ -# Copyright 2016 Ericsson AB - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copyright (c) 2017 Wind River Systems, Inc. -# -# The right to copy, distribute, modify, or otherwise make use -# of this software may be licensed only pursuant to the terms -# of an applicable Wind River license agreement. -# - -from oslo_log import log - -from sysinv.common import constants as sysinv_constants - -# from dcmanager.common import consts -from dcmanager.common import exceptions -from dcmanager.drivers import base - -LOG = log.getLogger(__name__) -API_VERSION = '1' - - -class SysinvClient(base.DriverBase): - """Sysinv V1 driver.""" - - def __init__(self, region, session): - try: - # TOX cannot import cgts_client and all the dependencies therefore - # the client is being lazy loaded since TOX doesn't actually - # require the cgtsclient module. - from cgtsclient import client - - # The sysinv client doesn't support a session, so we need to - # get an endpoint and token. - endpoint = session.get_endpoint(service_type='platform', - region_name=region, - interface='internal') - token = session.get_token() - - self.sysinv_client = client.Client(API_VERSION, - endpoint=endpoint, - token=token) - except exceptions.ServiceUnavailable: - raise - - def get_controller_hosts(self): - """Get a list of controller hosts.""" - return self.sysinv_client.ihost.list_personality( - sysinv_constants.CONTROLLER) - - def get_management_interface(self, hostname): - """Get the management interface for a host.""" - interfaces = self.sysinv_client.iinterface.list(hostname) - for interface in interfaces: - interface_networks = self.sysinv_client.interface_network.\ - list_by_interface(interface.uuid) - for if_net in interface_networks: - if if_net.network_type == sysinv_constants.NETWORK_TYPE_MGMT: - return interface - - # This can happen if the host is still being installed and has not - # yet created its management interface. 
- LOG.warning("Management interface on host %s not found" % hostname) - return None - - def get_management_address_pool(self): - """Get the management address pool for a host.""" - networks = self.sysinv_client.network.list() - for network in networks: - if network.type == sysinv_constants.NETWORK_TYPE_MGMT: - address_pool_uuid = network.pool_uuid - break - else: - LOG.error("Management address pool not found") - raise exceptions.InternalError() - - return self.sysinv_client.address_pool.get(address_pool_uuid) - - def get_oam_addresses(self): - """Get the oam address pool for a host.""" - iextoam_object = self.sysinv_client.iextoam.list() - if iextoam_object is not None and len(iextoam_object) != 0: - return iextoam_object[0] - else: - LOG.error("OAM address not found") - raise exceptions.OAMAddressesNotFound() - - def create_route(self, interface_uuid, network, prefix, gateway, metric): - """Create a static route on an interface.""" - - LOG.info("Creating route: interface: %s dest: %s/%s " - "gateway: %s metric %s" % (interface_uuid, network, - prefix, gateway, metric)) - self.sysinv_client.route.create(interface_uuid=interface_uuid, - network=network, - prefix=prefix, - gateway=gateway, - metric=metric) - - def delete_route(self, interface_uuid, network, prefix, gateway, metric): - """Delete a static route.""" - - # Get the routes for this interface - routes = self.sysinv_client.route.list_by_interface(interface_uuid) - for route in routes: - if (route.network == network and route.prefix == prefix and - route.gateway == gateway and route.metric == metric): - LOG.info("Deleting route: interface: %s dest: %s/%s " - "gateway: %s metric %s" % (interface_uuid, network, - prefix, gateway, metric)) - self.sysinv_client.route.delete(route.uuid) - return - - LOG.warning("Route not found: interface: %s dest: %s/%s gateway: %s " - "metric %s" % (interface_uuid, network, prefix, gateway, - metric)) - - def get_service_groups(self): - """Get a list of service groups.""" - return self.sysinv_client.sm_servicegroup.list() - - def get_loads(self): - """Get a list of loads.""" - return self.sysinv_client.load.list() - - def get_applications(self): - """Get a list of containerized applications""" - - # Get a list of containerized applications the system knows of - return self.sysinv_client.app.list() - - def get_system(self): - """Get the system.""" - systems = self.sysinv_client.isystem.list() - return systems[0] - - def get_service_parameters(self, name, value): - """Get service parameters for a given name.""" - opts = [] - opt = dict() - opt['field'] = name - opt['value'] = value - opt['op'] = 'eq' - opt['type'] = '' - opts.append(opt) - parameters = self.sysinv_client.service_parameter.list(q=opts) - return parameters - - def get_registry_image_tags(self, image_name): - """Get the image tags for an image from the local registry""" - image_tags = self.sysinv_client.registry_image.tags(image_name) - return image_tags diff --git a/distributedcloud/dcmanager/manager/patch_audit_manager.py b/distributedcloud/dcmanager/manager/patch_audit_manager.py index 192844e9c..f7f6020b0 100644 --- a/distributedcloud/dcmanager/manager/patch_audit_manager.py +++ b/distributedcloud/dcmanager/manager/patch_audit_manager.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Copyright (c) 2017 Wind River Systems, Inc. +# Copyright (c) 2017-2020 Wind River Systems, Inc. 
# # The right to copy, distribute, modify, or otherwise make use # of this software may be licensed only pursuant to the terms @@ -24,8 +24,12 @@ from keystoneauth1 import exceptions as keystone_exceptions from oslo_config import cfg from oslo_log import log as logging +from dccommon.drivers.openstack import patching_v1 +from dccommon.drivers.openstack.patching_v1 import PatchingClient +from dccommon.drivers.openstack.sdk_platform import OpenStackDriver +from dccommon.drivers.openstack.sysinv_v1 import SysinvClient + from dcorch.common import consts as dcorch_consts -from dcorch.drivers.openstack.keystone_v3 import KeystoneClient from dcmanager.common import consts from dcmanager.common import context @@ -33,10 +37,6 @@ from dcmanager.common.i18n import _ from dcmanager.common import manager from dcmanager.db import api as db_api -from dcmanager.drivers.openstack import patching_v1 -from dcmanager.drivers.openstack.patching_v1 import PatchingClient -from dcmanager.drivers.openstack.sysinv_v1 import SysinvClient - LOG = logging.getLogger(__name__) @@ -105,7 +105,9 @@ class PatchAuditManager(manager.Manager): LOG.info('Triggered patch audit.') try: - ks_client = KeystoneClient() + m_os_ks_client = OpenStackDriver( + region_name=consts.DEFAULT_REGION_NAME, + region_clients=None).keystone_client except Exception: LOG.warn('Failure initializing KeystoneClient, exiting audit.') return @@ -113,7 +115,7 @@ class PatchAuditManager(manager.Manager): # First query RegionOne to determine what patches should be applied # to the system. patching_client = PatchingClient( - consts.DEFAULT_REGION_NAME, ks_client.session) + consts.DEFAULT_REGION_NAME, m_os_ks_client.session) regionone_patches = patching_client.query() LOG.debug("regionone_patches: %s" % regionone_patches) @@ -142,7 +144,8 @@ class PatchAuditManager(manager.Manager): continue try: - sc_ks_client = KeystoneClient(subcloud.name) + sc_os_client = OpenStackDriver(region_name=subcloud.name, + region_clients=None) except (keystone_exceptions.EndpointNotFound, keystone_exceptions.ConnectFailure, keystone_exceptions.ConnectTimeout, @@ -154,16 +157,16 @@ class PatchAuditManager(manager.Manager): continue try: - patching_client = PatchingClient(subcloud.name, - sc_ks_client.session) + patching_client = PatchingClient( + subcloud.name, sc_os_client.keystone_client.session) except keystone_exceptions.EndpointNotFound: LOG.warn("Patching endpoint for online subcloud %s not found." % subcloud.name) continue try: - sysinv_client = SysinvClient(subcloud.name, - sc_ks_client.session) + sysinv_client = SysinvClient( + subcloud.name, sc_os_client.keystone_client.session) except keystone_exceptions.EndpointNotFound: LOG.warn("Sysinv endpoint for online subcloud %s not found." 
% subcloud.name) diff --git a/distributedcloud/dcmanager/manager/subcloud_audit_manager.py b/distributedcloud/dcmanager/manager/subcloud_audit_manager.py index 15b697238..9647df606 100644 --- a/distributedcloud/dcmanager/manager/subcloud_audit_manager.py +++ b/distributedcloud/dcmanager/manager/subcloud_audit_manager.py @@ -28,8 +28,9 @@ from fm_api import constants as fm_const from fm_api import fm_api from sysinv.common import constants as sysinv_constants -from dcorch.common import consts as dcorch_consts -from dcorch.drivers.openstack.keystone_v3 import KeystoneClient +from dccommon import consts as dccommon_consts +from dccommon.drivers.openstack.sdk_platform import OpenStackDriver +from dccommon.drivers.openstack.sysinv_v1 import SysinvClient from dcorch.rpc import client as dcorch_rpc_client from dcmanager.common import consts @@ -38,7 +39,6 @@ from dcmanager.common import exceptions from dcmanager.common.i18n import _ from dcmanager.common import manager from dcmanager.db import api as db_api -from dcmanager.drivers.openstack.sysinv_v1 import SysinvClient from dcmanager.manager import scheduler CONF = cfg.CONF @@ -47,7 +47,7 @@ LOG = logging.getLogger(__name__) # We will update the state of each subcloud in the dcorch about once per hour. # Calculate how many iterations that will be. SUBCLOUD_STATE_UPDATE_ITERATIONS = \ - dcorch_consts.SECONDS_IN_HOUR / CONF.scheduler.subcloud_audit_interval + dccommon_consts.SECONDS_IN_HOUR / CONF.scheduler.subcloud_audit_interval class SubcloudAuditManager(manager.Manager): @@ -94,9 +94,10 @@ class SubcloudAuditManager(manager.Manager): update_subcloud_state = False # Determine whether OpenStack is installed in central cloud - ks_client = KeystoneClient() + os_client = OpenStackDriver(region_name=consts.DEFAULT_REGION_NAME, + region_clients=None) sysinv_client = SysinvClient(consts.DEFAULT_REGION_NAME, - ks_client.session) + os_client.keystone_client.session) # This could be optimized in the future by attempting to get just the # one application. 
However, sysinv currently treats this as a failure # if the application is not installed and generates warning logs, so it @@ -163,9 +164,10 @@ class SubcloudAuditManager(manager.Manager): avail_to_set = consts.AVAILABILITY_OFFLINE try: - ks_client = KeystoneClient(subcloud_name) + os_client = OpenStackDriver(region_name=subcloud_name, + region_clients=None) sysinv_client = SysinvClient(subcloud_name, - ks_client.session) + os_client.keystone_client.session) except (keystone_exceptions.EndpointNotFound, keystone_exceptions.ConnectFailure, keystone_exceptions.ConnectTimeout, @@ -383,7 +385,7 @@ class SubcloudAuditManager(manager.Manager): remove_subcloud_sync_endpoint_type if dcm_update_func and dco_update_func: - endpoint_type_list = dcorch_consts.ENDPOINT_TYPES_LIST_OS + endpoint_type_list = dccommon_consts.ENDPOINT_TYPES_LIST_OS try: # Notify dcorch to add/remove sync endpoint type list dco_update_func(self.context, subcloud_name, diff --git a/distributedcloud/dcmanager/manager/subcloud_install.py b/distributedcloud/dcmanager/manager/subcloud_install.py index 57ef5cff2..9cd154fc2 100644 --- a/distributedcloud/dcmanager/manager/subcloud_install.py +++ b/distributedcloud/dcmanager/manager/subcloud_install.py @@ -30,11 +30,11 @@ from six.moves.urllib import error as urllib_error from six.moves.urllib import parse from six.moves.urllib import request +from dccommon.drivers.openstack.keystone_v3 import KeystoneClient +from dccommon.drivers.openstack.sysinv_v1 import SysinvClient from dcmanager.common import consts from dcmanager.common import exceptions from dcmanager.common import install_consts -from dcmanager.drivers.openstack.sysinv_v1 import SysinvClient -from dcorch.drivers.openstack.keystone_v3 import KeystoneClient from oslo_log import log as logging diff --git a/distributedcloud/dcmanager/manager/subcloud_manager.py b/distributedcloud/dcmanager/manager/subcloud_manager.py index 031939438..da7736ee9 100644 --- a/distributedcloud/dcmanager/manager/subcloud_manager.py +++ b/distributedcloud/dcmanager/manager/subcloud_manager.py @@ -35,8 +35,10 @@ from oslo_messaging import RemoteError from tsconfig.tsconfig import CONFIG_PATH from tsconfig.tsconfig import SW_VERSION +from dccommon.drivers.openstack.keystone_v3 import KeystoneClient +from dccommon.drivers.openstack.sysinv_v1 import SysinvClient + from dcorch.common import consts as dcorch_consts -from dcorch.drivers.openstack.keystone_v3 import KeystoneClient from dcorch.rpc import client as dcorch_rpc_client from dcmanager.common import consts @@ -46,7 +48,6 @@ from dcmanager.common.i18n import _ from dcmanager.common import manager from dcmanager.common import utils from dcmanager.db import api as db_api -from dcmanager.drivers.openstack.sysinv_v1 import SysinvClient from dcmanager.manager.subcloud_install import SubcloudInstall from fm_api import constants as fm_const diff --git a/distributedcloud/dcmanager/manager/sw_update_manager.py b/distributedcloud/dcmanager/manager/sw_update_manager.py index 850cae36e..9312f3b45 100644 --- a/distributedcloud/dcmanager/manager/sw_update_manager.py +++ b/distributedcloud/dcmanager/manager/sw_update_manager.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Copyright (c) 2017 Wind River Systems, Inc. +# Copyright (c) 2017-2020 Wind River Systems, Inc. 
# # The right to copy, distribute, modify, or otherwise make use # of this software may be licensed only pursuant to the terms @@ -28,8 +28,12 @@ import time from keystoneauth1 import exceptions as keystone_exceptions from oslo_log import log as logging +from dccommon.drivers.openstack import patching_v1 +from dccommon.drivers.openstack.patching_v1 import PatchingClient +from dccommon.drivers.openstack.sdk_platform import OpenStackDriver +from dccommon.drivers.openstack.sysinv_v1 import SysinvClient +from dccommon.drivers.openstack import vim from dcorch.common import consts as dcorch_consts -from dcorch.drivers.openstack.keystone_v3 import KeystoneClient from dcmanager.common import consts from dcmanager.common import context @@ -38,10 +42,6 @@ from dcmanager.common.i18n import _ from dcmanager.common import manager from dcmanager.common import utils from dcmanager.db import api as db_api -from dcmanager.drivers.openstack import patching_v1 -from dcmanager.drivers.openstack.patching_v1 import PatchingClient -from dcmanager.drivers.openstack.sysinv_v1 import SysinvClient -from dcmanager.drivers.openstack import vim from dcmanager.manager.patch_audit_manager import PatchAuditManager from dcmanager.manager import scheduler @@ -329,10 +329,13 @@ class PatchOrchThread(threading.Thread): LOG.info("PatchOrchThread Stopped") @staticmethod - def get_ks_client(region_name=None): - """This will get a new keystone client (and new token)""" + def get_ks_client(region_name=consts.DEFAULT_REGION_NAME): + """This will get a cached keystone client (and token)""" try: - return KeystoneClient(region_name) + os_client = OpenStackDriver( + region_name=region_name, + region_clients=None) + return os_client.keystone_client except Exception: LOG.warn('Failure initializing KeystoneClient') raise diff --git a/distributedcloud/dcmanager/tests/unit/manager/test_patch_audit_manager.py b/distributedcloud/dcmanager/tests/unit/manager/test_patch_audit_manager.py index 45389b6c4..7c1a3a6e1 100644 --- a/distributedcloud/dcmanager/tests/unit/manager/test_patch_audit_manager.py +++ b/distributedcloud/dcmanager/tests/unit/manager/test_patch_audit_manager.py @@ -10,7 +10,7 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2017 Wind River Systems, Inc. +# Copyright (c) 2017-2020 Wind River Systems, Inc. 
# # The right to copy, distribute, modify, or otherwise make use # of this software may be licensed only pursuant to the terms @@ -188,10 +188,10 @@ class TestAuditManager(base.DCManagerTestCase): dcorch_messaging.setup("fake://", optional=True) @mock.patch.object(patch_audit_manager, 'PatchingClient') - @mock.patch.object(patch_audit_manager, 'KeystoneClient') + @mock.patch.object(patch_audit_manager, 'OpenStackDriver') @mock.patch.object(patch_audit_manager, 'context') def test_init(self, mock_context, - mock_keystone_client, + mock_openstack_driver, mock_patching_client): mock_context.get_admin_context.return_value = self.ctxt @@ -205,11 +205,11 @@ class TestAuditManager(base.DCManagerTestCase): @mock.patch.object(patch_audit_manager, 'SysinvClient') @mock.patch.object(patch_audit_manager, 'db_api') @mock.patch.object(patch_audit_manager, 'PatchingClient') - @mock.patch.object(patch_audit_manager, 'KeystoneClient') + @mock.patch.object(patch_audit_manager, 'OpenStackDriver') @mock.patch.object(patch_audit_manager, 'context') def test_periodic_patch_audit_in_sync( self, mock_context, - mock_keystone_client, + mock_openstack_driver, mock_patching_client, mock_db_api, mock_sysinv_client): @@ -243,11 +243,11 @@ class TestAuditManager(base.DCManagerTestCase): @mock.patch.object(patch_audit_manager, 'SysinvClient') @mock.patch.object(patch_audit_manager, 'db_api') @mock.patch.object(patch_audit_manager, 'PatchingClient') - @mock.patch.object(patch_audit_manager, 'KeystoneClient') + @mock.patch.object(patch_audit_manager, 'OpenStackDriver') @mock.patch.object(patch_audit_manager, 'context') def test_periodic_patch_audit_out_of_sync( self, mock_context, - mock_keystone_client, + mock_openstack_driver, mock_patching_client, mock_db_api, mock_sysinv_client): @@ -295,11 +295,11 @@ class TestAuditManager(base.DCManagerTestCase): @mock.patch.object(patch_audit_manager, 'db_api') @mock.patch.object(patch_audit_manager, 'PatchingClient') - @mock.patch.object(patch_audit_manager, 'KeystoneClient') + @mock.patch.object(patch_audit_manager, 'OpenStackDriver') @mock.patch.object(patch_audit_manager, 'context') def test_periodic_patch_audit_ignore_unmanaged_or_offline( self, mock_context, - mock_keystone_client, + mock_openstack_driver, mock_patching_client, mock_db_api): mock_context.get_admin_context.return_value = self.ctxt @@ -321,11 +321,11 @@ class TestAuditManager(base.DCManagerTestCase): @mock.patch.object(patch_audit_manager, 'SysinvClient') @mock.patch.object(patch_audit_manager, 'db_api') @mock.patch.object(patch_audit_manager, 'PatchingClient') - @mock.patch.object(patch_audit_manager, 'KeystoneClient') + @mock.patch.object(patch_audit_manager, 'OpenStackDriver') @mock.patch.object(patch_audit_manager, 'context') def test_periodic_patch_audit_extra_patches( self, mock_context, - mock_keystone_client, + mock_openstack_driver, mock_patching_client, mock_db_api, mock_sysinv_client): diff --git a/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_audit_manager.py b/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_audit_manager.py index c4a6c7348..a042eb91e 100644 --- a/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_audit_manager.py +++ b/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_audit_manager.py @@ -23,11 +23,11 @@ import mock import sys sys.modules['fm_core'] = mock.Mock() +from dccommon import consts as dccommon_consts from dcmanager.common import consts from dcmanager.db.sqlalchemy import api as db_api from dcmanager.manager import 
subcloud_audit_manager from dcmanager.manager import subcloud_manager -from dcorch.common import consts as dcorch_consts from dcmanager.tests import base @@ -198,8 +198,8 @@ class TestAuditManager(base.DCManagerTestCase): self.addCleanup(p.stop) # Mock the KeystoneClient - p = mock.patch.object(subcloud_audit_manager, 'KeystoneClient') - self.mock_keystone_client = p.start() + p = mock.patch.object(subcloud_audit_manager, 'OpenStackDriver') + self.mock_openstack_driver = p.start() self.addCleanup(p.stop) # Mock the context @@ -401,7 +401,7 @@ class TestAuditManager(base.DCManagerTestCase): # Verify the openstack endpoints were added self.fake_dcorch_api.add_subcloud_sync_endpoint_type.\ assert_called_with(mock.ANY, 'subcloud1', - dcorch_consts.ENDPOINT_TYPES_LIST_OS) + dccommon_consts.ENDPOINT_TYPES_LIST_OS) # Verify the subcloud openstack_installed was updated updated_subcloud = db_api.subcloud_get_by_name(self.ctx, 'subcloud1') diff --git a/distributedcloud/dcmanager/tests/utils.py b/distributedcloud/dcmanager/tests/utils.py index d8f9f0c9f..c09c2d76a 100644 --- a/distributedcloud/dcmanager/tests/utils.py +++ b/distributedcloud/dcmanager/tests/utils.py @@ -132,28 +132,3 @@ def create_subcloud_dict(data_list): 'external_oam_gateway_address': data_list[20], 'external_oam_floating_address': data_list[21], 'sysadmin_password': data_list[22]} - - -def create_route_dict(data_list): - return {'created-at': data_list[0], - 'updated-at': data_list[1], - 'deleted-at': data_list[2], - 'id': data_list[3], - 'uuid': data_list[4], - 'family': data_list[5], - 'network': data_list[6], - 'prefix': data_list[7], - 'gateway': data_list[8], - 'metric': data_list[9], - 'interface-id': data_list[10]} - - -def create_endpoint_dict(data_list): - return {'id': data_list[0], - 'legacy_endpoint_id': data_list[1], - 'interface': data_list[2], - 'service_id': data_list[3], - 'url': data_list[4], - 'extra': data_list[5], - 'enabled': data_list[6], - 'region_id': data_list[7]} diff --git a/distributedcloud/dcorch/api/proxy/apps/filter.py b/distributedcloud/dcorch/api/proxy/apps/filter.py index 9b0553c53..faef54113 100644 --- a/distributedcloud/dcorch/api/proxy/apps/filter.py +++ b/distributedcloud/dcorch/api/proxy/apps/filter.py @@ -20,16 +20,16 @@ from oslo_config import cfg from oslo_log import log as logging from oslo_service.wsgi import Request +from dccommon import consts as dccommon_consts from dcorch.api.proxy.apps.proxy import Proxy from dcorch.api.proxy.common.service import Middleware from dcorch.api.proxy.common import utils -from dcorch.common import consts LOG = logging.getLogger(__name__) filter_opts = [ cfg.StrOpt('user_header', - default=consts.TOPIC_ORCH_ENGINE, + default=dccommon_consts.USER_HEADER_VALUE, help='An application specific header'), ] diff --git a/distributedcloud/dcorch/api/proxy/common/utils.py b/distributedcloud/dcorch/api/proxy/common/utils.py index 470ba6c92..9890db07e 100644 --- a/distributedcloud/dcorch/api/proxy/common/utils.py +++ b/distributedcloud/dcorch/api/proxy/common/utils.py @@ -22,8 +22,9 @@ from six.moves.urllib.parse import urlparse from keystoneauth1 import exceptions as keystone_exceptions from oslo_log import log as logging +from dccommon import consts as dccommon_consts +from dccommon.drivers.openstack import sdk_platform as sdk from dcorch.common import consts -from dcorch.drivers.openstack import sdk_platform as sdk LOG = logging.getLogger(__name__) @@ -138,7 +139,7 @@ def set_request_forward_environ(req, remote_host, remote_port): def _get_fernet_keys(): """Get 
fernet keys from sysinv.""" - os_client = sdk.OpenStackDriver(region_name=consts.CLOUD_0, + os_client = sdk.OpenStackDriver(region_name=dccommon_consts.CLOUD_0, thread_name='proxy') try: key_list = os_client.sysinv_client.get_fernet_keys() @@ -146,12 +147,12 @@ def _get_fernet_keys(): except (keystone_exceptions.connection.ConnectTimeout, keystone_exceptions.ConnectFailure) as e: LOG.info("get_fernet_keys: cloud {} is not reachable [{}]" - .format(consts.CLOUD_0, str(e))) - sdk.OpenStackDriver.delete_region_clients(consts.CLOUD_0) + .format(dccommon_consts.CLOUD_0, str(e))) + sdk.OpenStackDriver.delete_region_clients(dccommon_consts.CLOUD_0) return None except (AttributeError, TypeError) as e: LOG.info("get_fernet_keys error {}".format(e)) - sdk.OpenStackDriver.delete_region_clients(consts.CLOUD_0, + sdk.OpenStackDriver.delete_region_clients(dccommon_consts.CLOUD_0, clear_token=True) return None except Exception as e: diff --git a/distributedcloud/dcorch/common/consts.py b/distributedcloud/dcorch/common/consts.py index f38a44fd4..1a1525ef9 100644 --- a/distributedcloud/dcorch/common/consts.py +++ b/distributedcloud/dcorch/common/consts.py @@ -63,14 +63,8 @@ RPC_API_VERSION = "1.0" TOPIC_ORCH_ENGINE = "dcorch-engine" -# Distributed Cloud constants -CLOUD_0 = "RegionOne" -VIRTUAL_MASTER_CLOUD = "SystemController" - ALARMS_DISABLED = "disabled" -USER_HEADER = {'User-Header': TOPIC_ORCH_ENGINE} - # SyncRequest States ORCH_REQUEST_NONE = None ORCH_REQUEST_QUEUED = "queued" # in database, not in thread @@ -134,14 +128,11 @@ ENDPOINT_TYPE_PATCHING = "patching" ENDPOINT_TYPE_IDENTITY = "identity" ENDPOINT_TYPE_FM = "faultmanagement" ENDPOINT_TYPE_NFV = "nfv" -ENDPOINT_TYPE_IDENTITY_OS = "identity_openstack" # platform endpoint types ENDPOINT_TYPES_LIST = [ENDPOINT_TYPE_PLATFORM, ENDPOINT_TYPE_PATCHING, ENDPOINT_TYPE_IDENTITY] -# openstack endpoint types -ENDPOINT_TYPES_LIST_OS = [ENDPOINT_TYPE_IDENTITY_OS] ENDPOINT_QUOTA_MAPPING = { ENDPOINT_TYPE_COMPUTE: NOVA_QUOTA_FIELDS, @@ -149,10 +140,6 @@ ENDPOINT_QUOTA_MAPPING = { ENDPOINT_TYPE_VOLUME: CINDER_QUOTA_FIELDS, } -KS_ENDPOINT_ADMIN = "admin" -KS_ENDPOINT_INTERNAL = "internal" -KS_ENDPOINT_DEFAULT = KS_ENDPOINT_INTERNAL - # DB sync agent endpoint DBS_ENDPOINT_INTERNAL = "internal" DBS_ENDPOINT_DEFAULT = DBS_ENDPOINT_INTERNAL @@ -186,8 +173,6 @@ ALARM_OK_STATUS = "OK" ALARM_DEGRADED_STATUS = "degraded" ALARM_CRITICAL_STATUS = "critical" -SECONDS_IN_HOUR = 3600 - # Subcloud initial sync state INITIAL_SYNC_STATE_NONE = "none" INITIAL_SYNC_STATE_REQUESTED = "requested" diff --git a/distributedcloud/dcorch/common/exceptions.py b/distributedcloud/dcorch/common/exceptions.py index c760d4f28..3bd25c790 100644 --- a/distributedcloud/dcorch/common/exceptions.py +++ b/distributedcloud/dcorch/common/exceptions.py @@ -207,27 +207,3 @@ class OrchRequestAlreadyExists(Conflict): class ObjectActionError(OrchestratorException): msg_fmt = _('Object action %(action)s failed because: %(reason)s') - - -class TrapDestAlreadyExists(Conflict): - message = _("TrapDest in region=%(region_name)s ip_address=%(ip_address)s " - "community=%(community)s already exists") - - -class TrapDestNotFound(NotFound): - message = _("Trapdest in region=%(region_name)s with ip_address " - "%(ip_address)s not found") - - -class CommunityAlreadyExists(Conflict): - message = _("Community %(community)s in region=%(region_name)s " - "already exists") - - -class CommunityNotFound(NotFound): - message = _("Community %(community)s not found in region=%(region_name)s") - - -class 
CertificateNotFound(NotFound): - message = _("Certificate in region=%(region_name)s with signature " - "%(signature)s not found") diff --git a/distributedcloud/dcorch/drivers/openstack/sdk.py b/distributedcloud/dcorch/drivers/openstack/sdk.py index 054c87d43..5a78bc35f 100644 --- a/distributedcloud/dcorch/drivers/openstack/sdk.py +++ b/distributedcloud/dcorch/drivers/openstack/sdk.py @@ -24,9 +24,10 @@ from oslo_utils import timeutils from dcorch.common import consts from dcorch.common import exceptions -from dcorch.drivers.openstack.fm import FmClient -from dcorch.drivers.openstack.keystone_v3 import KeystoneClient -from dcorch.drivers.openstack.sysinv_v1 import SysinvClient +from dccommon import consts as dccommon_consts +from dccommon.drivers.openstack.fm import FmClient +from dccommon.drivers.openstack.keystone_v3 import KeystoneClient +from dccommon.drivers.openstack.sysinv_v1 import SysinvClient # Gap, in seconds, to determine whether the given token is about to expire STALE_TOKEN_DURATION = 60 @@ -40,7 +41,7 @@ class OpenStackDriver(object): _identity_tokens = {} @lockutils.synchronized('dcorch-openstackdriver') - def __init__(self, region_name=consts.VIRTUAL_MASTER_CLOUD, + def __init__(self, region_name=dccommon_consts.VIRTUAL_MASTER_CLOUD, auth_url=None): # Check if objects are cached and try to use those self.region_name = region_name @@ -84,7 +85,7 @@ class OpenStackDriver(object): self.fm_client = FmClient( region_name, self.keystone_client.session, - endpoint_type=consts.KS_ENDPOINT_DEFAULT) + endpoint_type=dccommon_consts.KS_ENDPOINT_DEFAULT) OpenStackDriver.os_clients_dict[region_name][ 'fm'] = self.fm_client except Exception as exception: @@ -239,8 +240,8 @@ class OpenStackDriver(object): region_lists = \ KeystoneClient().endpoint_cache.get_all_regions() # nova, cinder, and neutron have no endpoints in consts.CLOUD_0 - if consts.CLOUD_0 in region_lists: - region_lists.remove(consts.CLOUD_0) + if dccommon_consts.CLOUD_0 in region_lists: + region_lists.remove(dccommon_consts.CLOUD_0) return region_lists except Exception as exception: LOG.error('Error Occurred: %s', exception.message) diff --git a/distributedcloud/dcorch/drivers/openstack/sdk_platform.py b/distributedcloud/dcorch/drivers/openstack/sdk_platform.py deleted file mode 100644 index 9bafbd8ff..000000000 --- a/distributedcloud/dcorch/drivers/openstack/sdk_platform.py +++ /dev/null @@ -1,181 +0,0 @@ -# Copyright 2017-2018 Wind River Inc - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -""" -OpenStack Driver -""" -import collections - -from oslo_concurrency import lockutils -from oslo_log import log -from oslo_utils import timeutils - -from dcorch.common import consts -from dcorch.drivers.openstack.fm import FmClient -from dcorch.drivers.openstack.keystone_v3 import KeystoneClient -from dcorch.drivers.openstack.sysinv_v1 import SysinvClient - -# Gap, in seconds, to determine whether the given token is about to expire -STALE_TOKEN_DURATION = 60 - -LOG = log.getLogger(__name__) - -LOCK_NAME = 'dcorch-openstackdriver-platform' - - -class OpenStackDriver(object): - - os_clients_dict = collections.defaultdict(dict) - _identity_tokens = {} - - @lockutils.synchronized(LOCK_NAME) - def __init__(self, region_name=consts.CLOUD_0, thread_name='dcorch', - auth_url=None): - # Check if objects are cached and try to use those - self.region_name = region_name - self.sysinv_client = None - self.fm_client = None - - if ((region_name in OpenStackDriver.os_clients_dict) and - ('keystone' in self.os_clients_dict[region_name]) and - self._is_token_valid(region_name)): - self.keystone_client = \ - self.os_clients_dict[region_name]['keystone'] - else: - LOG.info("get new keystone client for subcloud %s", region_name) - try: - self.keystone_client = KeystoneClient(region_name, auth_url) - OpenStackDriver.os_clients_dict[region_name]['keystone'] =\ - self.keystone_client - except Exception as exception: - LOG.error('keystone_client region %s error: %s' % - (region_name, exception.message)) - - if ((region_name in OpenStackDriver.os_clients_dict) and - (thread_name in OpenStackDriver.os_clients_dict[region_name])): - - if ('sysinv' in OpenStackDriver.os_clients_dict[region_name] - [thread_name]): - LOG.debug('Using cached OS sysinv client objects %s %s' % - (region_name, thread_name)) - self.sysinv_client = OpenStackDriver.os_clients_dict[ - region_name][thread_name]['sysinv'] - - if ('fm' in OpenStackDriver.os_clients_dict[region_name] - [thread_name]): - LOG.debug('Using cached OS fm client objects %s %s' % - (region_name, thread_name)) - self.fm_client = OpenStackDriver.os_clients_dict[ - region_name][thread_name]['fm'] - else: - OpenStackDriver.os_clients_dict[region_name][thread_name] = {} - - if self.sysinv_client is None: - # Create new sysinv client object and cache it - try: - self.sysinv_client = SysinvClient(region_name, - self.keystone_client.session) - (OpenStackDriver.os_clients_dict[region_name][thread_name] - ['sysinv']) = self.sysinv_client - - except Exception as exception: - LOG.error('sysinv_client region %s thread %s error: %s' % - (region_name, thread_name, exception.message)) - - if self.fm_client is None: - # Create new fm client object and cache it - try: - self.fm_client = FmClient( - region_name, - self.keystone_client.session, - endpoint_type=consts.KS_ENDPOINT_DEFAULT) - (OpenStackDriver.os_clients_dict[region_name][thread_name] - ['fm']) = self.fm_client - except Exception as exception: - LOG.error('fm_client region %s thread %s error: %s' % - (region_name, thread_name, exception.message)) - - @classmethod - @lockutils.synchronized(LOCK_NAME) - def delete_region_clients(cls, region_name, clear_token=False): - LOG.warn("delete_region_clients=%s, clear_token=%s" % - (region_name, clear_token)) - if region_name in cls.os_clients_dict: - del cls.os_clients_dict[region_name] - if clear_token: - cls._identity_tokens[region_name] = None - - @classmethod - @lockutils.synchronized(LOCK_NAME) - def delete_region_clients_for_thread(cls, region_name, thread_name): - 
LOG.debug("delete_region_clients=%s, thread_name=%s" % - (region_name, thread_name)) - if (region_name in cls.os_clients_dict and - thread_name in cls.os_clients_dict[region_name]): - del cls.os_clients_dict[region_name][thread_name] - - def _is_token_valid(self, region_name): - try: - keystone = \ - OpenStackDriver.os_clients_dict[region_name]['keystone'].\ - keystone_client - if (not OpenStackDriver._identity_tokens - or region_name not in OpenStackDriver._identity_tokens - or not OpenStackDriver._identity_tokens[region_name]): - OpenStackDriver._identity_tokens[region_name] = \ - keystone.tokens.validate(keystone.session.get_token()) - LOG.info("Get new token for subcloud %s expires_at=%s" % - (region_name, - OpenStackDriver._identity_tokens[region_name] - ['expires_at'])) - # Reset the cached dictionary - OpenStackDriver.os_clients_dict[region_name] = \ - collections.defaultdict(dict) - return False - - token = \ - keystone.tokens.validate(OpenStackDriver._identity_tokens - [region_name]) - if token != OpenStackDriver._identity_tokens[region_name]: - LOG.info("updating token %s to %s" % - (OpenStackDriver._identity_tokens[region_name], - token)) - OpenStackDriver._identity_tokens[region_name] = token - OpenStackDriver.os_clients_dict[region_name] = \ - collections.defaultdict(dict) - return False - - except Exception as exception: - LOG.info('_is_token_valid handle: %s', exception.message) - # Reset the cached dictionary - OpenStackDriver.os_clients_dict[region_name] = \ - collections.defaultdict(dict) - OpenStackDriver._identity_tokens[region_name] = None - return False - - expiry_time = timeutils.normalize_time(timeutils.parse_isotime( - self._identity_tokens[region_name]['expires_at'])) - if timeutils.is_soon(expiry_time, STALE_TOKEN_DURATION): - LOG.info("The cached keystone token for subcloud %s " - "will expire soon %s" % - (region_name, - OpenStackDriver._identity_tokens[region_name] - ['expires_at'])) - # Reset the cached dictionary - OpenStackDriver.os_clients_dict[region_name] = \ - collections.defaultdict(dict) - OpenStackDriver._identity_tokens[region_name] = None - return False - else: - return True diff --git a/distributedcloud/dcorch/engine/alarm_aggregate_manager.py b/distributedcloud/dcorch/engine/alarm_aggregate_manager.py index 7dde7d848..fca1e855a 100644 --- a/distributedcloud/dcorch/engine/alarm_aggregate_manager.py +++ b/distributedcloud/dcorch/engine/alarm_aggregate_manager.py @@ -14,6 +14,8 @@ # limitations under the License. 
import datetime +from dccommon import consts as dccommon_consts +from dccommon import exceptions as dccommon_exceptions from dcmanager.common import consts as dcm_consts from dcorch.common import consts from dcorch.common import context @@ -22,10 +24,10 @@ from dcorch.common.i18n import _ from dcorch.common import manager from dcorch.db import api as db_api -from dcorch.drivers.openstack.fm import FmClient -from dcorch.drivers.openstack.keystone_v3 import KeystoneClient -from dcorch.drivers.openstack import sdk_platform as sdk -from dcorch.drivers.openstack.sysinv_v1 import SysinvClient +from dccommon.drivers.openstack.fm import FmClient +from dccommon.drivers.openstack.keystone_v3 import KeystoneClient +from dccommon.drivers.openstack import sdk_platform as sdk +from dccommon.drivers.openstack.sysinv_v1 import SysinvClient from oslo_config import cfg from oslo_log import log as logging @@ -63,7 +65,7 @@ class AlarmAggregateManager(manager.Manager): ks_client = KeystoneClient(subcloud_name) sysinv_client = SysinvClient(subcloud_name, ks_client.session) fm_client = FmClient(subcloud_name, ks_client.session, - consts.KS_ENDPOINT_DEFAULT) + dccommon_consts.KS_ENDPOINT_DEFAULT) sysinv_client.snmp_trapdest_create(payload) self.update_alarm_summary(self.context, subcloud_name, fm_client=fm_client) @@ -76,7 +78,7 @@ class AlarmAggregateManager(manager.Manager): LOG.info("snmp_trapdest_create AttributeError region_name=%s" % subcloud_name) pass - except exceptions.TrapDestAlreadyExists: + except dccommon_exceptions.TrapDestAlreadyExists: LOG.info("snmp_trapdest_create TrapDestAlreadyExists " "region_name=%s payload %s" % (subcloud_name, payload)) pass diff --git a/distributedcloud/dcorch/engine/fernet_key_manager.py b/distributedcloud/dcorch/engine/fernet_key_manager.py index 2aaa9c492..94ba7e111 100644 --- a/distributedcloud/dcorch/engine/fernet_key_manager.py +++ b/distributedcloud/dcorch/engine/fernet_key_manager.py @@ -20,14 +20,16 @@ from oslo_config import cfg from oslo_log import log as logging from oslo_serialization import jsonutils +from dccommon import consts as dccommon_consts +from dccommon.drivers.openstack.keystone_v3 import KeystoneClient +from dccommon.drivers.openstack.sysinv_v1 import SysinvClient from dcorch.common import consts from dcorch.common import context from dcorch.common import exceptions from dcorch.common.i18n import _ from dcorch.common import manager from dcorch.common import utils -from dcorch.drivers.openstack.keystone_v3 import KeystoneClient -from dcorch.drivers.openstack.sysinv_v1 import SysinvClient + FERNET_REPO_MASTER_ID = "keys" KEY_ROTATE_CMD = "/usr/bin/keystone-fernet-keys-rotate-active" @@ -67,7 +69,8 @@ class FernetKeyManager(manager.Manager): def _schedule_work(self, operation_type, subcloud=None): keys = self._get_master_keys() if not keys: - LOG.info(_("No fernet keys returned from %s") % consts.CLOUD_0) + LOG.info(_("No fernet keys returned from %s") % + dccommon_consts.CLOUD_0) return try: resource_info = FernetKeyManager.to_resource_info(keys) @@ -91,14 +94,14 @@ class FernetKeyManager(manager.Manager): try: # No cached client is required as it is called during the initial # sync and after weekly key rotation - ks_client = KeystoneClient(consts.CLOUD_0) - sysinv_client = SysinvClient(consts.CLOUD_0, + ks_client = KeystoneClient(dccommon_consts.CLOUD_0) + sysinv_client = SysinvClient(dccommon_consts.CLOUD_0, ks_client.session) keys = sysinv_client.get_fernet_keys() except (exceptions.ConnectionRefused, exceptions.NotAuthorized, 
exceptions.TimeOut): LOG.info(_("Retrieving the fernet keys from %s timeout") % - consts.CLOUD_0) + dccommon_consts.CLOUD_0) except Exception as e: LOG.info(_("Fail to retrieve the master fernet keys: %s") % e.message) @@ -122,7 +125,8 @@ class FernetKeyManager(manager.Manager): def distribute_keys(self, ctxt, subcloud_name): keys = self._get_master_keys() if not keys: - LOG.info(_("No fernet keys returned from %s") % consts.CLOUD_0) + LOG.info(_("No fernet keys returned from %s") % + dccommon_consts.CLOUD_0) return resource_info = FernetKeyManager.to_resource_info(keys) key_list = FernetKeyManager.from_resource_info(resource_info) diff --git a/distributedcloud/dcorch/engine/quota_manager.py b/distributedcloud/dcorch/engine/quota_manager.py index 25f40f7c9..c00964044 100644 --- a/distributedcloud/dcorch/engine/quota_manager.py +++ b/distributedcloud/dcorch/engine/quota_manager.py @@ -23,9 +23,10 @@ import time from oslo_config import cfg from oslo_log import log as logging +from dccommon import consts as dccommon_consts +from dccommon import endpoint_cache from dcorch.common import consts from dcorch.common import context -from dcorch.common import endpoint_cache from dcorch.common import exceptions from dcorch.common.i18n import _ from dcorch.common import manager @@ -114,7 +115,7 @@ class QuotaManager(manager.Manager): def get_projects_users_with_modified_quotas(self): # get the list of project/user tuples that have modified quotas project_user_list = set([]) - os_client = sdk.OpenStackDriver(consts.VIRTUAL_MASTER_CLOUD) + os_client = sdk.OpenStackDriver(dccommon_consts.VIRTUAL_MASTER_CLOUD) try: quotas = os_client.nova_client.nova_client.quotas.list() project_user_quotas = quotas['project_user_quotas'] @@ -306,7 +307,7 @@ class QuotaManager(manager.Manager): # are managed by dcorch so delete them from all regions except # the master one. for region in regions_usage_dict_copy: - if region == consts.VIRTUAL_MASTER_CLOUD: + if region == dccommon_consts.VIRTUAL_MASTER_CLOUD: continue for quota in consts.QUOTAS_FOR_MANAGED_RESOURCES: regions_usage_dict_copy[region].pop(quota, None) @@ -357,7 +358,7 @@ class QuotaManager(manager.Manager): # Remove the master region from the list. Its quotas should already # be up to date for managed resources. - region_lists.remove(consts.VIRTUAL_MASTER_CLOUD) + region_lists.remove(dccommon_consts.VIRTUAL_MASTER_CLOUD) # (NOTE: knasim-wrs): The Master Cloud's Project ID and User ID # dont mean anything for the subcloud, so we need to first resolve @@ -407,8 +408,9 @@ class QuotaManager(manager.Manager): def get_overall_tenant_quota_limits(self, project_id, user_id): # Return quota limits in the master cloud. These are the overall # quota limits for the whole cloud. - return self.get_tenant_quota_limits_region(project_id, user_id, - consts.VIRTUAL_MASTER_CLOUD) + return self.get_tenant_quota_limits_region( + project_id, user_id, + dccommon_consts.VIRTUAL_MASTER_CLOUD) def get_tenant_quota_usage_per_region(self, project_id, user_id): # Return quota usage dict with keys as region name & values as usages. 
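Before the remaining dcorch engine diffs, a minimal sketch of the caller-side pattern the dcmanager and dcorch hunks in this patch converge on: obtain a cached keystone client (and token) from the consolidated dccommon OpenStackDriver, then build region-scoped clients from its session. The audit_region() wrapper and the get_service_groups() call are illustrative assumptions; they stand in for whatever resource an audit actually fetches.

from dccommon.drivers.openstack.sdk_platform import OpenStackDriver
from dccommon.drivers.openstack.sysinv_v1 import SysinvClient


def audit_region(region_name):
    # region_clients=None caches only the keystone client/token for this
    # region; repeated calls reuse the cached client until the token is
    # close to expiry, instead of re-authenticating on every audit cycle.
    os_client = OpenStackDriver(region_name=region_name,
                                region_clients=None)
    # Service clients are then created from the cached keystone session.
    sysinv_client = SysinvClient(region_name,
                                 os_client.keystone_client.session)
    return sysinv_client.get_service_groups()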
diff --git a/distributedcloud/dcorch/engine/service.py b/distributedcloud/dcorch/engine/service.py index a0bde4f85..2fcfb4f4d 100644 --- a/distributedcloud/dcorch/engine/service.py +++ b/distributedcloud/dcorch/engine/service.py @@ -21,6 +21,7 @@ from oslo_config import cfg from oslo_log import log as logging import oslo_messaging +from dccommon import consts as dccommon_consts from dcmanager.common import consts as dcm_consts from dcorch.common import consts from dcorch.common import context @@ -134,10 +135,10 @@ class EngineService(service.Service): self.periodic_sync_audit, initial_delay=self.periodic_interval / 2) self.TG.add_timer(CONF.fernet.key_rotation_interval * - consts.SECONDS_IN_HOUR, + dccommon_consts.SECONDS_IN_HOUR, self.periodic_key_rotation, initial_delay=(CONF.fernet.key_rotation_interval - * consts.SECONDS_IN_HOUR)) + * dccommon_consts.SECONDS_IN_HOUR)) def service_registry_report(self): ctx = context.get_admin_context() diff --git a/distributedcloud/dcorch/engine/subcloud.py b/distributedcloud/dcorch/engine/subcloud.py index b9abaa967..e9671c073 100644 --- a/distributedcloud/dcorch/engine/subcloud.py +++ b/distributedcloud/dcorch/engine/subcloud.py @@ -15,6 +15,7 @@ import threading +from dccommon import consts as dccommon_consts from dcmanager.common import consts as dcm_consts from dcorch.common import consts as dco_consts from dcorch.engine.sync_services.identity import IdentitySyncThread @@ -28,7 +29,7 @@ LOG = logging.getLogger(__name__) syncthread_subclass_map = { dco_consts.ENDPOINT_TYPE_PLATFORM: SysinvSyncThread, dco_consts.ENDPOINT_TYPE_IDENTITY: IdentitySyncThread, - dco_consts.ENDPOINT_TYPE_IDENTITY_OS: IdentitySyncThread + dccommon_consts.ENDPOINT_TYPE_IDENTITY_OS: IdentitySyncThread } diff --git a/distributedcloud/dcorch/engine/sync_services/compute.py b/distributedcloud/dcorch/engine/sync_services/compute.py index dc537e6e1..917620085 100644 --- a/distributedcloud/dcorch/engine/sync_services/compute.py +++ b/distributedcloud/dcorch/engine/sync_services/compute.py @@ -21,6 +21,7 @@ from novaclient import utils as novaclient_utils from oslo_log import log as logging from oslo_serialization import jsonutils +from dccommon import consts as dccommon_consts from dcorch.common import consts from dcorch.common import exceptions from dcorch.common import utils @@ -60,7 +61,7 @@ class ComputeSyncThread(SyncThread): if (not self.sc_nova_client and self.sc_admin_session): self.sc_nova_client = novaclient.Client( '2.38', session=self.sc_admin_session, - endpoint_type=consts.KS_ENDPOINT_INTERNAL, + endpoint_type=dccommon_consts.KS_ENDPOINT_INTERNAL, region_name=self.subcloud_engine.subcloud.region_name) def initialize(self): @@ -72,8 +73,8 @@ class ComputeSyncThread(SyncThread): # todo: update version to 2.53 once on pike self.m_nova_client = novaclient.Client( '2.38', session=self.admin_session, - endpoint_type=consts.KS_ENDPOINT_INTERNAL, - region_name=consts.VIRTUAL_MASTER_CLOUD) + endpoint_type=dccommon_consts.KS_ENDPOINT_INTERNAL, + region_name=dccommon_consts.VIRTUAL_MASTER_CLOUD) self.initialize_sc_clients() LOG.info("session and clients initialized", extra=self.log_extra) diff --git a/distributedcloud/dcorch/engine/sync_services/identity.py b/distributedcloud/dcorch/engine/sync_services/identity.py index b987e6282..efc2a4c1c 100644 --- a/distributedcloud/dcorch/engine/sync_services/identity.py +++ b/distributedcloud/dcorch/engine/sync_services/identity.py @@ -17,6 +17,7 @@ import base64 from collections import namedtuple +from dccommon import consts as 
dccommon_consts from dcdbsync.dbsyncclient import client as dbsyncclient from dcdbsync.dbsyncclient import exceptions as dbsync_exceptions from dcorch.common import consts @@ -94,7 +95,7 @@ class IdentitySyncThread(SyncThread): if (not self.sc_ks_client and self.sc_admin_session): self.sc_ks_client = keystoneclient.Client( session=self.sc_admin_session, - endpoint_type=consts.KS_ENDPOINT_ADMIN, + endpoint_type=dccommon_consts.KS_ENDPOINT_ADMIN, region_name=self.subcloud_engine.subcloud.region_name) # create a dbsync client for the subcloud if (not self.sc_dbs_client and self.sc_admin_session): @@ -231,7 +232,7 @@ class IdentitySyncThread(SyncThread): if not m_users: LOG.error("No users returned from {}". - format(consts.VIRTUAL_MASTER_CLOUD)) + format(dccommon_consts.VIRTUAL_MASTER_CLOUD)) raise exceptions.SyncRequestFailed # get users from the subcloud @@ -251,7 +252,7 @@ class IdentitySyncThread(SyncThread): if not m_projects: LOG.error("No projects returned from {}". - format(consts.VIRTUAL_MASTER_CLOUD)) + format(dccommon_consts.VIRTUAL_MASTER_CLOUD)) raise exceptions.SyncRequestFailed # get projects from the subcloud @@ -1485,7 +1486,8 @@ class IdentitySyncThread(SyncThread): self.m_ks_client) except dbsync_exceptions.Unauthorized as e: LOG.info("Get resource [{}] request failed for {}: {}." - .format(resource_type, consts.VIRTUAL_MASTER_CLOUD, + .format(resource_type, + dccommon_consts.VIRTUAL_MASTER_CLOUD, str(e)), extra=self.log_extra) # In case of token expires, re-authenticate and retry once self.reinitialize_m_clients() diff --git a/distributedcloud/dcorch/engine/sync_services/network.py b/distributedcloud/dcorch/engine/sync_services/network.py index 8de4f08cb..34f602c82 100644 --- a/distributedcloud/dcorch/engine/sync_services/network.py +++ b/distributedcloud/dcorch/engine/sync_services/network.py @@ -20,6 +20,7 @@ from neutronclient.neutron import client as neutronclient from oslo_log import log as logging from oslo_serialization import jsonutils +from dccommon import consts as dccommon_consts from dcorch.common import consts from dcorch.common import exceptions from dcorch.drivers.openstack import sdk @@ -61,7 +62,7 @@ class NetworkSyncThread(SyncThread): if (not self.sc_neutron_client and self.sc_admin_session): self.sc_neutron_client = neutronclient.Client( "2.0", session=self.sc_admin_session, - endpoint_type=consts.KS_ENDPOINT_INTERNAL, + endpoint_type=dccommon_consts.KS_ENDPOINT_INTERNAL, region_name=self.subcloud_engine.subcloud.region_name) def initialize(self): @@ -72,8 +73,8 @@ class NetworkSyncThread(SyncThread): super(NetworkSyncThread, self).initialize() self.m_neutron_client = neutronclient.Client( "2.0", session=self.admin_session, - endpoint_type=consts.KS_ENDPOINT_INTERNAL, - region_name=consts.VIRTUAL_MASTER_CLOUD) + endpoint_type=dccommon_consts.KS_ENDPOINT_INTERNAL, + region_name=dccommon_consts.VIRTUAL_MASTER_CLOUD) self.initialize_sc_clients() LOG.info("session and clients initialized", extra=self.log_extra) diff --git a/distributedcloud/dcorch/engine/sync_services/sysinv.py b/distributedcloud/dcorch/engine/sync_services/sysinv.py index 348fc6e76..94f8c3eaf 100644 --- a/distributedcloud/dcorch/engine/sync_services/sysinv.py +++ b/distributedcloud/dcorch/engine/sync_services/sysinv.py @@ -1,4 +1,4 @@ -# Copyright 2017-2018 Wind River +# Copyright 2017-2020 Wind River # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -19,9 +19,12 @@ from requests_toolbelt import MultipartDecoder from oslo_log import log as logging from oslo_serialization import jsonutils +from dccommon import consts as dccommon_consts +from dccommon.drivers.openstack import sdk_platform as sdk +from dccommon import exceptions as dccommon_exceptions from dcorch.common import consts from dcorch.common import exceptions -from dcorch.drivers.openstack import sdk_platform as sdk + from dcorch.engine.fernet_key_manager import FERNET_REPO_MASTER_ID from dcorch.engine.fernet_key_manager import FernetKeyManager from dcorch.engine.sync_thread import AUDIT_RESOURCE_EXTRA @@ -210,7 +213,7 @@ class SysinvSyncThread(SyncThread): try: s_os_client.sysinv_client.snmp_trapdest_delete( subcloud_rsrc.subcloud_resource_id) - except exceptions.TrapDestNotFound: + except dccommon_exceptions.TrapDestNotFound: # SNMP trapdest already deleted in subcloud, carry on. LOG.info("SNMP trapdest not in subcloud, may be already deleted", extra=self.log_extra) @@ -277,7 +280,7 @@ class SysinvSyncThread(SyncThread): try: s_os_client.sysinv_client.snmp_community_delete( subcloud_rsrc.subcloud_resource_id) - except exceptions.CommunityNotFound: + except dccommon_exceptions.CommunityNotFound: # Community already deleted in subcloud, carry on. LOG.info("SNMP community not in subcloud, may be already deleted", extra=self.log_extra) @@ -397,11 +400,11 @@ class SysinvSyncThread(SyncThread): cert_to_delete = certificate break if not cert_to_delete: - raise exceptions.CertificateNotFound( + raise dccommon_exceptions.CertificateNotFound( region_name=self.subcloud_engine.subcloud.region_name, signature=subcloud_rsrc.subcloud_resource_id) s_os_client.sysinv_client.delete_certificate(cert_to_delete) - except exceptions.CertificateNotFound: + except dccommon_exceptions.CertificateNotFound: # Certificate already deleted in subcloud, carry on. 
LOG.info("Certificate not in subcloud, may be already deleted", extra=self.log_extra) @@ -564,8 +567,9 @@ class SysinvSyncThread(SyncThread): # SysInv Audit Related def get_master_resources(self, resource_type): try: - os_client = sdk.OpenStackDriver(region_name=consts.CLOUD_0, - thread_name=self.audit_thread.name) + os_client = sdk.OpenStackDriver( + region_name=dccommon_consts.CLOUD_0, + thread_name=self.audit_thread.name) if resource_type == consts.RESOURCE_TYPE_SYSINV_DNS: return [self.get_dns_resource(os_client)] elif resource_type == consts.RESOURCE_TYPE_SYSINV_SNMP_COMM: @@ -631,10 +635,11 @@ class SysinvSyncThread(SyncThread): return None def post_audit(self): + super(SysinvSyncThread, self).post_audit() sdk.OpenStackDriver.delete_region_clients_for_thread( self.region_name, self.audit_thread.name) sdk.OpenStackDriver.delete_region_clients_for_thread( - consts.CLOUD_0, self.audit_thread.name) + dccommon_consts.CLOUD_0, self.audit_thread.name) def get_dns_resource(self, os_client): return os_client.sysinv_client.get_dns() diff --git a/distributedcloud/dcorch/engine/sync_services/volume.py b/distributedcloud/dcorch/engine/sync_services/volume.py index 36c499683..2c2a00ebf 100644 --- a/distributedcloud/dcorch/engine/sync_services/volume.py +++ b/distributedcloud/dcorch/engine/sync_services/volume.py @@ -19,6 +19,7 @@ from keystoneauth1 import exceptions as keystone_exceptions from oslo_log import log as logging from oslo_serialization import jsonutils +from dccommon import consts as dccommon_consts from dcorch.common import consts from dcorch.common import exceptions from dcorch.engine import quota_manager @@ -55,7 +56,7 @@ class VolumeSyncThread(SyncThread): if (not self.sc_cinder_client and self.sc_admin_session): self.sc_cinder_client = cinderclient.Client( "3.0", session=self.sc_admin_session, - endpoint_type=consts.KS_ENDPOINT_INTERNAL, + endpoint_type=dccommon_consts.KS_ENDPOINT_INTERNAL, region_name=self.subcloud_engine.subcloud.region_name) def initialize(self): @@ -66,8 +67,8 @@ class VolumeSyncThread(SyncThread): super(VolumeSyncThread, self).initialize() self.m_cinder_client = cinderclient.Client( "3.0", session=self.admin_session, - endpoint_type=consts.KS_ENDPOINT_INTERNAL, - region_name=consts.VIRTUAL_MASTER_CLOUD) + endpoint_type=dccommon_consts.KS_ENDPOINT_INTERNAL, + region_name=dccommon_consts.VIRTUAL_MASTER_CLOUD) self.initialize_sc_clients() LOG.info("session and clients initialized", extra=self.log_extra) diff --git a/distributedcloud/dcorch/engine/sync_thread.py b/distributedcloud/dcorch/engine/sync_thread.py index cfeea1422..58127f679 100644 --- a/distributedcloud/dcorch/engine/sync_thread.py +++ b/distributedcloud/dcorch/engine/sync_thread.py @@ -1,4 +1,4 @@ -# Copyright 2017 Wind River +# Copyright 2017-2020 Wind River # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,11 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import collections import threading +from oslo_concurrency import lockutils from oslo_config import cfg from oslo_log import log as logging +from dccommon import consts as dccommon_consts from dcdbsync.dbsyncclient import client as dbsyncclient from dcmanager.common import consts as dcmanager_consts from dcmanager.rpc import client as dcmanager_rpc_client @@ -54,11 +57,15 @@ STATE_COMPLETED = 'completed' AUDIT_RESOURCE_MISSING = 'missing' AUDIT_RESOURCE_EXTRA = 'extra_resource' +AUDIT_LOCK_NAME = 'dcorch-audit' + class SyncThread(object): """Manages tasks related to resource management.""" MAX_RETRY = 2 + # used by the audit to cache the master resources + master_resources_dict = collections.defaultdict(dict) def __init__(self, subcloud_engine, endpoint_type=None): super(SyncThread, self).__init__() @@ -71,7 +78,7 @@ class SyncThread(object): self.condition = threading.Condition() # used to wake up the thread self.ctxt = context.get_admin_context() self.sync_handler_map = {} - self.master_region_name = consts.CLOUD_0 + self.master_region_name = dccommon_consts.CLOUD_0 self.audit_resources = [] self.log_extra = { @@ -122,7 +129,7 @@ class SyncThread(object): config = None if self.endpoint_type in consts.ENDPOINT_TYPES_LIST: config = cfg.CONF.cache - elif self.endpoint_type in consts.ENDPOINT_TYPES_LIST_OS: + elif self.endpoint_type in dccommon_consts.ENDPOINT_TYPES_LIST_OS: config = cfg.CONF.openstack_cache else: raise exceptions.EndpointNotSupported( @@ -136,16 +143,18 @@ class SyncThread(object): project_domain_name=config.admin_project_domain_name, user_domain_name=config.admin_user_domain_name) self.admin_session = session.Session( - auth=auth, timeout=60, additional_headers=consts.USER_HEADER) + auth=auth, timeout=60, + additional_headers=dccommon_consts.USER_HEADER) + # keystone client self.ks_client = keystoneclient.Client( session=self.admin_session, - region_name=consts.CLOUD_0) + region_name=dccommon_consts.CLOUD_0) # dcdbsync client self.dbs_client = dbsyncclient.Client( endpoint_type=consts.DBS_ENDPOINT_INTERNAL, session=self.admin_session, - region_name=consts.CLOUD_0) + region_name=dccommon_consts.CLOUD_0) def initialize_sc_clients(self): # base implementation of initializing the subcloud specific @@ -158,7 +167,7 @@ class SyncThread(object): name='keystone', type='identity') sc_auth_url = self.ks_client.endpoints.list( service=identity_service[0].id, - interface=consts.KS_ENDPOINT_ADMIN, + interface=dccommon_consts.KS_ENDPOINT_ADMIN, region=self.subcloud_engine.subcloud.region_name) try: LOG.info("Found sc_auth_url: {}".format(sc_auth_url)) @@ -175,7 +184,7 @@ class SyncThread(object): config = None if self.endpoint_type in consts.ENDPOINT_TYPES_LIST: config = cfg.CONF.cache - elif self.endpoint_type in consts.ENDPOINT_TYPES_LIST_OS: + elif self.endpoint_type in dccommon_consts.ENDPOINT_TYPES_LIST_OS: config = cfg.CONF.openstack_cache sc_auth = loader.load_from_options( @@ -188,7 +197,7 @@ class SyncThread(object): self.sc_admin_session = session.Session( auth=sc_auth, timeout=60, - additional_headers=consts.USER_HEADER) + additional_headers=dccommon_consts.USER_HEADER) def initial_sync(self): # Return True to indicate initial sync success @@ -487,9 +496,12 @@ class SyncThread(object): extra=self.log_extra) self.post_audit() + @lockutils.synchronized(AUDIT_LOCK_NAME) def post_audit(self): - # The specific SyncThread subclasses may perform post audit actions - return + # reset the cached master resources + SyncThread.master_resources_dict = collections.defaultdict(dict) + # 
The specific SyncThread subclasses may perform additional post + # audit actions def audit_find_missing(self, resource_type, m_resources, db_resources, sc_resources, @@ -706,12 +718,19 @@ class SyncThread(object): if sc_resources is None: return m_resources, db_resources, sc_resources db_resources = self.get_db_master_resources(resource_type) - # todo: master resources will be read by multiple threads - # depending on the number of subclouds. Could do some kind of - # caching for performance improvement. - m_resources = self.get_master_resources(resource_type) + m_resources = self.get_cached_master_resources(resource_type) return m_resources, db_resources, sc_resources + @lockutils.synchronized(AUDIT_LOCK_NAME) + def get_cached_master_resources(self, resource_type): + if resource_type in SyncThread.master_resources_dict: + m_resources = SyncThread.master_resources_dict[resource_type] + else: + m_resources = self.get_master_resources(resource_type) + if m_resources is not None: + SyncThread.master_resources_dict[resource_type] = m_resources + return m_resources + def get_subcloud_resources(self, resource_type): return None diff --git a/distributedcloud/dcorch/snmp/controller.py b/distributedcloud/dcorch/snmp/controller.py index 6e3df3d87..3d425badb 100644 --- a/distributedcloud/dcorch/snmp/controller.py +++ b/distributedcloud/dcorch/snmp/controller.py @@ -15,7 +15,7 @@ import collections import datetime -from dcorch.common import consts +from dccommon import consts as dccommon_consts from dcorch.common import context from dcorch.rpc import client as rpc_client from multiprocessing import Queue @@ -57,7 +57,7 @@ class Controller(object): self.system_trap_tstamps[system] = collections.deque() def handle_trap(self, system, msg): - if system == consts.CLOUD_0: + if system == dccommon_consts.CLOUD_0: return if not (system in self.system_last_updates): self._add_system(system) diff --git a/distributedcloud/setup.cfg b/distributedcloud/setup.cfg index 82126e9fa..712e552d7 100644 --- a/distributedcloud/setup.cfg +++ b/distributedcloud/setup.cfg @@ -21,6 +21,7 @@ classifier = [files] packages = + dccommon dcmanager dcorch dcdbsync diff --git a/distributedcloud/tox.ini b/distributedcloud/tox.ini index 32db63834..1106a991f 100644 --- a/distributedcloud/tox.ini +++ b/distributedcloud/tox.ini @@ -181,7 +181,7 @@ deps = {[testenv:py27]deps} -e{[dc]cgcs_patch_src_dir} commands = - pylint {posargs} dcmanager dcorch dcdbsync --rcfile=./pylint.rc + pylint {posargs} dccommon dcmanager dcorch dcdbsync --rcfile=./pylint.rc [testenv:linters] basepython = python3
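Note: the SyncThread changes above are the core of the audit caching in this patch: a class-level master_resources_dict shared by all sync threads, a get_cached_master_resources() guarded by lockutils.synchronized(AUDIT_LOCK_NAME) so only the first thread in an audit cycle queries the master region, and a post_audit() that resets the dictionary (with SysinvSyncThread additionally dropping its per-thread region clients). The standalone sketch below mirrors that pattern outside of dcorch; the CachedAuditor class and its fetch_from_master() stub are hypothetical stand-ins for SyncThread and get_master_resources().

    # Sketch only: mirrors the master-resource caching pattern added to
    # SyncThread above; CachedAuditor / fetch_from_master are assumed names.
    import collections
    import threading

    from oslo_concurrency import lockutils

    AUDIT_LOCK_NAME = 'example-audit'


    class CachedAuditor(object):
        # shared by all auditor threads, like SyncThread.master_resources_dict
        master_resources_dict = collections.defaultdict(dict)

        def fetch_from_master(self, resource_type):
            # stand-in for get_master_resources(); a real auditor would
            # query the master cloud here
            return ['%s-resource' % resource_type]

        @lockutils.synchronized(AUDIT_LOCK_NAME)
        def get_cached_master_resources(self, resource_type):
            # only the first thread in an audit cycle hits the master
            # cloud; later threads reuse the cached copy
            if resource_type in CachedAuditor.master_resources_dict:
                return CachedAuditor.master_resources_dict[resource_type]
            resources = self.fetch_from_master(resource_type)
            if resources is not None:
                CachedAuditor.master_resources_dict[resource_type] = resources
            return resources

        @lockutils.synchronized(AUDIT_LOCK_NAME)
        def post_audit(self):
            # reset the cache so the next audit cycle re-reads the master
            CachedAuditor.master_resources_dict = collections.defaultdict(dict)


    if __name__ == '__main__':
        auditor = CachedAuditor()
        threads = [threading.Thread(
            target=auditor.get_cached_master_resources, args=('dns',))
            for _ in range(3)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        auditor.post_audit()

Because the getter and post_audit() share the same lock name, a cache reset cannot race with a concurrent fill, which is the same reason the dcorch change serializes both paths on AUDIT_LOCK_NAME.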