USM deploy state

This change introduces state machines for the release state, the deploy
state, and the deploy host state.

It also removes the direct references to the software metadata from
software-controller and other modules, replacing them with the
encapsulated release_data module.

Other changes included:
1. Removed the required 'deployment' parameter from the software deploy
   activate and software deploy complete REST APIs.
2. Ensured the release metadata is reloaded for each request.
3. Added feed_repo and commit-id to the deploy entity, to be
   subsequently passed to deploy operations (see the sketch below).
4. Miscellaneous fixes.
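
As an illustration only (the release versions, feed path, and commit id
below are hypothetical), the deploy entity is now created with feed_repo
and commit_id so that later deploy operations can consume them:

    # sketch; values are made up, the real call sites are in the diffs below
    from software.db.api import get_instance

    db_api = get_instance()
    db_api.create_deploy(from_release="24.03.0", to_release="24.03.1",
                         feed_repo="/var/www/pages/feed/rel-24.03/ostree_repo",
                         commit_id="0123abcd", reboot_required=True)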

TCs:
    passed: software upload of major and patching releases
    passed: software deploy start of major and patching releases
    passed: software deploy host (mock) of major and patching releases
    passed: software deploy activate of major and patching releases
    passed: software deploy complete of major and patching releases
    passed: redeploy of a host after a failed host deploy, for both
            major and patching releases

Story: 2010676
Task: 49849

Change-Id: I4b1894560eccb8ef4f613633a73bf3887b2b93fb
Signed-off-by: Bin Qian <bin.qian@windriver.com>
Author: Bin Qian <bin.qian@windriver.com>
Committed: 2024-03-12 22:27:25 +00:00
parent 2bcfb854e4
commit ebe177d918
26 changed files with 1648 additions and 1257 deletions


@@ -5,6 +5,7 @@ Maintainer: StarlingX Developers <StarlingX-discuss@lists.StarlingX.io>
 Build-Depends: debhelper-compat (= 13),
  dh-python,
  python3-all,
+ python3-tabulate,
  python3-setuptools,
  python3-wheel
 Build-Depends-Indep:


@@ -7,4 +7,5 @@ oslo.serialization
 netaddr
 pecan
 requests_toolbelt
+tabulate
 WebOb


@@ -21,15 +21,36 @@ import json
 import os
 import re
 import textwrap
+from tabulate import tabulate
 from oslo_utils import importutils
 from six.moves import zip

 from software_client.common.http_errors import HTTP_ERRORS

+# TODO(bqian) remove below overrides when switching to
+# system command style CLI display for USM CLI is ready
+from tabulate import _table_formats
+from tabulate import TableFormat
+from tabulate import Line
+from tabulate import DataRow
+
+simple = TableFormat(
+    lineabove=Line("", "-", " ", ""),
+    linebelowheader=Line("", "=", " ", ""),
+    linebetweenrows=None,
+    linebelow=Line("", "-", " ", ""),
+    headerrow=DataRow("", " ", ""),
+    datarow=DataRow("", " ", ""),
+    padding=0,
+    with_header_hide=["lineabove", "linebelow"],
+)
+# _table_formats['pretty'] = simple
+#####################################################
+
 TERM_WIDTH = 72

 class HelpFormatter(argparse.HelpFormatter):
     def start_section(self, heading):
         # Title-case the headings
@@ -158,6 +179,41 @@ def display_info(resp):
     _display_info(text)

+
+def display_result_list(header_data_list, data):
+    header = [h for h in header_data_list]
+    table = []
+    for d in data:
+        row = []
+        for _, k in header_data_list.items():
+            row.append(d[k])
+        table.append(row)
+    if len(table) == 0:
+        print("No data")
+    else:
+        print(tabulate(table, header, tablefmt='pretty', colalign=("left", "left")))
+
+
+def display_detail_result(data):
+    header = ["Property", "Value"]
+    table = []
+    for k, v in data.items():
+        if isinstance(v, list):
+            if len(v) > 0:
+                row = [k, v[0]]
+                v.pop(0)
+            else:
+                row = [k, '']
+            table.append(row)
+            for r in v:
+                row = ['', r]
+                table.append(row)
+        else:
+            row = [k, v]
+            table.append(row)
+    print(tabulate(table, header, tablefmt='pretty', colalign=("left", "left")))
+
+
 def print_result_list(header_data_list, data_list, has_error, sort_key=0):
     """
     Print a list of data in a simple table format
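
A small usage sketch of the new display helpers (the data below is made
up; the real callers are in the deploy and release shell diffs further
down):

    header_data_list = {"Release": "release_id", "State": "state"}
    data = [{"release_id": "starlingx-24.03.1", "state": "available"}]
    display_result_list(header_data_list, data)   # one row per release
    display_detail_result(data[0])                # Property/Value table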


@@ -10,36 +10,11 @@ CONTROLLER_FLOATING_HOSTNAME = "controller"
 SOFTWARE_STORAGE_DIR = "/opt/software"

-AVAILABLE_DIR = "%s/metadata/available" % SOFTWARE_STORAGE_DIR
-UNAVAILABLE_DIR = "%s/metadata/unavailable" % SOFTWARE_STORAGE_DIR
-DEPLOYING_START_DIR = "%s/metadata/deploying_start" % SOFTWARE_STORAGE_DIR
-DEPLOYING_HOST_DIR = "%s/metadata/deploying_host" % SOFTWARE_STORAGE_DIR
-DEPLOYING_ACTIVATE_DIR = "%s/metadata/deploying_activate" % SOFTWARE_STORAGE_DIR
-DEPLOYING_COMPLETE_DIR = "%s/metadata/deploying_complete" % SOFTWARE_STORAGE_DIR
-DEPLOYED_DIR = "%s/metadata/deployed" % SOFTWARE_STORAGE_DIR
-REMOVING_DIR = "%s/metadata/removing" % SOFTWARE_STORAGE_DIR
-ABORTING_DIR = "%s/metadata/aborting" % SOFTWARE_STORAGE_DIR
-COMMITTED_DIR = "%s/metadata/committed" % SOFTWARE_STORAGE_DIR
-SEMANTICS_DIR = "%s/semantics" % SOFTWARE_STORAGE_DIR
-
 PATCH_AGENT_STATE_IDLE = "idle"
 PATCH_AGENT_STATE_INSTALLING = "installing"
 PATCH_AGENT_STATE_INSTALL_FAILED = "install-failed"
 PATCH_AGENT_STATE_INSTALL_REJECTED = "install-rejected"

-ABORTING = 'aborting'
-AVAILABLE = 'available'
-COMMITTED = 'committed'
-DEPLOYED = 'deployed'
-DEPLOYING_ACTIVATE = 'deploying-activate'
-DEPLOYING_COMPLETE = 'deploying-complete'
-DEPLOYING_HOST = 'deploying-host'
-DEPLOYING_START = 'deploying-start'
-REMOVING = 'removing'
-UNAVAILABLE = 'unavailable'
-UNKNOWN = 'n/a'
-
 STATUS_DEVELOPEMENT = 'DEV'
 STATUS_OBSOLETE = 'OBS'
 STATUS_RELEASED = 'REL'
@@ -61,9 +36,11 @@ PATCH_EXTENSION = ".patch"
 SUPPORTED_UPLOAD_FILE_EXT = [ISO_EXTENSION, SIG_EXTENSION, PATCH_EXTENSION]
 SCRATCH_DIR = "/scratch"

+# host deploy state
 DEPLOYING = 'deploying'
 FAILED = 'failed'
 PENDING = 'pending'
+DEPLOYED = 'deployed'

 # Authorization modes of software cli
 KEYSTONE = 'keystone'


@@ -54,10 +54,10 @@ class DeployManager(base.Manager):
     def host(self, args):
         # args.deployment is a string
-        agent_ip = args.agent
+        hostname = args.host

         # Issue deploy_host request and poll for results
-        path = "/v1/software/deploy_host/%s" % (agent_ip)
+        path = "/v1/software/deploy_host/%s" % (hostname)
         if args.force:
             path += "/force"
@@ -69,7 +69,8 @@ class DeployManager(base.Manager):
                print(data["error"])
                rc = 1
            else:
-               rc = self.wait_for_install_complete(agent_ip)
+               # NOTE(bqian) should consider return host_list instead.
+               rc = self.wait_for_install_complete(hostname)
        elif req.status_code == 500:
            print("An internal error has occurred. "
                  "Please check /var/log/software.log for details")
@@ -84,26 +85,20 @@ class DeployManager(base.Manager):
        return rc

     def activate(self, args):
-        # args.deployment is a string
-        deployment = args.deployment
-
         # Ignore interrupts during this function
         signal.signal(signal.SIGINT, signal.SIG_IGN)

         # Issue deploy_start request
-        path = "/v1/software/deploy_activate/%s" % (deployment)
+        path = "/v1/software/deploy_activate"
         return self._create(path, body={})

     def complete(self, args):
-        # args.deployment is a string
-        deployment = args.deployment
-
         # Ignore interrupts during this function
         signal.signal(signal.SIGINT, signal.SIG_IGN)

         # Issue deploy_start request
-        path = "/v1/software/deploy_complete/%s" % (deployment)
+        path = "/v1/software/deploy_complete/"
         return self._create(path, body={})
@ -113,40 +108,9 @@ class DeployManager(base.Manager):
def show(self): def show(self):
path = '/v1/software/deploy' path = '/v1/software/deploy'
req, data = self._list(path, "") return self._list(path, "")
if req.status_code >= 500: def wait_for_install_complete(self, hostname):
print("An internal error has occurred. Please check /var/log/software.log for details")
return 1
elif req.status_code >= 400:
print("Respond code %d. Error: %s" % (req.status_code, req.reason))
return 1
if not data:
print("No deploy in progress.")
else:
data = data[0]
data["reboot_required"] = "Yes" if data.get("reboot_required") else "No"
data_list = [[k, v] for k, v in data.items()]
transposed_data_list = list(zip(*data_list))
transposed_data_list[0] = [s.title().replace('_', ' ') for s in transposed_data_list[0]]
# Find the longest header string in each column
header_lengths = [len(str(x)) for x in transposed_data_list[0]]
# Find the longest content string in each column
content_lengths = [len(str(x)) for x in transposed_data_list[1]]
# Find the max of the two for each column
col_lengths = [(x if x > y else y) for x, y in zip(header_lengths, content_lengths)]
print(' '.join(f"{x.center(col_lengths[i])}" for i,
x in enumerate(transposed_data_list[0])))
print(' '.join('=' * length for length in col_lengths))
print(' '.join(f"{x.center(col_lengths[i])}" for i,
x in enumerate(transposed_data_list[1])))
return 0
def wait_for_install_complete(self, agent_ip):
url = "/v1/software/host_list" url = "/v1/software/host_list"
rc = 0 rc = 0
@ -163,55 +127,49 @@ class DeployManager(base.Manager):
except requests.exceptions.ConnectionError: except requests.exceptions.ConnectionError:
# The local software-controller may have restarted. # The local software-controller may have restarted.
retriable_count += 1 retriable_count += 1
if retriable_count <= max_retries: if retriable_count > max_retries:
continue
else:
print("Lost communications with the software controller") print("Lost communications with the software controller")
rc = 1 rc = 1
break return rc
if req.status_code == 200:
data = data.get("data", None)
if not data:
print("Invalid host-list data returned:")
utils.print_result_debug(req, data)
rc = 1
break
host_state = None
for d in data:
if d['hostname'] == agent_ip:
host_state = d.get('host_state')
if host_state == constants.DEPLOYING:
# Still deploying
sys.stdout.write(".")
sys.stdout.flush()
elif host_state == constants.FAILED:
print("\nDeployment failed. Please check logs for details.")
rc = 1
break
elif host_state == constants.DEPLOYED:
print("\nDeployment was successful.")
rc = 0
break
else:
print("\nReported unknown state: %s" % host_state)
rc = 1
break
elif req.status_code == 500:
print("An internal error has occurred. Please check /var/log/software.log for details")
rc = 1
break
else: else:
m = re.search("(Error message:.*)", req.text, re.MULTILINE)
if m:
print(m.group(0))
else:
print(vars(req))
rc = 1
break break
if req.status_code == 200:
if not data:
print("Invalid host-list data returned:")
utils.print_result_debug(req, data)
rc = 1
host_state = None
for d in data:
if d['hostname'] == hostname:
host_state = d.get('host_state')
if host_state == constants.DEPLOYING:
print("\nDeployment started.")
rc = 0
elif host_state == constants.FAILED:
print("\nDeployment failed. Please check logs for details.")
rc = 1
elif host_state == constants.DEPLOYED:
print("\nDeployment was successful.")
rc = 0
elif host_state == constants.PENDING:
print("\nDeployment pending.")
else:
print("\nReported unknown state: %s" % host_state)
rc = 1
elif req.status_code == 500:
print("An internal error has occurred. Please check /var/log/software.log for details")
rc = 1
else:
m = re.search("(Error message:.*)", req.text, re.MULTILINE)
if m:
print(m.group(0))
else:
print(vars(req))
rc = 1
return rc return rc


@ -21,20 +21,26 @@ from software_client.common import utils
help="List all deployments that have this state") help="List all deployments that have this state")
def do_show(cc, args): def do_show(cc, args):
"""Show the software deployments states""" """Show the software deployments states"""
# TODO(bqian) modify the cli to display with generic tabulated output resp, data = cc.deploy.show()
return cc.deploy.show() if args.debug:
utils.print_result_debug(resp, data)
else:
header_data_list = {"From Release": "from_release", "To Release": "to_release", "RR": "reboot_required", "State": "state"}
utils.display_result_list(header_data_list, data)
return utils.check_rc(resp, data)
def do_host_list(cc, args): def do_host_list(cc, args):
"""List of hosts for software deployment """ """List of hosts for software deployment """
req, data = cc.deploy.host_list() resp, data = cc.deploy.host_list()
# TODO(bqian) modify display with generic tabulated output
if args.debug: if args.debug:
utils.print_result_debug(req, data) utils.print_result_debug(resp, data)
else: else:
utils.print_software_deploy_host_list_result(req, data) header_data_list = {"Host": "hostname", "From Release": "software_release", "To Release": "target_release", "RR": "reboot_required", "State": "host_state"}
utils.display_result_list(header_data_list, data)
return utils.check_rc(req, data) return utils.check_rc(resp, data)
@utils.arg('deployment', @utils.arg('deployment',
@ -68,17 +74,17 @@ def do_precheck(cc, args):
help='Allow bypassing non-critical checks') help='Allow bypassing non-critical checks')
def do_start(cc, args): def do_start(cc, args):
"""Start the software deployment""" """Start the software deployment"""
req, data = cc.deploy.start(args) resp, data = cc.deploy.start(args)
if args.debug: if args.debug:
utils.print_result_debug(req, data) utils.print_result_debug(resp, data)
else: else:
utils.print_software_op_result(req, data) utils.display_info(resp)
return utils.check_rc(req, data) return utils.check_rc(resp, data)
@utils.arg('agent', @utils.arg('host',
help="Agent on which host deploy is triggered") help="Name of the host that the deploy is triggered")
@utils.arg('-f', @utils.arg('-f',
'--force', '--force',
action='store_true', action='store_true',
@ -89,8 +95,6 @@ def do_host(cc, args):
return cc.deploy.host(args) return cc.deploy.host(args)
@utils.arg('deployment',
help='Deployment ID to activate')
def do_activate(cc, args): def do_activate(cc, args):
"""Activate the software deployment""" """Activate the software deployment"""
req, data = cc.deploy.activate(args) req, data = cc.deploy.activate(args)
@ -101,8 +105,6 @@ def do_activate(cc, args):
return utils.check_rc(req, data) return utils.check_rc(req, data)
@utils.arg('deployment',
help='Deployment ID to complete')
def do_complete(cc, args): def do_complete(cc, args):
"""Complete the software deployment""" """Complete the software deployment"""
req, data = cc.deploy.complete(args) req, data = cc.deploy.complete(args)


@ -22,10 +22,8 @@ def do_list(cc, args):
if args.debug: if args.debug:
utils.print_result_debug(req, data) utils.print_result_debug(req, data)
else: else:
header_data_list = ["Release", "RR", "State"] header_data_list = {"Release": "release_id", "RR": "reboot_required", "State": "state"}
data_list = [(k, v["reboot_required"], v["state"]) for k, v in data["sd"].items()] utils.display_result_list(header_data_list, data)
has_error = 'error' in data and data["error"]
utils.print_result_list(header_data_list, data_list, has_error)
return utils.check_rc(req, data) return utils.check_rc(req, data)
@ -45,7 +43,8 @@ def do_show(cc, args):
if args.debug: if args.debug:
utils.print_result_debug(req, data) utils.print_result_debug(req, data)
else: else:
utils.print_release_show_result(req, data, list_packages=list_packages) for d in data:
utils.display_detail_result(d)
return utils.check_rc(req, data) return utils.check_rc(req, data)


@@ -100,7 +100,7 @@ class HealthCheck(object):
             print("Could not check required patches...")
             return False, required_patches

-        applied_patches = list(response.json()["sd"].keys())
+        applied_patches = [release['release_id'] for release in response.json()]
         missing_patch = list(set(required_patches) - set(applied_patches))
         if missing_patch:
             success = False


@@ -240,6 +240,8 @@ class DataMigration(object):
             platform_config_dir = os.path.join(PLATFORM_PATH, "config")
             from_config_dir = os.path.join(platform_config_dir, self.from_release)
             to_config_dir = os.path.join(platform_config_dir, self.to_release)
+            if os.path.isdir(to_config_dir):
+                shutil.rmtree(to_config_dir)
             shutil.copytree(from_config_dir, to_config_dir)
         except Exception as e:
             LOG.exception("Failed to create platform config for release %s. "


@@ -12,11 +12,12 @@ from pecan import expose
 from pecan import request
 import shutil

+from software import constants
 from software.exceptions import SoftwareError
 from software.exceptions import SoftwareServiceError
+from software.release_data import reload_release_data
 from software.software_controller import sc
-import software.utils as utils
-import software.constants as constants
+from software import utils

 LOG = logging.getLogger('main_logger')
@ -26,6 +27,7 @@ class SoftwareAPIController(object):
@expose('json') @expose('json')
def commit_patch(self, *args): def commit_patch(self, *args):
reload_release_data()
result = sc.patch_commit(list(args)) result = sc.patch_commit(list(args))
sc.software_sync() sc.software_sync()
@ -33,12 +35,14 @@ class SoftwareAPIController(object):
@expose('json') @expose('json')
def commit_dry_run(self, *args): def commit_dry_run(self, *args):
reload_release_data()
result = sc.patch_commit(list(args), dry_run=True) result = sc.patch_commit(list(args), dry_run=True)
return result return result
@expose('json') @expose('json')
@expose('query.xml', content_type='application/xml') @expose('query.xml', content_type='application/xml')
def delete(self, *args): def delete(self, *args):
reload_release_data()
result = sc.software_release_delete_api(list(args)) result = sc.software_release_delete_api(list(args))
sc.software_sync() sc.software_sync()
@ -46,28 +50,26 @@ class SoftwareAPIController(object):
@expose('json') @expose('json')
@expose('query.xml', content_type='application/xml') @expose('query.xml', content_type='application/xml')
def deploy_activate(self, *args): def deploy_activate(self):
if sc.any_patch_host_installing(): reload_release_data()
raise SoftwareServiceError(error="Rejected: One or more nodes are installing a release.")
result = sc.software_deploy_activate_api(list(args)[0]) result = sc.software_deploy_activate_api()
sc.software_sync() sc.software_sync()
return result return result
@expose('json') @expose('json')
@expose('query.xml', content_type='application/xml') @expose('query.xml', content_type='application/xml')
def deploy_complete(self, *args): def deploy_complete(self):
if sc.any_patch_host_installing(): reload_release_data()
raise SoftwareServiceError(error="Rejected: One or more nodes are installing a release.")
result = sc.software_deploy_complete_api(list(args)[0])
result = sc.software_deploy_complete_api()
sc.software_sync() sc.software_sync()
return result return result
@expose('json') @expose('json')
@expose('query.xml', content_type='application/xml') @expose('query.xml', content_type='application/xml')
def deploy_host(self, *args): def deploy_host(self, *args):
reload_release_data()
if len(list(args)) == 0: if len(list(args)) == 0:
return dict(error="Host must be specified for install") return dict(error="Host must be specified for install")
force = False force = False
@ -81,6 +83,7 @@ class SoftwareAPIController(object):
@expose('json') @expose('json')
@expose('query.xml', content_type='application/xml') @expose('query.xml', content_type='application/xml')
def deploy_precheck(self, *args, **kwargs): def deploy_precheck(self, *args, **kwargs):
reload_release_data()
force = False force = False
if 'force' in list(args): if 'force' in list(args):
force = True force = True
@ -92,6 +95,7 @@ class SoftwareAPIController(object):
@expose('json') @expose('json')
@expose('query.xml', content_type='application/xml') @expose('query.xml', content_type='application/xml')
def deploy_start(self, *args, **kwargs): def deploy_start(self, *args, **kwargs):
reload_release_data()
# if --force is provided # if --force is provided
force = 'force' in list(args) force = 'force' in list(args)
@ -107,6 +111,7 @@ class SoftwareAPIController(object):
@expose('json', method="GET") @expose('json', method="GET")
def deploy(self): def deploy(self):
reload_release_data()
from_release = request.GET.get("from_release") from_release = request.GET.get("from_release")
to_release = request.GET.get("to_release") to_release = request.GET.get("to_release")
result = sc.software_deploy_show_api(from_release, to_release) result = sc.software_deploy_show_api(from_release, to_release)
@ -115,25 +120,30 @@ class SoftwareAPIController(object):
@expose('json') @expose('json')
@expose('query.xml', content_type='application/xml') @expose('query.xml', content_type='application/xml')
def install_local(self): def install_local(self):
reload_release_data()
result = sc.software_install_local_api() result = sc.software_install_local_api()
return result return result
@expose('json') @expose('json')
def is_available(self, *args): def is_available(self, *args):
reload_release_data()
return sc.is_available(list(args)) return sc.is_available(list(args))
@expose('json') @expose('json')
def is_committed(self, *args): def is_committed(self, *args):
reload_release_data()
return sc.is_committed(list(args)) return sc.is_committed(list(args))
@expose('json') @expose('json')
def is_deployed(self, *args): def is_deployed(self, *args):
reload_release_data()
return sc.is_deployed(list(args)) return sc.is_deployed(list(args))
@expose('json') @expose('json')
@expose('show.xml', content_type='application/xml') @expose('show.xml', content_type='application/xml')
def show(self, *args): def show(self, *args):
reload_release_data()
result = sc.software_release_query_specific_cached(list(args)) result = sc.software_release_query_specific_cached(list(args))
return result return result
@ -141,6 +151,7 @@ class SoftwareAPIController(object):
@expose('json') @expose('json')
@expose('query.xml', content_type='application/xml') @expose('query.xml', content_type='application/xml')
def upload(self): def upload(self):
reload_release_data()
is_local = False is_local = False
temp_dir = None temp_dir = None
uploaded_files = [] uploaded_files = []
@ -186,12 +197,13 @@ class SoftwareAPIController(object):
@expose('json') @expose('json')
@expose('query.xml', content_type='application/xml') @expose('query.xml', content_type='application/xml')
def query(self, **kwargs): def query(self, **kwargs):
reload_release_data()
sd = sc.software_release_query_cached(**kwargs) sd = sc.software_release_query_cached(**kwargs)
return sd
return dict(sd=sd)
@expose('json', method="GET") @expose('json', method="GET")
def host_list(self): def host_list(self):
reload_release_data()
result = sc.deploy_host_list() result = sc.deploy_host_list()
return result return result


@ -4,7 +4,6 @@ Copyright (c) 2023-2024 Wind River Systems, Inc.
SPDX-License-Identifier: Apache-2.0 SPDX-License-Identifier: Apache-2.0
""" """
from enum import Enum
import os import os
try: try:
# The tsconfig module is only available at runtime # The tsconfig module is only available at runtime
@ -34,91 +33,8 @@ RC_UNHEALTHY = 3
DEPLOY_PRECHECK_SCRIPT = "deploy-precheck" DEPLOY_PRECHECK_SCRIPT = "deploy-precheck"
DEPLOY_START_SCRIPT = "software-deploy-start" DEPLOY_START_SCRIPT = "software-deploy-start"
AVAILABLE_DIR = "%s/metadata/available" % SOFTWARE_STORAGE_DIR
UNAVAILABLE_DIR = "%s/metadata/unavailable" % SOFTWARE_STORAGE_DIR
DEPLOYING_DIR = "%s/metadata/deploying" % SOFTWARE_STORAGE_DIR
DEPLOYED_DIR = "%s/metadata/deployed" % SOFTWARE_STORAGE_DIR
REMOVING_DIR = "%s/metadata/removing" % SOFTWARE_STORAGE_DIR
# TODO(bqian) states to be removed once current references are removed
DEPLOYING_START_DIR = "%s/metadata/deploying_start" % SOFTWARE_STORAGE_DIR
DEPLOYING_HOST_DIR = "%s/metadata/deploying_host" % SOFTWARE_STORAGE_DIR
DEPLOYING_ACTIVATE_DIR = "%s/metadata/deploying_activate" % SOFTWARE_STORAGE_DIR
DEPLOYING_COMPLETE_DIR = "%s/metadata/deploying_complete" % SOFTWARE_STORAGE_DIR
ABORTING_DIR = "%s/metadata/aborting" % SOFTWARE_STORAGE_DIR
COMMITTED_DIR = "%s/metadata/committed" % SOFTWARE_STORAGE_DIR
SEMANTICS_DIR = "%s/semantics" % SOFTWARE_STORAGE_DIR SEMANTICS_DIR = "%s/semantics" % SOFTWARE_STORAGE_DIR
DEPLOY_STATE_METADATA_DIR = \
[
AVAILABLE_DIR,
UNAVAILABLE_DIR,
DEPLOYING_DIR,
DEPLOYED_DIR,
REMOVING_DIR,
# TODO(bqian) states to be removed once current references are removed
DEPLOYING_START_DIR,
DEPLOYING_HOST_DIR,
DEPLOYING_ACTIVATE_DIR,
DEPLOYING_COMPLETE_DIR,
ABORTING_DIR,
COMMITTED_DIR,
]
# new release state needs to be added to VALID_RELEASE_STATES list
AVAILABLE = 'available'
UNAVAILABLE = 'unavailable'
DEPLOYING = 'deploying'
DEPLOYED = 'deployed'
REMOVING = 'removing'
DELETABLE_STATE = [AVAILABLE, UNAVAILABLE]
# TODO(bqian) states to be removed once current references are removed
ABORTING = 'aborting'
COMMITTED = 'committed'
DEPLOYING_ACTIVATE = 'deploying-activate'
DEPLOYING_COMPLETE = 'deploying-complete'
DEPLOYING_HOST = 'deploying-host'
DEPLOYING_START = 'deploying-start'
UNAVAILABLE = 'unavailable'
UNKNOWN = 'n/a'
VALID_DEPLOY_START_STATES = [
AVAILABLE,
DEPLOYED,
]
# host deploy substate
HOST_DEPLOY_PENDING = 'pending'
HOST_DEPLOY_STARTED = 'deploy-started'
HOST_DEPLOY_DONE = 'deploy-done'
HOST_DEPLOY_FAILED = 'deploy-failed'
VALID_HOST_DEPLOY_STATE = [
HOST_DEPLOY_PENDING,
HOST_DEPLOY_STARTED,
HOST_DEPLOY_DONE,
HOST_DEPLOY_FAILED
]
VALID_RELEASE_STATES = [AVAILABLE, UNAVAILABLE, DEPLOYING, DEPLOYED,
REMOVING]
RELEASE_STATE_TO_DIR_MAP = {AVAILABLE: AVAILABLE_DIR,
UNAVAILABLE: UNAVAILABLE_DIR,
DEPLOYING: DEPLOYING_DIR,
DEPLOYED: DEPLOYED_DIR,
REMOVING: REMOVING_DIR}
# valid release state transition below could still be changed as
# development continue
RELEASE_STATE_VALID_TRANSITION = {
AVAILABLE: [DEPLOYING],
DEPLOYING: [DEPLOYED],
DEPLOYED: [REMOVING, UNAVAILABLE]
}
STATUS_DEVELOPEMENT = 'DEV' STATUS_DEVELOPEMENT = 'DEV'
STATUS_OBSOLETE = 'OBS' STATUS_OBSOLETE = 'OBS'
STATUS_RELEASED = 'REL' STATUS_RELEASED = 'REL'
@ -147,11 +63,6 @@ SEMANTIC_ACTIONS = [SEMANTIC_PREAPPLY, SEMANTIC_PREREMOVE]
CHECKOUT_FOLDER = "checked_out_commit" CHECKOUT_FOLDER = "checked_out_commit"
DEPLOYMENT_STATE_ACTIVE = "Active"
DEPLOYMENT_STATE_INACTIVE = "Inactive"
DEPLOYMENT_STATE_PRESTAGING = "Prestaging"
DEPLOYMENT_STATE_PRESTAGED = "Prestaged"
FEED_DIR = "/var/www/pages/feed/" FEED_DIR = "/var/www/pages/feed/"
UPGRADE_FEED_DIR = FEED_DIR UPGRADE_FEED_DIR = FEED_DIR
TMP_DIR = "/tmp" TMP_DIR = "/tmp"
@ -183,23 +94,3 @@ LAST_IN_SYNC = "last_in_sync"
SYSTEM_MODE_SIMPLEX = "simplex" SYSTEM_MODE_SIMPLEX = "simplex"
SYSTEM_MODE_DUPLEX = "duplex" SYSTEM_MODE_DUPLEX = "duplex"
class DEPLOY_STATES(Enum):
ACTIVATE = 'activate'
ACTIVATE_DONE = 'activate-done'
ACTIVATE_FAILED = 'activate-failed'
START = 'start'
START_DONE = 'start-done'
START_FAILED = 'start-failed'
HOST = 'host'
HOST_DONE = 'host-done'
HOST_FAILED = 'host-failed'
class DEPLOY_HOST_STATES(Enum):
DEPLOYED = 'deployed'
DEPLOYING = 'deploying'
FAILED = 'failed'
PENDING = 'pending'


@ -9,7 +9,7 @@ import logging
import threading import threading
from software.software_entities import DeployHandler from software.software_entities import DeployHandler
from software.software_entities import DeployHostHandler from software.software_entities import DeployHostHandler
from software.constants import DEPLOY_STATES from software.states import DEPLOY_STATES
LOG = logging.getLogger('main_logger') LOG = logging.getLogger('main_logger')
@ -32,9 +32,9 @@ class SoftwareAPI:
self.deploy_handler = DeployHandler() self.deploy_handler = DeployHandler()
self.deploy_host_handler = DeployHostHandler() self.deploy_host_handler = DeployHostHandler()
def create_deploy(self, from_release, to_release, reboot_required: bool): def create_deploy(self, from_release, to_release, feed_repo, commit_id, reboot_required: bool):
self.begin_update() self.begin_update()
self.deploy_handler.create(from_release, to_release, reboot_required) self.deploy_handler.create(from_release, to_release, feed_repo, commit_id, reboot_required)
self.end_update() self.end_update()
def get_deploy(self, from_release, to_release): def get_deploy(self, from_release, to_release):
@ -79,6 +79,13 @@ class SoftwareAPI:
finally: finally:
self.end_update() self.end_update()
def get_deploy_host_by_hostname(self, hostname):
self.begin_update()
try:
return self.deploy_host_handler.query(hostname)
finally:
self.end_update()
def update_deploy_host(self, hostname, state): def update_deploy_host(self, hostname, state):
self.begin_update() self.begin_update()
try: try:


@@ -0,0 +1,69 @@
+"""
+Copyright (c) 2024 Wind River Systems, Inc.
+SPDX-License-Identifier: Apache-2.0
+"""
+import logging
+
+from software.db.api import get_instance
+from software.exceptions import InvalidOperation
+from software.states import DEPLOY_HOST_STATES
+
+LOG = logging.getLogger('main_logger')
+
+deploy_host_state_transition = {
+    DEPLOY_HOST_STATES.PENDING: [DEPLOY_HOST_STATES.DEPLOYING],
+    DEPLOY_HOST_STATES.DEPLOYING: [DEPLOY_HOST_STATES.DEPLOYED, DEPLOY_HOST_STATES.FAILED],
+    DEPLOY_HOST_STATES.FAILED: [DEPLOY_HOST_STATES.DEPLOYING],
+    DEPLOY_HOST_STATES.DEPLOYED: []
+}
+
+
+class DeployHostState(object):
+    _callbacks = []
+
+    @staticmethod
+    def register_event_listener(callback):
+        if callback not in DeployHostState._callbacks:
+            LOG.info("Register event listener %s", callback.__qualname__)
+            DeployHostState._callbacks.append(callback)
+
+    def __init__(self, hostname):
+        self._hostname = hostname
+
+    def check_transition(self, target_state: DEPLOY_HOST_STATES):
+        db_api = get_instance()
+        deploy_host = db_api.get_deploy_host_by_hostname(self._hostname)
+        if deploy_host is not None:
+            cur_state = DEPLOY_HOST_STATES(deploy_host['state'])
+            if target_state in deploy_host_state_transition[cur_state]:
+                return True
+        else:
+            LOG.error('Host %s is not part of deployment' % self._hostname)
+        return False
+
+    def transform(self, target_state: DEPLOY_HOST_STATES):
+        db_api = get_instance()
+        db_api.begin_update()
+        try:
+            if self.check_transition(target_state):
+                db_api.update_deploy_host(self._hostname, target_state)
+                for callback in DeployHostState._callbacks:
+                    callback(self._hostname, target_state)
+            else:
+                msg = "Host can not transform to %s from current state" % target_state.value
+                raise InvalidOperation(msg)
+        finally:
+            db_api.end_update()
+
+    def deploy_started(self):
+        self.transform(DEPLOY_HOST_STATES.DEPLOYING)
+
+    def deployed(self):
+        self.transform(DEPLOY_HOST_STATES.DEPLOYED)
+
+    def deploy_failed(self):
+        self.transform(DEPLOY_HOST_STATES.FAILED)
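
A usage sketch of the host state machine (the hostname is hypothetical;
listener registration is assumed to be done by the controller, whose
diff is suppressed further down):

    # DeployState.host_deploy_updated comes from the deploy state machine below
    DeployHostState.register_event_listener(DeployState.host_deploy_updated)

    host = DeployHostState("controller-1")
    host.deploy_started()   # pending/failed -> deploying, listeners notified
    host.deployed()         # deploying -> deployed; deploy_failed() on error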


@ -0,0 +1,198 @@
"""
Copyright (c) 2024 Wind River Systems, Inc.
SPDX-License-Identifier: Apache-2.0
"""
import logging
from software.db.api import get_instance
from software.exceptions import InvalidOperation
from software.release_data import SWRelease
from software.states import DEPLOY_STATES
from software.states import DEPLOY_HOST_STATES
LOG = logging.getLogger('main_logger')
deploy_state_transition = {
None: [DEPLOY_STATES.START], # Fake state for no deploy in progress
DEPLOY_STATES.START: [DEPLOY_STATES.START_DONE, DEPLOY_STATES.START_FAILED],
DEPLOY_STATES.START_FAILED: [DEPLOY_STATES.ABORT],
DEPLOY_STATES.ABORT: [DEPLOY_STATES.ABORT_DONE],
DEPLOY_STATES.START_DONE: [DEPLOY_STATES.ABORT, DEPLOY_STATES.HOST],
DEPLOY_STATES.HOST: [DEPLOY_STATES.HOST,
DEPLOY_STATES.ABORT,
DEPLOY_STATES.HOST_FAILED,
DEPLOY_STATES.HOST_DONE],
DEPLOY_STATES.HOST_FAILED: [DEPLOY_STATES.HOST, # deploy-host can reattempt
DEPLOY_STATES.ABORT,
DEPLOY_STATES.HOST_FAILED,
DEPLOY_STATES.HOST_DONE],
DEPLOY_STATES.HOST_DONE: [DEPLOY_STATES.ABORT, DEPLOY_STATES.ACTIVATE],
DEPLOY_STATES.ACTIVATE: [DEPLOY_STATES.ACTIVATE_DONE, DEPLOY_STATES.ACTIVATE_FAILED],
DEPLOY_STATES.ACTIVATE_DONE: [DEPLOY_STATES.ABORT, None], # abort after deploy-activated?
DEPLOY_STATES.ACTIVATE_FAILED: [DEPLOY_STATES.ACTIVATE, DEPLOY_STATES.ABORT],
DEPLOY_STATES.ABORT_DONE: [] # waitng for being deleted
}
class DeployState(object):
_callbacks = []
_instance = None
@staticmethod
def register_event_listener(callback):
"""register event listener to be triggered when a state transition is completed"""
if callback is not None:
if callback not in DeployState._callbacks:
LOG.debug("Register event listener %s", callback.__qualname__)
DeployState._callbacks.append(callback)
@staticmethod
def get_deploy_state():
db_api_instance = get_instance()
deploys = db_api_instance.get_deploy_all()
if not deploys:
state = None # No deploy in progress == None
else:
deploy = deploys[0]
state = DEPLOY_STATES(deploy['state'])
return state
@staticmethod
def get_instance():
if DeployState._instance is None:
DeployState._instance = DeployState()
return DeployState._instance
@staticmethod
def host_deploy_updated(_hostname, _host_new_state):
db_api_instance = get_instance()
deploy_hosts = db_api_instance.get_deploy_host()
deploy_state = DeployState.get_instance()
all_states = []
for deploy_host in deploy_hosts:
if deploy_host['state'] not in all_states:
all_states.append(deploy_host['state'])
LOG.info("Host deploy state %s" % str(all_states))
if DEPLOY_HOST_STATES.FAILED.value in all_states:
deploy_state.deploy_host_failed()
elif DEPLOY_HOST_STATES.PENDING.value in all_states or \
DEPLOY_HOST_STATES.DEPLOYING.value in all_states:
deploy_state.deploy_host()
elif all_states == [DEPLOY_HOST_STATES.DEPLOYED.value]:
deploy_state.deploy_host_completed()
def __init__(self):
self._from_release = None
self._to_release = None
self._reboot_required = None
def check_transition(self, target_state: DEPLOY_STATES):
cur_state = DeployState.get_deploy_state()
if cur_state is not None:
cur_state = DEPLOY_STATES(cur_state)
if target_state in deploy_state_transition[cur_state]:
return True
# TODO(bqian) reverse lookup the operation that is not permitted, as feedback
msg = f"Deploy state transform not permitted from {str(cur_state)} to {str(target_state)}"
LOG.info(msg)
return False
def transform(self, target_state: DEPLOY_STATES):
db_api = get_instance()
db_api.begin_update()
try:
if self.check_transition(target_state):
# None means not existing or deleting
if target_state is not None:
db_api.update_deploy(target_state)
else:
# TODO(bqian) check the current state, and provide guidence on what is
# the possible next operation
if target_state is None:
msg = "Deployment can not deleted in current state."
else:
msg = "Host can not transform to %s from current state" % target_state.value()
raise InvalidOperation(msg)
finally:
db_api.end_update()
for callback in DeployState._callbacks:
LOG.debug("Calling event listener %s", callback.__qualname__)
callback(target_state)
# below are list of events to drive the FSM
def start(self, from_release, to_release, feed_repo, commit_id, reboot_required):
# start is special, it needs to create the deploy entity
if isinstance(from_release, SWRelease):
from_release = from_release.sw_release
if isinstance(to_release, SWRelease):
to_release = to_release.sw_release
msg = f"Start deploy {to_release}, current sw {from_release}"
LOG.info(msg)
db_api_instance = get_instance()
db_api_instance.create_deploy(from_release, to_release, feed_repo, commit_id, reboot_required)
def start_failed(self):
self.transform(DEPLOY_STATES.START_FAILED)
def start_done(self):
self.transform(DEPLOY_STATES.START_DONE)
def deploy_host(self):
self.transform(DEPLOY_STATES.HOST)
def abort(self):
self.transform(DEPLOY_STATES.ABORT)
def deploy_host_completed(self):
# depends on the deploy state, the deploy can be transformed
# to HOST_DONE (from DEPLOY_HOST) or ABORT_DONE (ABORT)
state = DeployState.get_deploy_state()
if state == DEPLOY_STATES.ABORT:
self.transform(DEPLOY_STATES.ABORT_DONE)
else:
self.transform(DEPLOY_STATES.HOST_DONE)
def deploy_host_failed(self):
self.transform(DEPLOY_STATES.HOST_FAILED)
def activate(self):
self.transform(DEPLOY_STATES.ACTIVATE)
def activate_completed(self):
self.transform(DEPLOY_STATES.ACTIVATE_DONE)
def activate_failed(self):
self.transform(DEPLOY_STATES.ACTIVATE_FAILED)
def completed(self):
self.transform(None)
# delete the deploy and deploy host entities
db_api = get_instance()
db_api.begin_update()
try:
db_api.delete_deploy_host_all()
db_api.delete_deploy()
finally:
db_api.end_update()
def require_deploy_state(require_states, prompt):
def wrap(func):
def exec_op(*args, **kwargs):
state = DeployState.get_deploy_state()
if state in require_states:
res = func(*args, **kwargs)
return res
else:
msg = ""
if prompt:
msg = prompt.format(state=state, require_states=require_states)
raise InvalidOperation(msg)
return exec_op
return wrap
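
A sketch of how the decorator can guard an operation (the guarded
function name and prompt text are hypothetical):

    @require_deploy_state([DEPLOY_STATES.START_DONE, DEPLOY_STATES.HOST_FAILED],
                          "Operation rejected in deploy state {state}; "
                          "requires one of {require_states}")
    def deploy_host_operation(hostname):
        ...  # runs only when the deploy FSM is in an allowed state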


@ -6,6 +6,57 @@ SPDX-License-Identifier: Apache-2.0
""" """
class InternalError(Exception):
"""This is an internal error aka bug"""
pass
class SoftwareServiceError(Exception):
"""
This is a service error, such as file system issue or configuration
issue, which is expected at design time for a valid reason.
This exception type will provide detail information to the user.
see ExceptionHook for detail
"""
def __init__(self, info="", warn="", error=""):
self._info = info
self._warn = warn
self._error = error
@property
def info(self):
return self._info if self._info is not None else ""
@property
def warning(self):
return self._warn if self._warn is not None else ""
@property
def error(self):
return self._error if self._error is not None else ""
class InvalidOperation(SoftwareServiceError):
"""Invalid operation, such as deploy a host that is already deployed """
def __init__(self, msg):
super().__init__(error=msg)
class ReleaseNotFound(SoftwareServiceError):
def __init__(self, release_ids):
if not isinstance(release_ids, list):
release_ids = [release_ids]
super().__init__(error="Release %s can not be found" % ', '.join(release_ids))
class HostNotFound(SoftwareServiceError):
def __init__(self, hostname):
super().__init__(error="Host %s can not be found" % hostname)
# TODO(bqian) gradually convert SoftwareError based exception to
# either SoftwareServiceError for user visible exceptions, or
# InternalError for internal error (bug)
class SoftwareError(Exception): class SoftwareError(Exception):
"""Base class for software exceptions.""" """Base class for software exceptions."""
@ -57,7 +108,7 @@ class SoftwareFail(SoftwareError):
pass pass
class ReleaseValidationFailure(SoftwareError): class ReleaseValidationFailure(SoftwareServiceError):
"""Release validation error.""" """Release validation error."""
pass pass
@ -67,7 +118,7 @@ class UpgradeNotSupported(SoftwareError):
pass pass
class ReleaseMismatchFailure(SoftwareError): class ReleaseMismatchFailure(SoftwareServiceError):
"""Release mismatch error.""" """Release mismatch error."""
pass pass
@ -128,33 +179,3 @@ class FileSystemError(SoftwareError):
Likely fixable by a root user. Likely fixable by a root user.
""" """
pass pass
class InternalError(Exception):
"""This is an internal error aka bug"""
pass
class SoftwareServiceError(Exception):
"""
This is a service error, such as file system issue or configuration
issue, which is expected at design time for a valid reason.
This exception type will provide detail information to the user.
see ExceptionHook for detail
"""
def __init__(self, info="", warn="", error=""):
self._info = info
self._warn = warn
self._error = error
@property
def info(self):
return self._info if self._info is not None else ""
@property
def warning(self):
return self._warn if self._warn is not None else ""
@property
def error(self):
return self._error if self._error is not None else ""
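
A sketch of how these user-visible exceptions are meant to be consumed
(the handling shown is illustrative; in the service the reporting is
done by the ExceptionHook mentioned in the docstring):

    try:
        raise InvalidOperation("Deploy host is not allowed in the current state.")
    except SoftwareServiceError as e:
        print(e.error)   # detail text surfaced to the user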


@ -84,11 +84,12 @@ class ParsableErrorMiddleware(object):
# simple check xml is valid # simple check xml is valid
body = [et.ElementTree.tostring( body = [et.ElementTree.tostring(
et.ElementTree.fromstring('<error_message>' + et.ElementTree.fromstring('<error_message>' +
'\n'.join(app_iter) + '</error_message>'))] '\n'.join(app_iter) +
'</error_message>'))]
except et.ElementTree.ParseError as err: except et.ElementTree.ParseError as err:
LOG.error('Error parsing HTTP response: %s' % err) LOG.error('Error parsing HTTP response: %s' % err)
body = ['<error_message>%s' % state['status_code'] + body = ['<error_message>%s' % state['status_code'] +
'</error_message>'] '</error_message>']
state['headers'].append(('Content-Type', 'application/xml')) state['headers'].append(('Content-Type', 'application/xml'))
else: else:
if six.PY3: if six.PY3:


@ -7,11 +7,13 @@
import os import os
from packaging import version from packaging import version
import shutil import shutil
from software import constants import threading
from software import states
from software.exceptions import FileSystemError from software.exceptions import FileSystemError
from software.exceptions import InternalError from software.exceptions import ReleaseNotFound
from software.software_functions import LOG from software.software_functions import LOG
from software import utils from software import utils
from software.software_functions import ReleaseData
class SWRelease(object): class SWRelease(object):
@ -22,6 +24,7 @@ class SWRelease(object):
self._metadata = metadata self._metadata = metadata
self._contents = contents self._contents = contents
self._sw_version = None self._sw_version = None
self._release = None
@property @property
def metadata(self): def metadata(self):
@ -40,21 +43,8 @@ class SWRelease(object):
return self.metadata['state'] return self.metadata['state']
@staticmethod @staticmethod
def is_valid_state_transition(from_state, to_state): def _ensure_state_transition(to_state):
if to_state not in constants.VALID_RELEASE_STATES: to_dir = states.RELEASE_STATE_TO_DIR_MAP[to_state]
msg = "Invalid state %s." % to_state
LOG.error(msg)
# this is a bug
raise InternalError(msg)
if from_state in constants.RELEASE_STATE_VALID_TRANSITION:
if to_state in constants.RELEASE_STATE_VALID_TRANSITION[from_state]:
return True
return False
@staticmethod
def ensure_state_transition(to_state):
to_dir = constants.RELEASE_STATE_TO_DIR_MAP[to_state]
if not os.path.isdir(to_dir): if not os.path.isdir(to_dir):
try: try:
os.makedirs(to_dir, mode=0o755, exist_ok=True) os.makedirs(to_dir, mode=0o755, exist_ok=True)
@ -63,27 +53,27 @@ class SWRelease(object):
raise FileSystemError(error) raise FileSystemError(error)
def update_state(self, state): def update_state(self, state):
if SWRelease.is_valid_state_transition(self.state, state): LOG.info("%s state from %s to %s" % (self.id, self.state, state))
LOG.info("%s state from %s to %s" % (self.id, self.state, state)) SWRelease._ensure_state_transition(state)
SWRelease.ensure_state_transition(state)
to_dir = constants.RELEASE_STATE_TO_DIR_MAP[state] to_dir = states.RELEASE_STATE_TO_DIR_MAP[state]
from_dir = constants.RELEASE_STATE_TO_DIR_MAP[self.state] from_dir = states.RELEASE_STATE_TO_DIR_MAP[self.state]
try: try:
shutil.move("%s/%s-metadata.xml" % (from_dir, self.id), shutil.move("%s/%s-metadata.xml" % (from_dir, self.id),
"%s/%s-metadata.xml" % (to_dir, self.id)) "%s/%s-metadata.xml" % (to_dir, self.id))
except shutil.Error: except shutil.Error:
msg = "Failed to move the metadata for %s" % self.id msg = "Failed to move the metadata for %s" % self.id
LOG.exception(msg) LOG.exception(msg)
raise FileSystemError(msg) raise FileSystemError(msg)
self.metadata['state'] = state self.metadata['state'] = state
else:
# this is a bug @property
error = "Invalid state transition %s, current is %s, target state is %s" % \ def version_obj(self):
(self.id, self.state, state) '''returns packaging.version object'''
LOG.info(error) if self._release is None:
raise InternalError(error) self._release = version.parse(self.sw_release)
return self._release
@property @property
def sw_release(self): def sw_release(self):
@ -97,7 +87,14 @@ class SWRelease(object):
self._sw_version = utils.get_major_release_version(self.sw_release) self._sw_version = utils.get_major_release_version(self.sw_release)
return self._sw_version return self._sw_version
@property
def component(self):
return self._get_by_key('component')
def _get_latest_commit(self): def _get_latest_commit(self):
if 'number_of_commits' not in self.contents:
return None
num_commits = self.contents['number_of_commits'] num_commits = self.contents['number_of_commits']
if int(num_commits) > 0: if int(num_commits) > 0:
commit_tag = "commit%s" % num_commits commit_tag = "commit%s" % num_commits
@ -119,6 +116,14 @@ class SWRelease(object):
# latest commit # latest commit
return None return None
@property
def base_commit_id(self):
commit = None
base = self.contents.get('base')
if base:
commit = base.get('commit')
return commit
def _get_by_key(self, key, default=None): def _get_by_key(self, key, default=None):
if key in self._metadata: if key in self._metadata:
return self._metadata[key] return self._metadata[key]
@ -147,16 +152,28 @@ class SWRelease(object):
@property @property
def unremovable(self): def unremovable(self):
return self._get_by_key('unremovable') return self._get_by_key('unremovable') == "Y"
@property @property
def reboot_required(self): def reboot_required(self):
return self._get_by_key('reboot_required') return self._get_by_key('reboot_required') == "Y"
@property
def requires_release_ids(self):
return self._get_by_key('requires') or []
@property
def packages(self):
return self._get_by_key('packages')
@property @property
def restart_script(self): def restart_script(self):
return self._get_by_key('restart_script') return self._get_by_key('restart_script')
@property
def apply_active_release_only(self):
return self._get_by_key('apply_active_release_only')
@property @property
def commit_checksum(self): def commit_checksum(self):
commit = self._get_latest_commit() commit = self._get_latest_commit()
@ -167,15 +184,76 @@ class SWRelease(object):
# latest commit # latest commit
return None return None
def get_all_dependencies(self, filter_states=None):
"""
:return: sorted list of all direct and indirect required releases
raise ReleaseNotFound if one of the release is not uploaded.
"""
def _get_all_deps(release_id, release_collection, deps):
release = release_collection[release_id]
if release is None:
raise ReleaseNotFound([release_id])
if filter_states and release.state not in filter_states:
return
for id in release.requires_release_ids:
if id not in deps:
deps.append(id)
_get_all_deps(id, release_collection, deps)
all_deps = []
release_collection = get_SWReleaseCollection()
_get_all_deps(self.id, release_collection, all_deps)
releases = sorted([release_collection[id] for id in all_deps])
return releases
def __lt__(self, other):
return self.version_obj < other.version_obj
def __le__(self, other):
return self.version_obj <= other.version_obj
def __eq__(self, other):
return self.version_obj == other.version_obj
def __ge__(self, other):
return self.version_obj >= other.version_obj
def __gt__(self, other):
return self.version_obj > other.version_obj
def __ne__(self, other):
return self.version_obj != other.version_obj
@property @property
def is_ga_release(self): def is_ga_release(self):
ver = version.parse(self.sw_release) ver = version.parse(self.sw_release)
_, _, pp = ver.release if len(ver.release) == 2:
pp = 0
else:
_, _, pp = ver.release
return pp == 0 return pp == 0
@property @property
def is_deletable(self): def is_deletable(self):
return self.state in constants.DELETABLE_STATE return self.state in states.DELETABLE_STATE
def to_query_dict(self):
data = {"release_id": self.id,
"state": self.state,
"sw_version": self.sw_release,
"component": self.component,
"status": self.status,
"unremovable": self.unremovable,
"summary": self.summary,
"description": self.description,
"install_instructions": self.install_instructions,
"warnings": self.warnings,
"reboot_required": self.reboot_required,
"requires": self.requires_release_ids[:],
"packages": self.packages[:]}
return data
class SWReleaseCollection(object): class SWReleaseCollection(object):
@ -191,11 +269,23 @@ class SWReleaseCollection(object):
sw_release = SWRelease(rel_id, rel_data, contents) sw_release = SWRelease(rel_id, rel_data, contents)
self._sw_releases[rel_id] = sw_release self._sw_releases[rel_id] = sw_release
@property
def running_release(self):
latest = None
for rel in self.iterate_releases_by_state(states.DEPLOYED):
if latest is None or rel.version_obj > latest.version_obj:
latest = rel
return latest
def get_release_by_id(self, rel_id): def get_release_by_id(self, rel_id):
if rel_id in self._sw_releases: if rel_id in self._sw_releases:
return self._sw_releases[rel_id] return self._sw_releases[rel_id]
return None return None
def __getitem__(self, rel_id):
return self.get_release_by_id(rel_id)
def get_release_by_commit_id(self, commit_id): def get_release_by_commit_id(self, commit_id):
for _, sw_release in self._sw_releases: for _, sw_release in self._sw_releases:
if sw_release.commit_id == commit_id: if sw_release.commit_id == commit_id:
@ -219,15 +309,44 @@ class SWReleaseCollection(object):
yield self._sw_releases[rel_id] yield self._sw_releases[rel_id]
def update_state(self, list_of_releases, state): def update_state(self, list_of_releases, state):
for release_id in list_of_releases:
release = self.get_release_by_id(release_id)
if release is not None:
if SWRelease.is_valid_state_transition(release.state, state):
SWRelease.ensure_state_transition(state)
else:
LOG.error("release %s not found" % release_id)
for release_id in list_of_releases: for release_id in list_of_releases:
release = self.get_release_by_id(release_id) release = self.get_release_by_id(release_id)
if release is not None: if release is not None:
release.update_state(state) release.update_state(state)
class LocalStorage(object):
def __init__(self):
self._storage = threading.local()
def get_value(self, key):
if hasattr(self._storage, key):
return getattr(self._storage, key)
else:
return None
def set_value(self, key, value):
setattr(self._storage, key, value)
def void_value(self, key):
if hasattr(self._storage, key):
delattr(self._storage, key)
_local_storage = LocalStorage()
def get_SWReleaseCollection():
release_data = _local_storage.get_value('release_data')
if release_data is None:
LOG.info("Load release_data")
release_data = ReleaseData()
release_data.load_all()
LOG.info("release_data loaded")
_local_storage.set_value('release_data', release_data)
return SWReleaseCollection(release_data)
def reload_release_data():
_local_storage.void_value('release_data')
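
The per-request pattern this enables (the release id is hypothetical):
each API handler first voids the thread-local cache, and the next
access lazily reloads the metadata from disk:

    reload_release_data()                       # drop this thread's cached copy
    collection = get_SWReleaseCollection()      # reloads ReleaseData on first use
    release = collection["starlingx-24.03.1"]   # via __getitem__ / get_release_by_id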


@ -0,0 +1,94 @@
#
# SPDX-License-Identifier: Apache-2.0
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
import logging
from software import states
from software.exceptions import ReleaseNotFound
from software.release_data import get_SWReleaseCollection
from software.release_data import reload_release_data
LOG = logging.getLogger('main_logger')
# valid release state transition below will still be changed as
# development continue
release_state_transition = {
states.AVAILABLE: [states.DEPLOYING],
states.DEPLOYING: [states.DEPLOYED, states.AVAILABLE],
states.DEPLOYED: [states.REMOVING, states.UNAVAILABLE, states.COMMITTED],
states.REMOVING: [states.AVAILABLE],
states.COMMITTED: [],
states.UNAVAILABLE: [],
}
class ReleaseState(object):
def __init__(self, release_ids=None, release_state=None):
not_found_list = []
release_collection = get_SWReleaseCollection()
if release_ids:
self._release_ids = release_ids[:]
not_found_list = [rel_id for rel_id in release_ids if release_collection[rel_id] is None]
elif release_state:
self._release_ids = [rel.id for rel in release_collection.iterate_releases_by_state(release_state)]
if len(not_found_list) > 0:
raise ReleaseNotFound(not_found_list)
@staticmethod
def deploy_updated(target_state):
if target_state is None: # completed
deploying = ReleaseState(release_state=states.DEPLOYING)
if deploying.is_major_release_deployment():
deployed = ReleaseState(release_state=states.DEPLOYED)
deployed.replaced()
deploying.deploy_completed()
def check_transition(self, target_state):
"""check ALL releases can transform to target state"""
release_collection = get_SWReleaseCollection()
for rel_id in self._release_ids:
state = release_collection[rel_id].state
if target_state not in release_state_transition[state]:
return False
return True
def transform(self, target_state):
if self.check_transition(target_state):
release_collection = get_SWReleaseCollection()
release_collection.update_state(self._release_ids, target_state)
reload_release_data()
def is_major_release_deployment(self):
release_collection = get_SWReleaseCollection()
for rel_id in self._release_ids:
release = release_collection.get_release_by_id(rel_id)
if release.is_ga_release:
return True
return False
def start_deploy(self):
self.transform(states.DEPLOYING)
def deploy_completed(self):
self.transform(states.DEPLOYED)
def committed(self):
self.transform(states.COMMITTED)
def replaced(self):
"""
Current running release is replaced with a new deployed release
This indicates a major release deploy is completed and running
release become "unavailable"
"""
self.transform(states.UNAVAILABLE)
def start_remove(self):
self.transform(states.REMOVING)
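
A usage sketch of the release state machine (the release id is
hypothetical):

    # deploy start: available -> deploying
    ReleaseState(release_ids=["starlingx-24.03.1"]).start_deploy()
    # deploy completed: deploying -> deployed (see deploy_updated above)
    ReleaseState(release_state=states.DEPLOYING).deploy_completed()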

(File diff suppressed because it is too large.)


@ -20,8 +20,8 @@ from software.utils import save_to_json_file
from software.utils import get_software_filesystem_data from software.utils import get_software_filesystem_data
from software.utils import validate_versions from software.utils import validate_versions
from software.constants import DEPLOY_HOST_STATES from software.states import DEPLOY_HOST_STATES
from software.constants import DEPLOY_STATES from software.states import DEPLOY_STATES
LOG = logging.getLogger('main_logger') LOG = logging.getLogger('main_logger')
@ -135,12 +135,15 @@ class Deploy(ABC):
pass pass
@abstractmethod @abstractmethod
def create(self, from_release: str, to_release: str, reboot_required: bool, state: DEPLOY_STATES): def create(self, from_release: str, to_release: str, feed_repo: str,
commit_id: str, reboot_required: bool, state: DEPLOY_STATES):
""" """
Create a new deployment entry. Create a new deployment entry.
:param from_release: The current release version. :param from_release: The current release version.
:param to_release: The target release version. :param to_release: The target release version.
:param feed_repo: ostree repo feed path
:param commit_id: commit-id to deploy
:param reboot_required: If is required to do host reboot. :param reboot_required: If is required to do host reboot.
:param state: The state of the deployment. :param state: The state of the deployment.
@ -230,11 +233,7 @@ class DeployHosts(ABC):
class DeployHandler(Deploy): class DeployHandler(Deploy):
def __init__(self): def create(self, from_release, to_release, feed_repo, commit_id, reboot_required, state=DEPLOY_STATES.START):
super().__init__()
self.data = get_software_filesystem_data()
def create(self, from_release, to_release, reboot_required, state=DEPLOY_STATES.START):
""" """
Create a new deploy with given from and to release version Create a new deploy with given from and to release version
:param from_release: The current release version. :param from_release: The current release version.
@ -242,30 +241,33 @@ class DeployHandler(Deploy):
:param reboot_required: If is required to do host reboot. :param reboot_required: If is required to do host reboot.
:param state: The state of the deployment. :param state: The state of the deployment.
""" """
super().create(from_release, to_release, reboot_required, state) super().create(from_release, to_release, feed_repo, commit_id, reboot_required, state)
deploy = self.query(from_release, to_release) deploy = self.query(from_release, to_release)
if deploy: if deploy:
raise DeployAlreadyExist("Error to create. Deploy already exists.") raise DeployAlreadyExist("Error to create. Deploy already exists.")
new_deploy = { new_deploy = {
"from_release": from_release, "from_release": from_release,
"to_release": to_release, "to_release": to_release,
"feed_repo": feed_repo,
"commit_id": commit_id,
"reboot_required": reboot_required, "reboot_required": reboot_required,
"state": state.value "state": state.value
} }
try: try:
deploy_data = self.data.get("deploy", []) data = get_software_filesystem_data()
deploy_data = data.get("deploy", [])
if not deploy_data: if not deploy_data:
deploy_data = { deploy_data = {
"deploy": [] "deploy": []
} }
deploy_data["deploy"].append(new_deploy) deploy_data["deploy"].append(new_deploy)
self.data.update(deploy_data) data.update(deploy_data)
else: else:
deploy_data.append(new_deploy) deploy_data.append(new_deploy)
save_to_json_file(constants.SOFTWARE_JSON_FILE, self.data) save_to_json_file(constants.SOFTWARE_JSON_FILE, data)
except Exception: except Exception:
self.data["deploy"][0] = {} LOG.exception()
def query(self, from_release, to_release): def query(self, from_release, to_release):
""" """
@ -275,7 +277,8 @@ class DeployHandler(Deploy):
:return: A list of deploy dictionary :return: A list of deploy dictionary
""" """
super().query(from_release, to_release) super().query(from_release, to_release)
for deploy in self.data.get("deploy", []): data = get_software_filesystem_data()
for deploy in data.get("deploy", []):
if (deploy.get("from_release") == from_release and if (deploy.get("from_release") == from_release and
deploy.get("to_release") == to_release): deploy.get("to_release") == to_release):
return deploy return deploy
@ -286,7 +289,8 @@ class DeployHandler(Deploy):
Query all deployments inside software.json file. Query all deployments inside software.json file.
:return: A list of deploy dictionary :return: A list of deploy dictionary
""" """
return self.data.get("deploy", []) data = get_software_filesystem_data()
return data.get("deploy", [])
def update(self, new_state: DEPLOY_STATES): def update(self, new_state: DEPLOY_STATES):
""" """
@ -298,11 +302,12 @@ class DeployHandler(Deploy):
if not deploy: if not deploy:
raise DeployDoNotExist("Error to update deploy state. No deploy in progress.") raise DeployDoNotExist("Error to update deploy state. No deploy in progress.")
data = get_software_filesystem_data()
try: try:
self.data["deploy"][0]["state"] = new_state.value data["deploy"][0]["state"] = new_state.value
save_to_json_file(constants.SOFTWARE_JSON_FILE, self.data) save_to_json_file(constants.SOFTWARE_JSON_FILE, data)
except Exception: except Exception:
self.data["deploy"][0] = deploy LOG.exception()
def delete(self): def delete(self):
""" """
@ -312,19 +317,16 @@ class DeployHandler(Deploy):
deploy = self.query_all() deploy = self.query_all()
if not deploy: if not deploy:
raise DeployDoNotExist("Error to delete deploy state. No deploy in progress.") raise DeployDoNotExist("Error to delete deploy state. No deploy in progress.")
data = get_software_filesystem_data()
try: try:
self.data["deploy"].clear() data["deploy"].clear()
save_to_json_file(constants.SOFTWARE_JSON_FILE, self.data) save_to_json_file(constants.SOFTWARE_JSON_FILE, data)
except Exception: except Exception:
self.data["deploy"][0] = deploy LOG.exception()
class DeployHostHandler(DeployHosts): class DeployHostHandler(DeployHosts):
def __init__(self):
super().__init__()
self.data = get_software_filesystem_data()
def create(self, hostname, state: DEPLOY_HOST_STATES = DEPLOY_HOST_STATES.PENDING): def create(self, hostname, state: DEPLOY_HOST_STATES = DEPLOY_HOST_STATES.PENDING):
super().create(hostname, state) super().create(hostname, state)
deploy = self.query(hostname) deploy = self.query(hostname)
@ -336,16 +338,17 @@ class DeployHostHandler(DeployHosts):
"state": state.value if state else None "state": state.value if state else None
} }
deploy_data = self.data.get("deploy_host", []) data = get_software_filesystem_data()
deploy_data = data.get("deploy_host", [])
if not deploy_data: if not deploy_data:
deploy_data = { deploy_data = {
"deploy_host": [] "deploy_host": []
} }
deploy_data["deploy_host"].append(new_deploy_host) deploy_data["deploy_host"].append(new_deploy_host)
self.data.update(deploy_data) data.update(deploy_data)
else: else:
deploy_data.append(new_deploy_host) deploy_data.append(new_deploy_host)
save_to_json_file(constants.SOFTWARE_JSON_FILE, self.data) save_to_json_file(constants.SOFTWARE_JSON_FILE, data)
def query(self, hostname): def query(self, hostname):
""" """
@ -354,13 +357,15 @@ class DeployHostHandler(DeployHosts):
:return: A list of deploy dictionary :return: A list of deploy dictionary
""" """
super().query(hostname) super().query(hostname)
for deploy in self.data.get("deploy_host", []): data = get_software_filesystem_data()
for deploy in data.get("deploy_host", []):
if deploy.get("hostname") == hostname: if deploy.get("hostname") == hostname:
return deploy return deploy
return None return None
def query_all(self): def query_all(self):
return self.data.get("deploy_host", []) data = get_software_filesystem_data()
return data.get("deploy_host", [])
def update(self, hostname, state: DEPLOY_HOST_STATES): def update(self, hostname, state: DEPLOY_HOST_STATES):
super().update(hostname, state) super().update(hostname, state)
@ -368,23 +373,26 @@ class DeployHostHandler(DeployHosts):
if not deploy: if not deploy:
raise Exception("Error to update. Deploy host do not exist.") raise Exception("Error to update. Deploy host do not exist.")
index = self.data.get("deploy_host", []).index(deploy) data = get_software_filesystem_data()
index = data.get("deploy_host", []).index(deploy)
updated_entity = { updated_entity = {
"hostname": hostname, "hostname": hostname,
"state": state.value "state": state.value
} }
self.data["deploy_host"][index].update(updated_entity) data["deploy_host"][index].update(updated_entity)
save_to_json_file(constants.SOFTWARE_JSON_FILE, self.data) save_to_json_file(constants.SOFTWARE_JSON_FILE, data)
return updated_entity return updated_entity
def delete_all(self): def delete_all(self):
self.data.get("deploy_host").clear() data = get_software_filesystem_data()
save_to_json_file(constants.SOFTWARE_JSON_FILE, self.data) data.get("deploy_host").clear()
save_to_json_file(constants.SOFTWARE_JSON_FILE, data)
def delete(self, hostname): def delete(self, hostname):
super().delete(hostname) super().delete(hostname)
deploy = self.query(hostname) deploy = self.query(hostname)
if not deploy: if not deploy:
raise DeployDoNotExist("Error to delete. Deploy host do not exist.") raise DeployDoNotExist("Error to delete. Deploy host do not exist.")
self.data.get("deploy_host").remove(deploy) data = get_software_filesystem_data()
save_to_json_file(constants.SOFTWARE_JSON_FILE, self.data) data.get("deploy_host").remove(deploy)
save_to_json_file(constants.SOFTWARE_JSON_FILE, data)
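The handlers above share a read-modify-write pattern against the software.json filesystem db; a minimal standalone sketch of that pattern, with the file path assumed and the helpers reimplemented for illustration:

import json

SOFTWARE_JSON_FILE = "/opt/software/software.json"  # assumed path

def get_software_filesystem_data():
    # Load the whole filesystem db; empty dict if the file is absent.
    try:
        with open(SOFTWARE_JSON_FILE) as f:
            return json.load(f)
    except FileNotFoundError:
        return {}

def save_to_json_file(path, data):
    with open(path, "w") as f:
        json.dump(data, f, indent=4)

def append_deploy(new_deploy):
    # Read the current state, mutate it, then persist the whole document,
    # mirroring DeployHandler.create() above.
    data = get_software_filesystem_data()
    data.setdefault("deploy", []).append(new_deploy)
    save_to_json_file(SOFTWARE_JSON_FILE, data)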


@ -32,11 +32,11 @@ from software.exceptions import OSTreeTarFail
from software.exceptions import ReleaseUploadFailure from software.exceptions import ReleaseUploadFailure
from software.exceptions import ReleaseValidationFailure from software.exceptions import ReleaseValidationFailure
from software.exceptions import ReleaseMismatchFailure from software.exceptions import ReleaseMismatchFailure
from software.exceptions import SoftwareFail
from software.exceptions import SoftwareServiceError from software.exceptions import SoftwareServiceError
from software.exceptions import VersionedDeployPrecheckFailure from software.exceptions import VersionedDeployPrecheckFailure
import software.constants as constants import software.constants as constants
from software import states
import software.utils as utils import software.utils as utils
from software.sysinv_utils import get_ihost_list from software.sysinv_utils import get_ihost_list
@ -81,7 +81,7 @@ def configure_logging(logtofile=True, level=logging.INFO):
my_exec = os.path.basename(sys.argv[0]) my_exec = os.path.basename(sys.argv[0])
log_format = '%(asctime)s: ' \ log_format = '%(asctime)s: ' \
+ my_exec + '[%(process)s]: ' \ + my_exec + '[%(process)s:%(thread)d]: ' \
+ '%(filename)s(%(lineno)s): ' \ + '%(filename)s(%(lineno)s): ' \
+ '%(levelname)s: %(message)s' + '%(levelname)s: %(message)s'
@ -231,10 +231,13 @@ class ReleaseData(object):
""" """
def __init__(self): def __init__(self):
self._reset()
def _reset(self):
# #
# The metadata dict stores all metadata associated with a release. # The metadata dict stores all metadata associated with a release.
# This dict is keyed on release_id, with metadata for each release stored # This dict is keyed on release_id, with metadata for each release stored
# in a nested dict. (See parse_metadata method for more info) # in a nested dict. (See parse_metadata_string method for more info)
# #
self.metadata = {} self.metadata = {}
@ -253,8 +256,8 @@ class ReleaseData(object):
for release_id in list(updated_release.metadata): for release_id in list(updated_release.metadata):
# Update all fields except state # Update all fields except state
cur_state = self.metadata[release_id]['state'] cur_state = self.metadata[release_id]['state']
updated_release.metadata[release_id]['state'] = cur_state
self.metadata[release_id].update(updated_release.metadata[release_id]) self.metadata[release_id].update(updated_release.metadata[release_id])
self.metadata[release_id]['state'] = cur_state
def delete_release(self, release_id): def delete_release(self, release_id):
del self.contents[release_id] del self.contents[release_id]
@ -294,22 +297,21 @@ class ReleaseData(object):
outfile.close() outfile.close()
os.rename(new_filename, filename) os.rename(new_filename, filename)
def parse_metadata(self, def parse_metadata_file(self,
filename, filename,
state=None): state=None):
""" """
Parse an individual release metadata XML file Parse an individual release metadata XML file
:param filename: XML file :param filename: XML file
:param state: Indicates Applied, Available, or Committed :param state: Indicates Applied, Available, or Committed
:return: Release ID :return: Release ID
""" """
with open(filename, "r") as f: with open(filename, "r") as f:
text = f.read() text = f.read()
return self.parse_metadata_string(text, state) return self.parse_metadata_string(text, state)
def parse_metadata_string(self, text, state): def parse_metadata_string(self, text, state=None):
root = ElementTree.fromstring(text) root = ElementTree.fromstring(text)
# #
# <patch> # <patch>
@ -391,31 +393,35 @@ class ReleaseData(object):
return release_id return release_id
def load_all_metadata(self, def _read_all_metafile(self, path):
loaddir,
state=None):
""" """
Parse all metadata files in the specified dir Load metadata from all xml files in the specified path
:return: :param path: path of the directory the xml files are in
""" """
for fname in glob.glob("%s/*.xml" % loaddir): for filename in glob.glob("%s/*.xml" % path):
self.parse_metadata(fname, state) with open(filename, "r") as f:
text = f.read()
yield filename, text
def load_all(self): def load_all(self):
# Reset the data # Reset the data
self.__init__() self.__init__()
self.load_all_metadata(constants.AVAILABLE_DIR, state=constants.AVAILABLE)
self.load_all_metadata(constants.UNAVAILABLE_DIR, state=constants.UNAVAILABLE)
self.load_all_metadata(constants.DEPLOYING_START_DIR, state=constants.DEPLOYING_START)
self.load_all_metadata(constants.DEPLOYING_HOST_DIR, state=constants.DEPLOYING_HOST)
self.load_all_metadata(constants.DEPLOYING_ACTIVATE_DIR, state=constants.DEPLOYING_ACTIVATE)
self.load_all_metadata(constants.DEPLOYING_COMPLETE_DIR, state=constants.DEPLOYING_COMPLETE)
self.load_all_metadata(constants.DEPLOYED_DIR, state=constants.DEPLOYED)
self.load_all_metadata(constants.REMOVING_DIR, state=constants.REMOVING)
self.load_all_metadata(constants.ABORTING_DIR, state=constants.ABORTING)
self.load_all_metadata(constants.COMMITTED_DIR, state=constants.COMMITTED)
# load the release metadata from feed directory or filesystem db state_map = {
states.AVAILABLE: states.AVAILABLE_DIR,
states.UNAVAILABLE: states.UNAVAILABLE_DIR,
states.DEPLOYING: states.DEPLOYING_DIR,
states.DEPLOYED: states.DEPLOYED_DIR,
states.REMOVING: states.REMOVING_DIR,
}
for state, path in state_map.items():
for filename, text in self._read_all_metafile(path):
try:
self.parse_metadata_string(text, state=state)
except Exception as e:
err_msg = f"Failed parsing {filename}, {e}"
LOG.exception(err_msg)
def query_line(self, def query_line(self,
release_id, release_id,
@ -636,54 +642,56 @@ class PatchFile(object):
raise SystemExit(e.returncode) raise SystemExit(e.returncode)
@staticmethod @staticmethod
def read_patch(path, cert_type=None): def read_patch(path, dest, cert_type=None):
# We want to enable signature checking by default # We want to enable signature checking by default
# Note: cert_type=None is required if we are to enforce 'no dev patches on a formal load' rule. # Note: cert_type=None is required if we are to enforce 'no dev patches on a formal load' rule.
# Open the patch file and extract the contents to the current dir # Open the patch file and extract the contents to the current dir
tar = tarfile.open(path, "r:gz") tar = tarfile.open(path, "r:gz")
tar.extract("signature") tar.extract("signature", path=dest)
try: try:
tar.extract(detached_signature_file) tar.extract(detached_signature_file, path=dest)
except KeyError: except KeyError:
msg = "Patch has not been signed" msg = "Patch has not been signed"
LOG.warning(msg) LOG.warning(msg)
# Filelist used for signature validation and verification # Filelist used for signature validation and verification
sig_filelist = ["metadata.tar", "software.tar"] filelist = ["metadata.tar", "software.tar"]
# Check if conditional scripts are inside the patch # Check if conditional scripts are inside the patch
# If yes then add them to signature checklist # If yes then add them to signature checklist
if "semantics.tar" in [f.name for f in tar.getmembers()]: if "semantics.tar" in [f.name for f in tar.getmembers()]:
sig_filelist.append("semantics.tar") filelist.append("semantics.tar")
if "pre-install.sh" in [f.name for f in tar.getmembers()]: if "pre-install.sh" in [f.name for f in tar.getmembers()]:
sig_filelist.append("pre-install.sh") filelist.append("pre-install.sh")
if "post-install.sh" in [f.name for f in tar.getmembers()]: if "post-install.sh" in [f.name for f in tar.getmembers()]:
sig_filelist.append("post-install.sh") filelist.append("post-install.sh")
for f in sig_filelist: for f in filelist:
tar.extract(f) tar.extract(f, path=dest)
# Verify the data integrity signature first # Verify the data integrity signature first
sigfile = open("signature", "r") sigfile = open(os.path.join(dest, "signature"), "r")
sig = int(sigfile.read(), 16) sig = int(sigfile.read(), 16)
sigfile.close() sigfile.close()
expected_sig = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF expected_sig = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
sig_filelist = [os.path.join(dest, f) for f in filelist]
for f in sig_filelist: for f in sig_filelist:
sig ^= get_md5(f) sig ^= get_md5(f)
if sig != expected_sig: if sig != expected_sig:
msg = "Patch failed verification" msg = "Software failed signature verification."
LOG.error(msg) LOG.error(msg)
raise ReleaseValidationFailure(msg) raise ReleaseValidationFailure(error=msg)
# Verify detached signature # Verify detached signature
if os.path.exists(detached_signature_file): sig_file = os.path.join(dest, detached_signature_file)
if os.path.exists(sig_file):
sig_valid = verify_files( sig_valid = verify_files(
sig_filelist, sig_filelist,
detached_signature_file, sig_file,
cert_type=cert_type) cert_type=cert_type)
if sig_valid is True: if sig_valid is True:
msg = "Signature verified, patch has been signed" msg = "Signature verified, patch has been signed"
@ -693,20 +701,21 @@ class PatchFile(object):
msg = "Signature check failed" msg = "Signature check failed"
if cert_type is None: if cert_type is None:
LOG.error(msg) LOG.error(msg)
raise ReleaseValidationFailure(msg) raise ReleaseValidationFailure(error=msg)
else: else:
msg = "Patch has not been signed" msg = "Software has not been signed."
if cert_type is None: if cert_type is None:
LOG.error(msg) LOG.error(msg)
raise ReleaseValidationFailure(msg) raise ReleaseValidationFailure(error=msg)
# Restart script # Restart script
for f in tar.getmembers(): for f in tar.getmembers():
if f.name not in sig_filelist: if f.name not in filelist:
tar.extract(f) tar.extract(f, path=dest)
tar = tarfile.open("metadata.tar") metadata = os.path.join(dest, "metadata.tar")
tar.extractall() tar = tarfile.open(metadata)
tar.extractall(path=dest)
@staticmethod @staticmethod
def query_patch(patch, field=None): def query_patch(patch, field=None):
@ -716,12 +725,6 @@ class PatchFile(object):
# Create a temporary working directory # Create a temporary working directory
tmpdir = tempfile.mkdtemp(prefix="patch_") tmpdir = tempfile.mkdtemp(prefix="patch_")
# Save the current directory, so we can chdir back after
orig_wd = os.getcwd()
# Change to the tmpdir
os.chdir(tmpdir)
r = {} r = {}
try: try:
@ -729,7 +732,7 @@ class PatchFile(object):
# Need to determine the cert_type # Need to determine the cert_type
for cert_type_str in cert_type_all: for cert_type_str in cert_type_all:
try: try:
PatchFile.read_patch(abs_patch, cert_type=[cert_type_str]) PatchFile.read_patch(abs_patch, tmpdir, cert_type=[cert_type_str])
except ReleaseValidationFailure: except ReleaseValidationFailure:
pass pass
else: else:
@ -738,15 +741,17 @@ class PatchFile(object):
break break
if "cert" not in r: if "cert" not in r:
# NOTE(bqian) below reads like a bug in certain cases. need to revisit.
# If cert is unknown, then file is not yet open for reading. # If cert is unknown, then file is not yet open for reading.
# Try to open it for reading now, using all available keys. # Try to open it for reading now, using all available keys.
# We can't omit cert_type, or pass None, because that will trigger the code # We can't omit cert_type, or pass None, because that will trigger the code
# path used by installed product, in which dev keys are not accepted unless # path used by installed product, in which dev keys are not accepted unless
# a magic file exists. # a magic file exists.
PatchFile.read_patch(abs_patch, cert_type=cert_type_all) PatchFile.read_patch(abs_patch, tmpdir, cert_type=cert_type_all)
thispatch = ReleaseData() thispatch = ReleaseData()
patch_id = thispatch.parse_metadata("metadata.xml") filename = os.path.join(tmpdir, "metadata.xml")
patch_id = thispatch.parse_metadata_file(filename)
if field is None or field == "id": if field is None or field == "id":
r["id"] = patch_id r["id"] = patch_id
@ -761,20 +766,14 @@ class PatchFile(object):
r[field] = thispatch.query_line(patch_id, field) r[field] = thispatch.query_line(patch_id, field)
except ReleaseValidationFailure as e: except ReleaseValidationFailure as e:
msg = "Patch validation failed during extraction" msg = "Patch validation failed during extraction. %s" % str(e)
LOG.exception(msg) LOG.exception(msg)
raise e raise e
except ReleaseMismatchFailure as e: except tarfile.TarError as te:
msg = "Patch Mismatch during extraction" msg = "Extract software failed %s" % str(te)
LOG.exception(msg) LOG.exception(msg)
raise e raise ReleaseValidationFailure(error=msg)
except tarfile.TarError:
msg = "Failed during patch extraction"
LOG.exception(msg)
raise ReleaseValidationFailure(msg)
finally: finally:
# Change back to original working dir
os.chdir(orig_wd)
shutil.rmtree(tmpdir) shutil.rmtree(tmpdir)
return r return r
@ -790,45 +789,34 @@ class PatchFile(object):
# Create a temporary working directory # Create a temporary working directory
tmpdir = tempfile.mkdtemp(prefix="patch_") tmpdir = tempfile.mkdtemp(prefix="patch_")
# Save the current directory, so we can chdir back after
orig_wd = os.getcwd()
# Change to the tmpdir
os.chdir(tmpdir)
try: try:
cert_type = None cert_type = None
meta_data = PatchFile.query_patch(abs_patch) meta_data = PatchFile.query_patch(abs_patch)
if 'cert' in meta_data: if 'cert' in meta_data:
cert_type = meta_data['cert'] cert_type = meta_data['cert']
PatchFile.read_patch(abs_patch, cert_type=cert_type) PatchFile.read_patch(abs_patch, tmpdir, cert_type=cert_type)
ReleaseData.modify_metadata_text("metadata.xml", key, value) path = os.path.join(tmpdir, "metadata.xml")
ReleaseData.modify_metadata_text(path, key, value)
PatchFile.write_patch(new_abs_patch, cert_type=cert_type) PatchFile.write_patch(new_abs_patch, cert_type=cert_type)
os.rename(new_abs_patch, abs_patch) os.rename(new_abs_patch, abs_patch)
rc = True rc = True
except ReleaseValidationFailure as e: except tarfile.TarError as te:
raise e msg = "Extract software failed %s" % str(te)
except ReleaseMismatchFailure as e:
raise e
except tarfile.TarError:
msg = "Failed during patch extraction"
LOG.exception(msg) LOG.exception(msg)
raise ReleaseValidationFailure(msg) raise ReleaseValidationFailure(error=msg)
except Exception as e: except Exception as e:
template = "An exception of type {0} occurred. Arguments:\n{1!r}" template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(e).__name__, e.args) message = template.format(type(e).__name__, e.args)
print(message) LOG.exception(message)
finally: finally:
# Change back to original working dir
os.chdir(orig_wd)
shutil.rmtree(tmpdir) shutil.rmtree(tmpdir)
return rc return rc
@staticmethod @staticmethod
def extract_patch(patch, def extract_patch(patch,
metadata_dir=constants.AVAILABLE_DIR, metadata_dir=states.AVAILABLE_DIR,
metadata_only=False, metadata_only=False,
existing_content=None, existing_content=None,
base_pkgdata=None): base_pkgdata=None):
@ -845,23 +833,18 @@ class PatchFile(object):
# Create a temporary working directory # Create a temporary working directory
tmpdir = tempfile.mkdtemp(prefix="patch_") tmpdir = tempfile.mkdtemp(prefix="patch_")
# Save the current directory, so we can chdir back after
orig_wd = os.getcwd()
# Change to the tmpdir
os.chdir(tmpdir)
try: try:
# Open the patch file and extract the contents to the tmpdir # Open the patch file and extract the contents to the tmpdir
PatchFile.read_patch(abs_patch) PatchFile.read_patch(abs_patch, tmpdir)
thispatch = ReleaseData() thispatch = ReleaseData()
patch_id = thispatch.parse_metadata("metadata.xml") filename = os.path.join(tmpdir, "metadata.xml")
with open(filename, "r") as f:
text = f.read()
patch_id = thispatch.parse_metadata_string(text)
if patch_id is None: if patch_id is None:
print("Failed to import patch")
# Change back to original working dir
os.chdir(orig_wd)
shutil.rmtree(tmpdir) shutil.rmtree(tmpdir)
return None return None
@ -872,15 +855,15 @@ class PatchFile(object):
if not base_pkgdata.check_release(patch_sw_version): if not base_pkgdata.check_release(patch_sw_version):
msg = "Software version %s for release %s is not installed" % (patch_sw_version, patch_id) msg = "Software version %s for release %s is not installed" % (patch_sw_version, patch_id)
LOG.exception(msg) LOG.exception(msg)
raise ReleaseValidationFailure(msg) raise ReleaseValidationFailure(error=msg)
if metadata_only: if metadata_only:
# This is a re-import. Ensure the content lines up # This is a re-import. Ensure the content lines up
if existing_content is None \ if existing_content is None \
or existing_content != thispatch.contents[patch_id]: or existing_content != thispatch.contents[patch_id]:
msg = "Contents of re-imported patch do not match" msg = f"Contents of {patch_id} do not match re-uploaded release"
LOG.exception(msg) LOG.error(msg)
raise ReleaseMismatchFailure(msg) raise ReleaseMismatchFailure(error=msg)
patch_sw_version = utils.get_major_release_version( patch_sw_version = utils.get_major_release_version(
thispatch.metadata[patch_id]["sw_version"]) thispatch.metadata[patch_id]["sw_version"])
@ -888,42 +871,41 @@ class PatchFile(object):
if not os.path.exists(abs_ostree_tar_dir): if not os.path.exists(abs_ostree_tar_dir):
os.makedirs(abs_ostree_tar_dir) os.makedirs(abs_ostree_tar_dir)
shutil.move("metadata.xml", shutil.move(os.path.join(tmpdir, "metadata.xml"),
"%s/%s-metadata.xml" % (abs_metadata_dir, patch_id)) "%s/%s-metadata.xml" % (abs_metadata_dir, patch_id))
shutil.move("software.tar", shutil.move(os.path.join(tmpdir, "software.tar"),
"%s/%s-software.tar" % (abs_ostree_tar_dir, patch_id)) "%s/%s-software.tar" % (abs_ostree_tar_dir, patch_id))
v = "%s/%s-software.tar" % (abs_ostree_tar_dir, patch_id)
LOG.info("software.tar %s" % v)
# restart_script may not exist in metadata. # restart_script may not exist in metadata.
if thispatch.metadata[patch_id].get("restart_script"): if thispatch.metadata[patch_id].get("restart_script"):
if not os.path.exists(root_scripts_dir): if not os.path.exists(root_scripts_dir):
os.makedirs(root_scripts_dir) os.makedirs(root_scripts_dir)
restart_script_name = thispatch.metadata[patch_id]["restart_script"] restart_script_name = os.path.join(tmpdir, thispatch.metadata[patch_id]["restart_script"])
shutil.move(restart_script_name, if os.path.isfile(restart_script_name):
"%s/%s" % (root_scripts_dir, restart_script_name)) shutil.move(restart_script_name, os.path.join(root_scripts_dir, restart_script_name))
except ReleaseValidationFailure as e: except tarfile.TarError as te:
raise e msg = "Extract software failed %s" % str(te)
except ReleaseMismatchFailure as e:
raise e
except tarfile.TarError:
msg = "Failed during patch extraction"
LOG.exception(msg) LOG.exception(msg)
raise ReleaseValidationFailure(msg) raise ReleaseValidationFailure(error=msg)
except KeyError: except KeyError as ke:
msg = "Failed during patch extraction" # NOTE(bqian) assuming this is metadata missing key.
# this try except should be narrowed down to protect more specific
# routine accessing external data (metadata) only.
msg = "Software metadata missing required value for %s" % str(ke)
LOG.exception(msg) LOG.exception(msg)
raise ReleaseValidationFailure(msg) raise ReleaseValidationFailure(error=msg)
except OSError: # except OSError:
msg = "Failed during patch extraction" # msg = "Failed during patch extraction"
LOG.exception(msg) # LOG.exception(msg)
raise SoftwareFail(msg) # raise SoftwareFail(msg)
except IOError: # pylint: disable=duplicate-except # except IOError: # pylint: disable=duplicate-except
msg = "Failed during patch extraction" # msg = "Failed during patch extraction"
LOG.exception(msg) # LOG.exception(msg)
raise SoftwareFail(msg) # raise SoftwareFail(msg)
finally: finally:
# Change back to original working dir
os.chdir(orig_wd)
shutil.rmtree(tmpdir) shutil.rmtree(tmpdir)
return thispatch return thispatch
@ -939,17 +921,16 @@ class PatchFile(object):
# Create a temporary working directory # Create a temporary working directory
patch_tmpdir = tempfile.mkdtemp(prefix="patch_") patch_tmpdir = tempfile.mkdtemp(prefix="patch_")
# Save the current directory, so we can chdir back after
orig_wd = os.getcwd()
# Change to the tmpdir
os.chdir(patch_tmpdir)
# Load the patch # Load the patch
abs_patch = os.path.abspath(patch) abs_patch = os.path.abspath(patch)
PatchFile.read_patch(abs_patch) PatchFile.read_patch(abs_patch, patch_tmpdir)
thispatch = ReleaseData() thispatch = ReleaseData()
patch_id = thispatch.parse_metadata("metadata.xml")
filename = os.path.join(patch_tmpdir, "metadata.xml")
with open(filename, "r") as f:
text = f.read()
patch_id = thispatch.parse_metadata_string(text)
patch_sw_version = utils.get_major_release_version( patch_sw_version = utils.get_major_release_version(
thispatch.metadata[patch_id]["sw_version"]) thispatch.metadata[patch_id]["sw_version"])
@ -982,7 +963,6 @@ class PatchFile(object):
raise OSTreeTarFail(msg) raise OSTreeTarFail(msg)
finally: finally:
shutil.rmtree(tmpdir, ignore_errors=True) shutil.rmtree(tmpdir, ignore_errors=True)
os.chdir(orig_wd)
shutil.rmtree(patch_tmpdir) shutil.rmtree(patch_tmpdir)
@staticmethod @staticmethod
@ -1316,13 +1296,15 @@ def is_deploy_state_in_sync():
return False return False
def is_deployment_in_progress(release_metadata): def is_deployment_in_progress():
""" """
Check if at least one deployment is in progress Check if at least one deployment is in progress
:param release_metadata: dict of release metadata :param release_metadata: dict of release metadata
:return: bool true if in progress, false otherwise :return: bool true if in progress, false otherwise
""" """
return any(release['state'] == constants.DEPLOYING for release in release_metadata.values()) dbapi = get_instance()
deploys = dbapi.get_deploy_all()
return len(deploys) > 0
def set_host_target_load(hostname, major_release): def set_host_target_load(hostname, major_release):
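read_patch() above verifies data integrity by XOR-ing the MD5 digest of each extracted payload file with the shipped signature and expecting a 128-bit all-ones result; a self-contained sketch of that check, with get_md5 reimplemented here for illustration:

import hashlib
import os

EXPECTED_SIG = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF  # 128-bit all-ones

def get_md5(path):
    # MD5 digest of a file, returned as an integer so it can be XOR-ed.
    md5 = hashlib.md5()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            md5.update(chunk)
    return int(md5.hexdigest(), 16)

def verify_integrity(dest, filelist):
    # The patch carries a "signature" file chosen so that XOR-ing it with
    # the digests of every payload file yields the all-ones constant.
    with open(os.path.join(dest, "signature")) as sigfile:
        sig = int(sigfile.read(), 16)
    for name in filelist:
        sig ^= get_md5(os.path.join(dest, name))
    return sig == EXPECTED_SIG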

software/software/states.py (new file, 126 lines)

@ -0,0 +1,126 @@
"""
Copyright (c) 2023-2024 Wind River Systems, Inc.
SPDX-License-Identifier: Apache-2.0
"""
from enum import Enum
import os
from software.constants import SOFTWARE_STORAGE_DIR
# software release life cycle
# (fresh install) -> deployed -> (upgrade to next version and deploy complete) -> unavailable -> (deleted)
# ^
# |---------------------------------------------------------
# ^
# |
# (upload) -> available -> (deploy start) -> deploying -> (deploy complete) -> deployed
# \---> (deleted)
#
# deploy life cycle
# (deploy-start)
# |
# V
# deploy-start
# |
# V
# start-done -> deploy-host -> deploy-active -> deploy-active-done -> deploy-complete -> (delete)
# \ \ \
# \--------------\------------\----> (deploy abort) -> deploy-abort --> deploy-abort-done -> (delete)
#
# deploy host life cycle
# /----(deploy abort/reverse deploy)---
# / |
# / V
# (deploy-start) -> pending -> deploying -------------> deployed --------(deploy-complete) -> (deleted)
# ^ \---------> (deploy abort/reverse deploy)
# | /
# |-------------------------------------------/
# Release states
AVAILABLE_DIR = os.path.join(SOFTWARE_STORAGE_DIR, "metadata/available")
UNAVAILABLE_DIR = os.path.join(SOFTWARE_STORAGE_DIR, "metadata/unavailable")
DEPLOYING_DIR = os.path.join(SOFTWARE_STORAGE_DIR, "metadata/deploying")
DEPLOYED_DIR = os.path.join(SOFTWARE_STORAGE_DIR, "metadata/deployed")
REMOVING_DIR = os.path.join(SOFTWARE_STORAGE_DIR, "metadata/removing")
COMMITTED_DIR = os.path.join(SOFTWARE_STORAGE_DIR, "metadata/committed")
DEPLOY_STATE_METADATA_DIR = [
AVAILABLE_DIR,
UNAVAILABLE_DIR,
DEPLOYING_DIR,
DEPLOYED_DIR,
REMOVING_DIR,
COMMITTED_DIR,
]
# new release state needs to be added to VALID_RELEASE_STATES list
AVAILABLE = 'available'
UNAVAILABLE = 'unavailable'
DEPLOYING = 'deploying'
DEPLOYED = 'deployed'
REMOVING = 'removing'
COMMITTED = 'committed'
VALID_RELEASE_STATES = [AVAILABLE, UNAVAILABLE, DEPLOYING, DEPLOYED,
REMOVING, COMMITTED]
RELEASE_STATE_TO_DIR_MAP = {AVAILABLE: AVAILABLE_DIR,
UNAVAILABLE: UNAVAILABLE_DIR,
DEPLOYING: DEPLOYING_DIR,
DEPLOYED: DEPLOYED_DIR,
REMOVING: REMOVING_DIR,
COMMITTED: COMMITTED_DIR}
DELETABLE_STATE = [AVAILABLE, UNAVAILABLE]
# valid release state transition below could still be changed as
# development continue
RELEASE_STATE_VALID_TRANSITION = {
AVAILABLE: [DEPLOYING],
DEPLOYING: [DEPLOYED, AVAILABLE],
DEPLOYED: [REMOVING, UNAVAILABLE]
}
VALID_DEPLOY_START_STATES = [
AVAILABLE,
DEPLOYED,
]
# deploy states
class DEPLOY_STATES(Enum):
START = 'start'
START_DONE = 'start-done'
START_FAILED = 'start-failed'
HOST = 'host'
HOST_DONE = 'host-done'
HOST_FAILED = 'host-failed'
ACTIVATE = 'activate'
ACTIVATE_DONE = 'activate-done'
ACTIVATE_FAILED = 'activate-failed'
ABORT = 'abort'
ABORT_DONE = 'abort-done'
# deploy host state
class DEPLOY_HOST_STATES(Enum):
DEPLOYED = 'deployed'
DEPLOYING = 'deploying'
FAILED = 'failed'
PENDING = 'pending'
VALID_HOST_DEPLOY_STATE = [
DEPLOY_HOST_STATES.DEPLOYED,
DEPLOY_HOST_STATES.DEPLOYING,
DEPLOY_HOST_STATES.FAILED,
DEPLOY_HOST_STATES.PENDING,
]
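A minimal sketch of how a transform() built on these tables might gate a release-state move; the function name and exception type are assumptions, only the tables come from this file:

from software.states import RELEASE_STATE_TO_DIR_MAP
from software.states import RELEASE_STATE_VALID_TRANSITION

class InvalidTransition(Exception):
    pass

def validate_release_transition(current, target):
    # Release-state moves must follow RELEASE_STATE_VALID_TRANSITION.
    allowed = RELEASE_STATE_VALID_TRANSITION.get(current, [])
    if target not in allowed:
        raise InvalidTransition(
            "release state %s cannot move to %s" % (current, target))
    # Metadata dir the release's xml file would be moved into.
    return RELEASE_STATE_TO_DIR_MAP[target]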


@ -5,15 +5,15 @@
# #
# This import has to be first # This import has to be first
from software.tests import base # pylint: disable=unused-import from software.tests import base # pylint: disable=unused-import # noqa: F401
from software.software_controller import PatchController from software.software_controller import PatchController
from software.software_controller import ReleaseValidationFailure from software.exceptions import ReleaseValidationFailure
import unittest import unittest
from unittest.mock import MagicMock from unittest.mock import MagicMock
from unittest.mock import mock_open from unittest.mock import mock_open
from unittest.mock import patch from unittest.mock import patch
from software import constants from software import constants
from software import states
class TestSoftwareController(unittest.TestCase): class TestSoftwareController(unittest.TestCase):
@ -65,8 +65,7 @@ class TestSoftwareController(unittest.TestCase):
# Call the function being tested # Call the function being tested
with patch('software.software_controller.SW_VERSION', '1.0.0'): with patch('software.software_controller.SW_VERSION', '1.0.0'):
info, warning, error, release_meta_info = controller._process_upload_upgrade_files(self.upgrade_files, # pylint: disable=protected-access info, warning, error, release_meta_info = controller._process_upload_upgrade_files(self.upgrade_files) # pylint: disable=protected-access
controller.release_data)
# Verify that the expected functions were called with the expected arguments # Verify that the expected functions were called with the expected arguments
mock_verify_files.assert_called_once_with([self.upgrade_files[constants.ISO_EXTENSION]], mock_verify_files.assert_called_once_with([self.upgrade_files[constants.ISO_EXTENSION]],
@ -85,7 +84,7 @@ class TestSoftwareController(unittest.TestCase):
# Verify that the expected messages were returned # Verify that the expected messages were returned
self.assertEqual( self.assertEqual(
info, info,
'iso and signature files upload completed\nImporting iso is in progress\nLoad import successful') 'Load import successful')
self.assertEqual(warning, '') self.assertEqual(warning, '')
self.assertEqual(error, '') self.assertEqual(error, '')
self.assertEqual( self.assertEqual(
@ -114,17 +113,14 @@ class TestSoftwareController(unittest.TestCase):
# Call the function being tested # Call the function being tested
with patch('software.software_controller.SW_VERSION', '1.0'): with patch('software.software_controller.SW_VERSION', '1.0'):
info, warning, error, _ = controller._process_upload_upgrade_files(self.upgrade_files, # pylint: disable=protected-access try:
controller.release_data) controller._process_upload_upgrade_files(self.upgrade_files) # pylint: disable=protected-access
except ReleaseValidationFailure as e:
# Verify that the expected messages were returned self.assertEqual(e.error, 'Software test.iso:test.sig signature validation failed')
self.assertEqual(info, '')
self.assertEqual(warning, '')
self.assertEqual(error, 'Upgrade file signature verification failed\n')
@patch('software.software_controller.PatchController.__init__', return_value=None) @patch('software.software_controller.PatchController.__init__', return_value=None)
@patch('software.software_controller.verify_files', @patch('software.software_controller.verify_files',
side_effect=ReleaseValidationFailure('Invalid signature file')) side_effect=ReleaseValidationFailure(error='Invalid signature file'))
@patch('software.software_controller.PatchController.major_release_upload_check') @patch('software.software_controller.PatchController.major_release_upload_check')
def test_process_upload_upgrade_files_validation_error(self, def test_process_upload_upgrade_files_validation_error(self,
mock_major_release_upload_check, mock_major_release_upload_check,
@ -137,13 +133,10 @@ class TestSoftwareController(unittest.TestCase):
mock_major_release_upload_check.return_value = True mock_major_release_upload_check.return_value = True
# Call the function being tested # Call the function being tested
info, warning, error, _ = controller._process_upload_upgrade_files(self.upgrade_files, # pylint: disable=protected-access try:
controller.release_data) controller._process_upload_upgrade_files(self.upgrade_files) # pylint: disable=protected-access
except ReleaseValidationFailure as e:
# Verify that the expected messages were returned self.assertEqual(e.error, "Invalid signature file")
self.assertEqual(info, '')
self.assertEqual(warning, '')
self.assertEqual(error, 'Upgrade file signature verification failed\n')
@patch('software.software_controller.os.path.isfile') @patch('software.software_controller.os.path.isfile')
@patch('software.software_controller.json.load') @patch('software.software_controller.json.load')
@ -238,8 +231,8 @@ class TestSoftwareController(unittest.TestCase):
"to_release": "2.0.0" "to_release": "2.0.0"
}) })
controller.db_api_instance.get_deploy_host = MagicMock(return_value=[ controller.db_api_instance.get_deploy_host = MagicMock(return_value=[
{"hostname": "host1", "state": constants.DEPLOYED}, {"hostname": "host1", "state": states.DEPLOYED},
{"hostname": "host2", "state": constants.DEPLOYING} {"hostname": "host2", "state": states.DEPLOYING}
]) ])
# Test when the host is deployed # Test when the host is deployed
@ -248,7 +241,7 @@ class TestSoftwareController(unittest.TestCase):
"hostname": "host1", "hostname": "host1",
"current_sw_version": "2.0.0", "current_sw_version": "2.0.0",
"target_sw_version": "2.0.0", "target_sw_version": "2.0.0",
"host_state": constants.DEPLOYED "host_state": states.DEPLOYED
}]) }])
@patch('software.software_controller.json.load') @patch('software.software_controller.json.load')
@ -267,8 +260,8 @@ class TestSoftwareController(unittest.TestCase):
"to_release": "2.0.0" "to_release": "2.0.0"
}) })
controller.db_api_instance.get_deploy_host = MagicMock(return_value=[ controller.db_api_instance.get_deploy_host = MagicMock(return_value=[
{"hostname": "host1", "state": constants.DEPLOYED}, {"hostname": "host1", "state": states.DEPLOYED},
{"hostname": "host2", "state": constants.DEPLOYING} {"hostname": "host2", "state": states.DEPLOYING}
]) ])
# Test when the host is deploying # Test when the host is deploying
@ -277,7 +270,7 @@ class TestSoftwareController(unittest.TestCase):
"hostname": "host2", "hostname": "host2",
"current_sw_version": "1.0.0", "current_sw_version": "1.0.0",
"target_sw_version": "2.0.0", "target_sw_version": "2.0.0",
"host_state": constants.DEPLOYING "host_state": states.DEPLOYING
}]) }])
@patch('software.software_controller.json.load') @patch('software.software_controller.json.load')
@ -296,8 +289,8 @@ class TestSoftwareController(unittest.TestCase):
"to_release": "2.0.0" "to_release": "2.0.0"
}) })
controller.db_api_instance.get_deploy_host = MagicMock(return_value=[ controller.db_api_instance.get_deploy_host = MagicMock(return_value=[
{"hostname": "host1", "state": constants.DEPLOYED}, {"hostname": "host1", "state": states.DEPLOYED},
{"hostname": "host2", "state": constants.DEPLOYING} {"hostname": "host2", "state": states.DEPLOYING}
]) ])
# Test when the host is deploying # Test when the host is deploying
@ -306,12 +299,12 @@ class TestSoftwareController(unittest.TestCase):
"hostname": "host1", "hostname": "host1",
"current_sw_version": "2.0.0", "current_sw_version": "2.0.0",
"target_sw_version": "2.0.0", "target_sw_version": "2.0.0",
"host_state": constants.DEPLOYED "host_state": states.DEPLOYED
}, { }, {
"hostname": "host2", "hostname": "host2",
"current_sw_version": "1.0.0", "current_sw_version": "1.0.0",
"target_sw_version": "2.0.0", "target_sw_version": "2.0.0",
"host_state": constants.DEPLOYING "host_state": states.DEPLOYING
}]) }])
@patch('software.software_controller.json.load') @patch('software.software_controller.json.load')
@ -394,4 +387,4 @@ class TestSoftwareController(unittest.TestCase):
# Verify that the expected methods were called # Verify that the expected methods were called
db_api_instance_mock.get_deploy_all.assert_called_once() db_api_instance_mock.get_deploy_all.assert_called_once()
self.assertEqual(result, None) self.assertIsNone(result)


@ -130,7 +130,7 @@ class TestSoftwareFunction(unittest.TestCase):
self.assertEqual(val["install_instructions"], r.install_instructions) self.assertEqual(val["install_instructions"], r.install_instructions)
self.assertEqual(val["warnings"], r.warnings) self.assertEqual(val["warnings"], r.warnings)
self.assertEqual(val["status"], r.status) self.assertEqual(val["status"], r.status)
self.assertEqual(val["unremovable"], r.unremovable) self.assertEqual(val["unremovable"] == 'Y', r.unremovable)
if val["restart_script"] is None: if val["restart_script"] is None:
self.assertIsNone(r.restart_script) self.assertIsNone(r.restart_script)
else: else:
@ -159,7 +159,7 @@ class TestSoftwareFunction(unittest.TestCase):
self.assertEqual(val["install_instructions"], r.install_instructions) self.assertEqual(val["install_instructions"], r.install_instructions)
self.assertEqual(val["warnings"], r.warnings) self.assertEqual(val["warnings"], r.warnings)
self.assertEqual(val["status"], r.status) self.assertEqual(val["status"], r.status)
self.assertEqual(val["unremovable"], r.unremovable) self.assertEqual(val["unremovable"] == 'Y', r.unremovable)
if val["restart_script"] is None: if val["restart_script"] is None:
self.assertIsNone(r.restart_script) self.assertIsNone(r.restart_script)
else: else:
@ -178,7 +178,7 @@ class TestSoftwareFunction(unittest.TestCase):
self.assertEqual(val["install_instructions"], r.install_instructions) self.assertEqual(val["install_instructions"], r.install_instructions)
self.assertEqual(val["warnings"], r.warnings) self.assertEqual(val["warnings"], r.warnings)
self.assertEqual(val["status"], r.status) self.assertEqual(val["status"], r.status)
self.assertEqual(val["unremovable"], r.unremovable) self.assertEqual(val["unremovable"] == 'Y', r.unremovable)
if val["restart_script"] is None: if val["restart_script"] is None:
self.assertIsNone(r.restart_script) self.assertIsNone(r.restart_script)
else: else:


@ -43,20 +43,19 @@ class ExceptionHook(hooks.PecanHook):
status = 500 status = 500
if isinstance(e, SoftwareServiceError): if isinstance(e, SoftwareServiceError):
LOG.warning("An issue is detected. Signature [%s]" % signature) # Only the exceptions that are pre-categorized as "expected" that
# are known as operational or environmental, the detail (possibly
# with recovery/resolve instruction) are to be displayed to the end
# user
LOG.warning("%s. Signature [%s]" % (e.error, signature))
# TODO(bqian) remove the logging after it is stable # TODO(bqian) remove the logging after it is stable
LOG.exception(e) LOG.exception(e)
data = dict(info=e.info, warning=e.warning, error=e.error) data = dict(info=e.info, warning=e.warning, error=e.error)
else: else:
# with an exception that is not pre-categorized as "expected", it is a
# bug. Or not properly categorizing the exception itself is a bug.
err_msg = "Internal error occurred. Error signature [%s]" % signature err_msg = "Internal error occurred. Error signature [%s]" % signature
try:
# If exception contains error details, send that to user
if str(e):
err_msg = "Error \"%s\", Error signature [%s]" % (str(e), signature)
except Exception:
pass
LOG.error(err_msg)
LOG.exception(e) LOG.exception(e)
data = dict(info="", warning="", error=err_msg) data = dict(info="", warning="", error=err_msg)
return webob.Response(json.dumps(data), status=status) return webob.Response(json.dumps(data), status=status)
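The hook above splits exceptions into pre-categorized "expected" service errors, whose curated detail is returned to the caller, and everything else, which is reported only by signature; a stripped-down sketch of that split, with SoftwareServiceError reimplemented here for illustration (the real class lives in software.exceptions):

import json
import logging
import uuid

LOG = logging.getLogger('main_logger')

class SoftwareServiceError(Exception):
    # Minimal stand-in with the fields used by the hook above.
    def __init__(self, info="", warning="", error=""):
        super().__init__(error)
        self.info, self.warning, self.error = info, warning, error

def build_error_response(e):
    signature = str(uuid.uuid4())  # correlation id for the logs (assumed scheme)
    if isinstance(e, SoftwareServiceError):
        # Operational/environmental problem: surface the curated detail.
        LOG.warning("%s. Signature [%s]", e.error, signature)
        data = dict(info=e.info, warning=e.warning, error=e.error)
    else:
        # Anything uncategorized is treated as a bug; hide details from the user.
        err_msg = "Internal error occurred. Error signature [%s]" % signature
        LOG.exception(e)
        data = dict(info="", warning="", error=err_msg)
    return json.dumps(data)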


@ -61,9 +61,9 @@ commands =
# H203: Use assertIs(Not)None to check for None (off by default). # H203: Use assertIs(Not)None to check for None (off by default).
enable-extensions = H106,H203 enable-extensions = H106,H203
exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,release-tag-* exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,release-tag-*
max-line-length = 80 max-line-length = 120
show-source = True show-source = True
ignore = E402,H306,H404,H405,W504,E501 ignore = E402,H306,H404,H405,W504,E501,H105
[testenv:flake8] [testenv:flake8]
commands = flake8 {posargs} commands = flake8 {posargs}