Resolve bashate and pep8 warnings

This update addresses existing bashate and pep8 warnings in
cgcs-patch, patch-alarm, and tsconfig. The bulk of these updates
are formatting changes, such as aligning with whitespace and
spacing conventions.

Story: 2003371
Task: 24433

Change-Id: I44b26d24788907bac0730a952d70ed4bafb87d90
Signed-off-by: Don Penney <don.penney@windriver.com>
This commit is contained in:
Don Penney 2018-08-08 19:36:58 -05:00
parent e5afe88d43
commit ae0314279f
33 changed files with 358 additions and 394 deletions

View File

@ -13,14 +13,12 @@ SYSTEM_CHANGED_FLAG=/var/run/node_is_patched
logfile=/var/log/patching.log logfile=/var/log/patching.log
function LOG() function LOG {
{
logger "$NAME: $*" logger "$NAME: $*"
echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
} }
if [ -f $SYSTEM_CHANGED_FLAG ] if [ -f $SYSTEM_CHANGED_FLAG ]; then
then
LOG "Node has been patched. Failing goenabled check." LOG "Node has been patched. Failing goenabled check."
exit 1 exit 1
fi fi

View File

@ -22,7 +22,7 @@ DAEMON="/usr/sbin/sw-patch-agent"
PIDFILE="/var/run/sw-patch-agent.pid" PIDFILE="/var/run/sw-patch-agent.pid"
PATCH_INSTALLING_FILE="/var/run/patch_installing" PATCH_INSTALLING_FILE="/var/run/patch_installing"
start() start()
{ {
if [ -e $PIDFILE ]; then if [ -e $PIDFILE ]; then
PIDDIR=/proc/$(cat $PIDFILE) PIDDIR=/proc/$(cat $PIDFILE)
@ -47,18 +47,15 @@ start()
fi fi
} }
stop() stop()
{ {
if [ -f $PATCH_INSTALLING_FILE ] if [ -f $PATCH_INSTALLING_FILE ]; then
then
echo "Patches are installing. Waiting for install to complete." echo "Patches are installing. Waiting for install to complete."
while [ -f $PATCH_INSTALLING_FILE ] while [ -f $PATCH_INSTALLING_FILE ]; do
do
# Verify the agent is still running # Verify the agent is still running
pid=$(cat $PATCH_INSTALLING_FILE) pid=$(cat $PATCH_INSTALLING_FILE)
cat /proc/$pid/cmdline 2>/dev/null | grep -q $DAEMON cat /proc/$pid/cmdline 2>/dev/null | grep -q $DAEMON
if [ $? -ne 0 ] if [ $? -ne 0 ]; then
then
echo "Patch agent not running." echo "Patch agent not running."
break break
fi fi

View File

@ -21,7 +21,7 @@ DESC="sw-patch-controller-daemon"
DAEMON="/usr/sbin/sw-patch-controller-daemon" DAEMON="/usr/sbin/sw-patch-controller-daemon"
PIDFILE="/var/run/sw-patch-controller-daemon.pid" PIDFILE="/var/run/sw-patch-controller-daemon.pid"
start() start()
{ {
if [ -e $PIDFILE ]; then if [ -e $PIDFILE ]; then
PIDDIR=/proc/$(cat $PIDFILE) PIDDIR=/proc/$(cat $PIDFILE)
@ -46,7 +46,7 @@ start()
fi fi
} }
stop() stop()
{ {
echo -n "Stopping $DESC..." echo -n "Stopping $DESC..."
start-stop-daemon --stop --quiet --pidfile $PIDFILE start-stop-daemon --stop --quiet --pidfile $PIDFILE

View File

@ -20,21 +20,17 @@ PATCHING_DIR=/opt/patching
logfile=/var/log/patching.log logfile=/var/log/patching.log
function LOG() function LOG {
{
logger "$NAME: $*" logger "$NAME: $*"
echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
} }
function LOG_TO_FILE() function LOG_TO_FILE {
{
echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
} }
function create_groups() function create_groups {
{ if [ -f $GROUPS_FILE ]; then
if [ -f $GROUPS_FILE ]
then
return 0 return 0
fi fi
@ -45,11 +41,9 @@ function create_groups()
EOF EOF
} }
function do_setup() function do_setup {
{
# Does the repo exist? # Does the repo exist?
if [ ! -d $REPO_DIR ] if [ ! -d $REPO_DIR ]; then
then
LOG "Creating repo" LOG "Creating repo"
mkdir -p $REPO_DIR mkdir -p $REPO_DIR
@ -59,8 +53,7 @@ function do_setup()
createrepo -g $GROUPS_FILE $REPO_DIR >> $logfile 2>&1 createrepo -g $GROUPS_FILE $REPO_DIR >> $logfile 2>&1
fi fi
if [ ! -d $PATCHING_DIR ] if [ ! -d $PATCHING_DIR ]; then
then
LOG "Creating $PATCHING_DIR" LOG "Creating $PATCHING_DIR"
mkdir -p $PATCHING_DIR mkdir -p $PATCHING_DIR
fi fi
@ -68,8 +61,7 @@ function do_setup()
# If we can ping the active controller, sync the repos # If we can ping the active controller, sync the repos
LOG_TO_FILE "ping -c 1 -w 1 controller" LOG_TO_FILE "ping -c 1 -w 1 controller"
ping -c 1 -w 1 controller >> $logfile 2>&1 || ping6 -c 1 -w 1 controller >> $logfile 2>&1 ping -c 1 -w 1 controller >> $logfile 2>&1 || ping6 -c 1 -w 1 controller >> $logfile 2>&1
if [ $? -ne 0 ] if [ $? -ne 0 ]; then
then
LOG "Cannot ping controller. Nothing to do" LOG "Cannot ping controller. Nothing to do"
return 0 return 0
fi fi

View File

@ -15,15 +15,12 @@ NAME=$(basename $0)
logfile=/var/log/patching.log logfile=/var/log/patching.log
function LOG_TO_FILE() function LOG_TO_FILE {
{
echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
} }
function check_for_rr_patch() function check_for_rr_patch {
{ if [ -f /var/run/node_is_patched_rr ]; then
if [ -f /var/run/node_is_patched_rr ]
then
echo echo
echo "Node has been patched and requires an immediate reboot." echo "Node has been patched and requires an immediate reboot."
echo echo
@ -32,14 +29,11 @@ function check_for_rr_patch()
fi fi
} }
function check_install_uuid() function check_install_uuid {
{
# Check whether our installed load matches the active controller # Check whether our installed load matches the active controller
CONTROLLER_UUID=`curl -sf http://controller/feed/rel-${SW_VERSION}/install_uuid` CONTROLLER_UUID=`curl -sf http://controller/feed/rel-${SW_VERSION}/install_uuid`
if [ $? -ne 0 ] if [ $? -ne 0 ]; then
then if [ "$HOSTNAME" = "controller-1" ]; then
if [ "$HOSTNAME" = "controller-1" ]
then
# If we're on controller-1, controller-0 may not have the install_uuid # If we're on controller-1, controller-0 may not have the install_uuid
# matching this release, if we're in an upgrade. If the file doesn't exist, # matching this release, if we're in an upgrade. If the file doesn't exist,
# bypass this check # bypass this check
@ -51,8 +45,7 @@ function check_install_uuid()
return 1 return 1
fi fi
if [ "$INSTALL_UUID" != "$CONTROLLER_UUID" ] if [ "$INSTALL_UUID" != "$CONTROLLER_UUID" ]; then
then
LOG_TO_FILE "This node is running a different load than the active controller and must be reinstalled" LOG_TO_FILE "This node is running a different load than the active controller and must be reinstalled"
echo "This node is running a different load than the active controller and must be reinstalled" echo "This node is running a different load than the active controller and must be reinstalled"
return 1 return 1
@ -69,8 +62,7 @@ if [ -f /etc/platform/installation_failed ] ; then
fi fi
# Clean up the RPM DB # Clean up the RPM DB
if [ ! -f /var/run/.rpmdb_cleaned ] if [ ! -f /var/run/.rpmdb_cleaned ]; then
then
LOG_TO_FILE "Cleaning RPM DB" LOG_TO_FILE "Cleaning RPM DB"
rm -f /var/lib/rpm/__db* rm -f /var/lib/rpm/__db*
touch /var/run/.rpmdb_cleaned touch /var/run/.rpmdb_cleaned
@ -82,28 +74,24 @@ fi
DELAY_SEC=120 DELAY_SEC=120
START=`date +%s` START=`date +%s`
FOUND=0 FOUND=0
while [ $(date +%s) -lt $(( ${START} + ${DELAY_SEC} )) ] while [ $(date +%s) -lt $(( ${START} + ${DELAY_SEC} )) ]; do
do
ping -c 1 controller > /dev/null 2>&1 || ping6 -c 1 controller > /dev/null 2>&1 ping -c 1 controller > /dev/null 2>&1 || ping6 -c 1 controller > /dev/null 2>&1
if [ $? -eq 0 ] if [ $? -eq 0 ]; then
then
FOUND=1 FOUND=1
break break
fi fi
sleep 1 sleep 1
done done
if [ ${FOUND} -eq 0 ] if [ ${FOUND} -eq 0 ]; then
then # 'controller' is not available, just exit
# 'controller' is not available, just exit LOG_TO_FILE "Unable to contact active controller (controller). Boot will continue."
LOG_TO_FILE "Unable to contact active controller (controller). Boot will continue." exit 1
exit 1
fi fi
case "$1" in case "$1" in
start) start)
if [ "${system_mode}" = "simplex" ] if [ "${system_mode}" = "simplex" ]; then
then
# On a simplex CPE, we need to launch the http server first, # On a simplex CPE, we need to launch the http server first,
# before we can do the patch installation # before we can do the patch installation
LOG_TO_FILE "***** Launching lighttpd *****" LOG_TO_FILE "***** Launching lighttpd *****"
@ -117,8 +105,7 @@ case "$1" in
/etc/init.d/lighttpd stop /etc/init.d/lighttpd stop
else else
check_install_uuid check_install_uuid
if [ $? -ne 0 ] if [ $? -ne 0 ]; then
then
# The INSTALL_UUID doesn't match the active controller, so exit # The INSTALL_UUID doesn't match the active controller, so exit
exit 1 exit 1
fi fi

View File

@ -35,11 +35,11 @@ LOCAL_PATCH_DATA_DIR = "export/patch_data"
ORDER_FILE = "patch_order" ORDER_FILE = "patch_order"
ARCH_DEFAULT = "x86_64" ARCH_DEFAULT = "x86_64"
METADATA_TAGS = [ 'ID', 'SW_VERSION', 'SUMMARY', 'DESCRIPTION', METADATA_TAGS = ['ID', 'SW_VERSION', 'SUMMARY', 'DESCRIPTION',
'INSTALL_INSTRUCTIONS', 'WARNINGS', 'STATUS', 'INSTALL_INSTRUCTIONS', 'WARNINGS', 'STATUS',
'UNREMOVABLE', 'REBOOT_REQUIRED' ] 'UNREMOVABLE', 'REBOOT_REQUIRED']
RMP_EXCLUDES = [ '-dev-', '-dbg-', '-doc-' ] RMP_EXCLUDES = ['-dev-', '-dbg-', '-doc-']
BUILD_TYPES = [ 'std', 'rt' ] BUILD_TYPES = ['std', 'rt']
SAME = 0 SAME = 0
@ -74,7 +74,7 @@ capture_source_flag = False
capture_rpms_flag = False capture_rpms_flag = False
capture_source_path = None capture_source_path = None
logfile = "/var/log/patching.log" logfile = "/var/log/patching.log"
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
@ -96,21 +96,22 @@ def configure_logging(logtofile=True, level=logging.DEBUG):
else: else:
logging.basicConfig(level=level) logging.basicConfig(level=level)
def rev_lt(num1, num2): def rev_lt(num1, num2):
n1w=num1.split('.') n1w = num1.split('.')
n2w=num2.split('.') n2w = num2.split('.')
while True: while True:
try: try:
n1=int(n1w.pop(0)) n1 = int(n1w.pop(0))
except: except:
return True return True
try: try:
n2=int(n2w.pop(0)) n2 = int(n2w.pop(0))
except: except:
return False return False
if n1<n2: if n1 < n2:
return True return True
if n1>n2: if n1 > n2:
return False return False
@ -126,6 +127,7 @@ def add_text_tag_to_xml(parent, name, text):
tag.text = text tag.text = text
return tag return tag
def handle_exception(exc_type, exc_value, exc_traceback): def handle_exception(exc_type, exc_value, exc_traceback):
""" """
Exception handler to log any uncaught exceptions Exception handler to log any uncaught exceptions
@ -134,6 +136,7 @@ def handle_exception(exc_type, exc_value, exc_traceback):
exc_info=(exc_type, exc_value, exc_traceback)) exc_info=(exc_type, exc_value, exc_traceback))
sys.__excepthook__(exc_type, exc_value, exc_traceback) sys.__excepthook__(exc_type, exc_value, exc_traceback)
def write_xml_file(top, fname): def write_xml_file(top, fname):
# Generate the file, in a readable format if possible # Generate the file, in a readable format if possible
outfile = open(fname, 'w') outfile = open(fname, 'w')
@ -147,6 +150,7 @@ def write_xml_file(top, fname):
else: else:
outfile.write(minidom.parseString(rough_xml).toprettyxml(indent=" ")) outfile.write(minidom.parseString(rough_xml).toprettyxml(indent=" "))
class PatchRecipeError(Exception): class PatchRecipeError(Exception):
"""Base class for patch recipe exceptions.""" """Base class for patch recipe exceptions."""
@ -156,26 +160,32 @@ class PatchRecipeError(Exception):
def __str__(self): def __str__(self):
return self.message or "" return self.message or ""
class PatchRecipeXMLFail(PatchRecipeError): class PatchRecipeXMLFail(PatchRecipeError):
"""Problem parsing XML of patch recipe.""" """Problem parsing XML of patch recipe."""
pass pass
class PatchBuildFail(PatchRecipeError): class PatchBuildFail(PatchRecipeError):
"""Problem Compiling the patch.""" """Problem Compiling the patch."""
pass pass
class PatchPackagingFail(PatchRecipeError): class PatchPackagingFail(PatchRecipeError):
"""Problem assembling the patch.""" """Problem assembling the patch."""
pass pass
class PatchPackagingMiss(PatchRecipeError): class PatchPackagingMiss(PatchRecipeError):
"""Problem assembling the patch - might be correctable.""" """Problem assembling the patch - might be correctable."""
pass pass
class PatchRequirementFail(PatchRecipeError): class PatchRequirementFail(PatchRecipeError):
"""Missing Requirement.""" """Missing Requirement."""
pass pass
class PatchRecipeCmdFail(PatchRecipeError): class PatchRecipeCmdFail(PatchRecipeError):
"""Shell command Failure.""" """Shell command Failure."""
pass pass
@ -241,7 +251,7 @@ class PatchList:
if patch == patch_id: if patch == patch_id:
return self.patch_data[patch] return self.patch_data[patch]
return None return None
def _validate_patch_order(self): def _validate_patch_order(self):
fix_local_order = False fix_local_order = False
remote_order = [] remote_order = []
@ -266,7 +276,7 @@ class PatchList:
break break
if fix_local_order: if fix_local_order:
print "_validate_patch_order: fix patch order" print "_validate_patch_order: fix patch order"
f = open(self._std_local_path(self.order_file),'w') f = open(self._std_local_path(self.order_file), 'w')
for patch_id in validated_order: for patch_id in validated_order:
f.write("%s\n" % patch_id) f.write("%s\n" % patch_id)
print "_validate_patch_order: %s" % patch_id print "_validate_patch_order: %s" % patch_id
@ -282,7 +292,7 @@ class PatchList:
os.chdir(workdir) os.chdir(workdir)
issue_cmd("mkdir -p %s" % self._std_remote_copy_path("")) issue_cmd("mkdir -p %s" % self._std_remote_copy_path(""))
os.chdir(self._std_remote_copy_path("")) os.chdir(self._std_remote_copy_path(""))
if not os.path.isdir(self.patch_git): if not os.path.isdir(self.patch_git):
issue_cmd("git clone ssh://%s@vxgit.wrs.com:7999/cgcs/%s.git" % (os.environ['USER'], self.patch_git)) issue_cmd("git clone ssh://%s@vxgit.wrs.com:7999/cgcs/%s.git" % (os.environ['USER'], self.patch_git))
os.chdir(self.patch_git) os.chdir(self.patch_git)
@ -327,7 +337,7 @@ class PatchList:
for patch_id in self.patches_to_deliver: for patch_id in self.patches_to_deliver:
os.chdir(workdir) os.chdir(workdir)
patch = "%s.patch" % patch_id patch = "%s.patch" % patch_id
print "signing patch '%s'" % self._std_local_path(patch) print "signing patch '%s'" % self._std_local_path(patch)
try: try:
subprocess.check_call(["sign_patch_formal.sh", self._std_local_path(patch)]) subprocess.check_call(["sign_patch_formal.sh", self._std_local_path(patch)])
@ -377,7 +387,6 @@ class PatchList:
xml_path = self._std_local_path(self._std_xml_patch_recipe_name(patch_id)) xml_path = self._std_local_path(self._std_xml_patch_recipe_name(patch_id))
self.add(xml_path, built=True, fix=False) self.add(xml_path, built=True, fix=False)
def get_implicit_requires(self, patch_id, recipies): def get_implicit_requires(self, patch_id, recipies):
list = [] list = []
for r in recipies: for r in recipies:
@ -513,7 +522,6 @@ class PatchList:
prd.gen_xml(fname=self._std_local_path(self._std_xml_patch_recipe_name(prd.patch_id))) prd.gen_xml(fname=self._std_local_path(self._std_xml_patch_recipe_name(prd.patch_id)))
def build_patches(self): def build_patches(self):
global capture_source_flag global capture_source_flag
# While unbuild patches exist # While unbuild patches exist
@ -527,7 +535,7 @@ class PatchList:
if rc: if rc:
# This patch is ready to build, build it now # This patch is ready to build, build it now
print "Ready to build patch %s." % patch_id print "Ready to build patch %s." % patch_id
rc = prd.build_patch() rc = prd.build_patch()
if rc: if rc:
# append new built patch to order file # append new built patch to order file
issue_cmd("sed -i '/^%s$/d' %s" % (patch_id, self._std_local_path(self.order_file))) issue_cmd("sed -i '/^%s$/d' %s" % (patch_id, self._std_local_path(self.order_file)))
@ -544,8 +552,8 @@ class PatchList:
if capture_source_flag: if capture_source_flag:
prd.capture_source() prd.capture_source()
# It is important to break here. # It is important to break here.
# We just edited the patches_to_build which an enclosing for loop is iterating over. # We just edited the patches_to_build which an enclosing for loop is iterating over.
# without the break, the result is skipping patches and/or building patches out of order. # without the break, the result is skipping patches and/or building patches out of order.
break break
else: else:
@ -635,7 +643,6 @@ class PackageData:
raise PatchRecipeXMLFail(msg) raise PatchRecipeXMLFail(msg)
sys.exit(2) sys.exit(2)
def gen_xml(self, e_package): def gen_xml(self, e_package):
for personality in self.personalities: for personality in self.personalities:
add_text_tag_to_xml(e_package, 'PERSONALITY', personality) add_text_tag_to_xml(e_package, 'PERSONALITY', personality)
@ -670,17 +677,17 @@ class PackageData:
file_path = "%s/%s" % (rpm_dir, file) file_path = "%s/%s" % (rpm_dir, file)
if os.path.isfile(file_path): if os.path.isfile(file_path):
print "cleaning match %s\n" % file print "cleaning match %s\n" % file
rpm_name_cmd = [ "rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s" % file_path ] rpm_name_cmd = ["rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s" % file_path]
rpm_name = issue_cmd_w_stdout(rpm_name_cmd) rpm_name = issue_cmd_w_stdout(rpm_name_cmd)
if rpm_name == self.name: if rpm_name == self.name:
rpm_release_cmd = [ "rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{RELEASE}", "%s" % file_path ] rpm_release_cmd = ["rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{RELEASE}", "%s" % file_path]
rpm_release = issue_cmd_w_stdout(rpm_release_cmd) rpm_release = issue_cmd_w_stdout(rpm_release_cmd)
print "cleaning release %s" % rpm_release print "cleaning release %s" % rpm_release
rm_cmd = "rm -f %s/%s-*-%s.%s.rpm" % (rpm_dir, self.name, rpm_release, arch) rm_cmd = "rm -f %s/%s-*-%s.%s.rpm" % (rpm_dir, self.name, rpm_release, arch)
issue_cmd(rm_cmd) issue_cmd(rm_cmd)
def clean(self, prebuilt=False): def clean(self, prebuilt=False):
print "package clean" print "package clean"
self._clean_rpms(prebuilt=prebuilt) self._clean_rpms(prebuilt=prebuilt)
def _add_rpms(self, pf, arch=ARCH_DEFAULT, fatal=True, prebuilt=False): def _add_rpms(self, pf, arch=ARCH_DEFAULT, fatal=True, prebuilt=False):
@ -738,7 +745,7 @@ class PackageData:
# break # break
if not reject: if not reject:
rpm_name_cmd = [ "rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s/%s" % (rpm_dir, file) ] rpm_name_cmd = ["rpm", "-qp", "--dbpath", temp_rpm_db_dir, "--queryformat", "%{NAME}", "%s/%s" % (rpm_dir, file)]
rpm_name = issue_cmd_w_stdout(rpm_name_cmd) rpm_name = issue_cmd_w_stdout(rpm_name_cmd)
if rpm_name != self.name: if rpm_name != self.name:
print "reject file '%s' due to rpm_name '%s'" % (file, rpm_name) print "reject file '%s' due to rpm_name '%s'" % (file, rpm_name)
@ -784,6 +791,7 @@ class PackageData:
raise PatchPackagingFail(msg) raise PatchPackagingFail(msg)
sys.exit(2) sys.exit(2)
class RecipeData: class RecipeData:
""" """
Recipe data Recipe data
@ -791,7 +799,7 @@ class RecipeData:
def __init__(self, e): def __init__(self, e):
self.name = None self.name = None
self.prebuilt = False self.prebuilt = False
self.packages = collections.OrderedDict() # map package name to PackageData self.packages = collections.OrderedDict() # map package name to PackageData
self._parse_recipe(e) self._parse_recipe(e)
def __str__(self): def __str__(self):
@ -863,7 +871,7 @@ class RecipeData:
self.packages[package].gen_xml(e_package) self.packages[package].gen_xml(e_package)
def clean(self): def clean(self):
print "recipe clean" print "recipe clean"
if not self.prebuilt: if not self.prebuilt:
for package in self.packages: for package in self.packages:
self.packages[package].clean(prebuilt=self.prebuilt) self.packages[package].clean(prebuilt=self.prebuilt)
@ -890,7 +898,7 @@ class RecipeData:
if os.path.isfile(path): if os.path.isfile(path):
rc = issue_cmd_rc("%s %s %s >> %s/%s.log" % (path, self.name, extra_arg, os.environ['DEST'], os.environ['PREFIX'])) rc = issue_cmd_rc("%s %s %s >> %s/%s.log" % (path, self.name, extra_arg, os.environ['DEST'], os.environ['PREFIX']))
def build_patch(self, pf, fatal=True): def build_patch(self, pf, fatal=True):
for package in self.packages: for package in self.packages:
self.packages[package].build_patch(pf, fatal=fatal, prebuilt=self.prebuilt) self.packages[package].build_patch(pf, fatal=fatal, prebuilt=self.prebuilt)
@ -903,6 +911,7 @@ class RecipeData:
print "=========== is_prebuilt prebuilt=%s for %s =============" % (self.prebuilt, self.name) print "=========== is_prebuilt prebuilt=%s for %s =============" % (self.prebuilt, self.name)
return self.prebuilt return self.prebuilt
class PatchRecipeData: class PatchRecipeData:
""" """
Patch recipe data Patch recipe data
@ -951,7 +960,6 @@ class PatchRecipeData:
rc = rc2 rc = rc2
return rc return rc
def set_implicit_requires(self, patch_list): def set_implicit_requires(self, patch_list):
self.auto_requires = patch_list.get_implicit_requires(self.patch_id, self.recipies.keys()) self.auto_requires = patch_list.get_implicit_requires(self.patch_id, self.recipies.keys())
@ -1100,8 +1108,8 @@ class PatchRecipeData:
def recursive_print(self, e, depth=0): def recursive_print(self, e, depth=0):
for child in e: for child in e:
print "%sTag: %s, attr: %s, text: %s" % (" "*depth, child.tag, child.attrib, child.text and child.text.strip() or "") print "%sTag: %s, attr: %s, text: %s" % (" " * depth, child.tag, child.attrib, child.text and child.text.strip() or "")
self.recursive_print(child.getchildren(), depth+1) self.recursive_print(child.getchildren(), depth + 1)
# for child in e.iter('BUILD'): # for child in e.iter('BUILD'):
# print "Tag: %s, attr: %s" % (child.tag, child.attrib) # print "Tag: %s, attr: %s" % (child.tag, child.attrib)
@ -1162,7 +1170,7 @@ class PatchRecipeData:
write_xml_file(e_top, fname) write_xml_file(e_top, fname)
def __str__(self): def __str__(self):
return "[ patch_id: %s, context: %s, metadata: %s, requires: %s, recipies: %s ]" % (str(self.patch_id), str(self.build_context), str(self.metadata), str(self.requires), str(self.recipies,keys())) return "[ patch_id: %s, context: %s, metadata: %s, requires: %s, recipies: %s ]" % (str(self.patch_id), str(self.build_context), str(self.metadata), str(self.requires), str(self.recipies, keys()))
def myprint(self, indent=""): def myprint(self, indent=""):
print "patch_id: %s" % str(self.patch_id) print "patch_id: %s" % str(self.patch_id)
@ -1205,7 +1213,7 @@ class PatchRecipeData:
if self.build_context is not None: if self.build_context is not None:
# Before checkout, make sure there are no untracked temporary files # Before checkout, make sure there are no untracked temporary files
# left by a previous build that may prevent the checkout... # left by a previous build that may prevent the checkout...
# e.g. horizon's pbr-2015.1.0-py2.7.egg directory is a build artifact # e.g. horizon's pbr-2015.1.0-py2.7.egg directory is a build artifact
issue_cmd("for d in $(find . -type d -name .git | xargs --max-args=1 dirname); do (cd $d; echo $d; git clean -df; git reset --hard; git ls-files --others --exclude-standard | xargs --no-run-if-empty rm; if [ ! -f .subgits ]; then if [ -f .gitignore ]; then git ls-files --others --ignored --exclude-from=.gitignore | xargs --no-run-if-empty rm; fi; fi); done") issue_cmd("for d in $(find . -type d -name .git | xargs --max-args=1 dirname); do (cd $d; echo $d; git clean -df; git reset --hard; git ls-files --others --exclude-standard | xargs --no-run-if-empty rm; if [ ! -f .subgits ]; then if [ -f .gitignore ]; then git ls-files --others --ignored --exclude-from=.gitignore | xargs --no-run-if-empty rm; fi; fi); done")
issue_cmd("wrgit checkout %s" % self.build_context) issue_cmd("wrgit checkout %s" % self.build_context)
@ -1229,7 +1237,6 @@ class PatchRecipeData:
return True return True
def _get_prev_patch_id(self, patch_id): def _get_prev_patch_id(self, patch_id):
patch_order_file = self.pl._std_local_path(self.pl.order_file) patch_order_file = self.pl._std_local_path(self.pl.order_file)
prev_patch_id = None prev_patch_id = None
@ -1240,7 +1247,7 @@ class PatchRecipeData:
return prev_patch_id return prev_patch_id
prev_patch_id = this_patch_id prev_patch_id = this_patch_id
return prev_patch_id return prev_patch_id
def _get_rpm_db_path(self, patch_id): def _get_rpm_db_path(self, patch_id):
rpm_db = self.pl._std_local_path("%s.rpm_db" % patch_id) rpm_db = self.pl._std_local_path("%s.rpm_db" % patch_id)
return rpm_db return rpm_db
@ -1257,7 +1264,7 @@ class PatchRecipeData:
issue_cmd("rpm -qp --dbpath %s --queryformat '%s %%{NAME} %%{RELEASE}\n' %s/*rpm >> %s 2> /dev/null" % (temp_rpm_db_dir, subdir, rpm_sub_dir, rpm_db)) issue_cmd("rpm -qp --dbpath %s --queryformat '%s %%{NAME} %%{RELEASE}\n' %s/*rpm >> %s 2> /dev/null" % (temp_rpm_db_dir, subdir, rpm_sub_dir, rpm_db))
def _read_rpm_db(self, patch_id): def _read_rpm_db(self, patch_id):
release_map={} release_map = {}
rpm_db_dir = "export/patch_data" rpm_db_dir = "export/patch_data"
rpm_db = self._get_rpm_db_path(patch_id) rpm_db = self._get_rpm_db_path(patch_id)
with open(rpm_db) as f: with open(rpm_db) as f:
@ -1276,7 +1283,7 @@ class PatchRecipeData:
delim = "_" delim = "_"
words = self.patch_id.split(delim) words = self.patch_id.split(delim)
l = len(words[-1]) l = len(words[-1])
words[-1] = '0'*l words[-1] = '0' * l
prev_patch_id = delim.join(words) prev_patch_id = delim.join(words)
prev_release_map = self._read_rpm_db(prev_patch_id) prev_release_map = self._read_rpm_db(prev_patch_id)
release_map = self._read_rpm_db(self.patch_id) release_map = self._read_rpm_db(self.patch_id)
@ -1290,7 +1297,7 @@ class PatchRecipeData:
os.environ['DEST'] = "%s/export/patch_source/%s" % (os.environ['MY_PATCH_WORKSPACE'], self.patch_id) os.environ['DEST'] = "%s/export/patch_source/%s" % (os.environ['MY_PATCH_WORKSPACE'], self.patch_id)
issue_cmd("mkdir -p %s" % os.environ['DEST']) issue_cmd("mkdir -p %s" % os.environ['DEST'])
for recipe in self.recipies.keys(): for recipe in self.recipies.keys():
print "capture source of recipe %s" % recipe print "capture source of recipe %s" % recipe
self.recipies[recipe].capture_source() self.recipies[recipe].capture_source()
def build_patch(self, local_path="."): def build_patch(self, local_path="."):
@ -1305,8 +1312,8 @@ class PatchRecipeData:
recipe_str += recipe + " " recipe_str += recipe + " "
if not self.recipies[recipe].is_prebuilt(): if not self.recipies[recipe].is_prebuilt():
build_recipe_str += recipe + " " build_recipe_str += recipe + " "
print "recipe_str = %s" % recipe_str print "recipe_str = %s" % recipe_str
print "build_recipe_str = %s" % build_recipe_str print "build_recipe_str = %s" % build_recipe_str
if recipe_str == "": if recipe_str == "":
msg = "No recipies for patch %s" % self.patch_id msg = "No recipies for patch %s" % self.patch_id
LOG.exception(msg) LOG.exception(msg)
@ -1326,15 +1333,15 @@ class PatchRecipeData:
if not pre_compiled_flag: if not pre_compiled_flag:
# compile patch # compile patch
os.chdir(workdir) os.chdir(workdir)
print "pre clean" print "pre clean"
if build_recipe_str == "": if build_recipe_str == "":
print " ... nothing to clean" print " ... nothing to clean"
else: else:
issue_cmd("build-pkgs --no-build-info --clean %s" % build_recipe_str) issue_cmd("build-pkgs --no-build-info --clean %s" % build_recipe_str)
for recipe in self.recipies.keys(): for recipe in self.recipies.keys():
print "pre clean recipe %s" % recipe print "pre clean recipe %s" % recipe
self.recipies[recipe].clean() self.recipies[recipe].clean()
print "Build" print "Build"
if build_recipe_str == "": if build_recipe_str == "":
print " ... nothing to build" print " ... nothing to build"
else: else:
@ -1377,7 +1384,7 @@ class PatchRecipeData:
if not pre_compiled_flag: if not pre_compiled_flag:
self.recipies[recipe].build_patch(pf, fatal=True) self.recipies[recipe].build_patch(pf, fatal=True)
else: else:
try: try:
self.recipies[recipe].build_patch(pf, fatal=False) self.recipies[recipe].build_patch(pf, fatal=False)
except PatchPackagingMiss: except PatchPackagingMiss:
print "Warning: attempting rebuild of recipe %s" % self.recipies[recipe].name print "Warning: attempting rebuild of recipe %s" % self.recipies[recipe].name
@ -1385,20 +1392,21 @@ class PatchRecipeData:
issue_cmd("build-pkgs --no-build-info --careful %s" % self.recipies[recipe].name) issue_cmd("build-pkgs --no-build-info --careful %s" % self.recipies[recipe].name)
self.recipies[recipe].build_patch(pf, fatal=True) self.recipies[recipe].build_patch(pf, fatal=True)
local_path = self.pl._std_local_path("")
local_path=self.pl._std_local_path("")
print "=== local_path = %s ===" % local_path print "=== local_path = %s ===" % local_path
pf.gen_patch(outdir=local_path) pf.gen_patch(outdir=local_path)
return True return True
def _tag_build_context(): def _tag_build_context():
os.chdir(srcdir) os.chdir(srcdir)
issue_cmd("for e in . `wrgit all-core-gits` ; do (cd $e ; git tag v%s) done" % self.patch_id) issue_cmd("for e in . `wrgit all-core-gits` ; do (cd $e ; git tag v%s) done" % self.patch_id)
def read_build_info(): def read_build_info():
try: try:
build_info_find_cmd = [ "find", "std/rpmbuild/RPMS/", "-name", "build-info-[0-9]*.x86_64.rpm" ] build_info_find_cmd = ["find", "std/rpmbuild/RPMS/", "-name", "build-info-[0-9]*.x86_64.rpm"]
build_info_path = issue_cmd_w_stdout(build_info_find_cmd) build_info_path = issue_cmd_w_stdout(build_info_find_cmd)
if build_info_path == "": if build_info_path == "":
issue_cmd("build-pkgs --no-descendants build-info") issue_cmd("build-pkgs --no-descendants build-info")
@ -1411,7 +1419,7 @@ def read_build_info():
continue continue
if len(line) == 0: if len(line) == 0:
continue continue
name, var = line.partition("=")[::2] name, var = line.partition("=")[::2]
name = name.strip() name = name.strip()
var = var.strip() var = var.strip()
@ -1422,10 +1430,12 @@ def read_build_info():
return False return False
return True return True
def patch_id_to_tag(patch_id): def patch_id_to_tag(patch_id):
tag = "v%s" % patch_id tag = "v%s" % patch_id
return tag return tag
def validate_tag(tag): def validate_tag(tag):
try: try:
cmd = "git tag | grep %s" % tag cmd = "git tag | grep %s" % tag
@ -1437,6 +1447,7 @@ def validate_tag(tag):
return False return False
return True return True
def issue_cmd_w_stdout(cmd): def issue_cmd_w_stdout(cmd):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out = p.communicate()[0] out = p.communicate()[0]
@ -1447,7 +1458,7 @@ def issue_cmd_w_stdout(cmd):
print msg print msg
raise PatchRecipeCmdFail(msg) raise PatchRecipeCmdFail(msg)
return out return out
def issue_cmd(cmd): def issue_cmd(cmd):
print "CMD: %s" % cmd print "CMD: %s" % cmd
@ -1458,6 +1469,7 @@ def issue_cmd(cmd):
print msg print msg
raise PatchRecipeCmdFail(msg) raise PatchRecipeCmdFail(msg)
def issue_cmd_no_raise(cmd): def issue_cmd_no_raise(cmd):
print "CMD: %s" % cmd print "CMD: %s" % cmd
rc = subprocess.call(cmd, shell=True) rc = subprocess.call(cmd, shell=True)
@ -1466,11 +1478,13 @@ def issue_cmd_no_raise(cmd):
LOG.exception(msg) LOG.exception(msg)
print msg print msg
def issue_cmd_rc(cmd): def issue_cmd_rc(cmd):
print "CMD: %s" % cmd print "CMD: %s" % cmd
rc = subprocess.call(cmd, shell=True) rc = subprocess.call(cmd, shell=True)
return rc return rc
def set_capture_source_path(): def set_capture_source_path():
global capture_source_path global capture_source_path
my_repo = None my_repo = None
@ -1491,6 +1505,7 @@ def set_capture_source_path():
if rc == 0: if rc == 0:
capture_source_path = new_path capture_source_path = new_path
def capture_rpms(): def capture_rpms():
for build_type in BUILD_TYPES: for build_type in BUILD_TYPES:
src_rpm_dir = "%s/%s/%s" % (workdir, build_type, RPM_DIR) src_rpm_dir = "%s/%s/%s" % (workdir, build_type, RPM_DIR)
@ -1499,12 +1514,14 @@ def capture_rpms():
issue_cmd("mkdir -p %s" % dest_rpm_dir) issue_cmd("mkdir -p %s" % dest_rpm_dir)
issue_cmd("rsync -avu %s/*.rpm %s" % (src_rpm_dir, dest_rpm_dir)) issue_cmd("rsync -avu %s/*.rpm %s" % (src_rpm_dir, dest_rpm_dir))
def modify_patch_usage(): def modify_patch_usage():
msg = "modify_patch [ --obsolete | --released | --development ] [ --sw_version <version> --id <patch_id> | --file <patch_path.patch> ]" msg = "modify_patch [ --obsolete | --released | --development ] [ --sw_version <version> --id <patch_id> | --file <patch_path.patch> ]"
LOG.exception(msg) LOG.exception(msg)
print msg print msg
sys.exit(1) sys.exit(1)
def modify_patch(): def modify_patch():
global workdir global workdir
global temp_rpm_db_dir global temp_rpm_db_dir
@ -1523,11 +1540,10 @@ def modify_patch():
'sw_version=', 'sw_version=',
'id=', 'id=',
'file=', 'file=',
]) ])
except getopt.GetoptError as e: except getopt.GetoptError as e:
print str(e) print str(e)
modify_patch_usage() modify_patch_usage()
patch_path = None patch_path = None
cwd = os.getcwd() cwd = os.getcwd()
@ -1572,7 +1588,7 @@ def modify_patch():
temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb") temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb")
if patch_path is not None: if patch_path is not None:
rc = PatchFile.modify_patch(patch_path, "status", new_status) rc = PatchFile.modify_patch(patch_path, "status", new_status)
assert(rc == True) assert(rc)
print "Patch '%s' has been modified to status '%s'" % (patch_path, new_status) print "Patch '%s' has been modified to status '%s'" % (patch_path, new_status)
else: else:
if sw_version is None or patch_id is None: if sw_version is None or patch_id is None:
@ -1588,7 +1604,7 @@ def modify_patch():
print "patch_file_name = %s" % patch_file_name print "patch_file_name = %s" % patch_file_name
print "patch_path = %s" % patch_path print "patch_path = %s" % patch_path
rc = PatchFile.modify_patch(patch_path, "status", new_status) rc = PatchFile.modify_patch(patch_path, "status", new_status)
assert(rc == True) assert(rc)
os.chdir(pl._std_patch_git_path("..")) os.chdir(pl._std_patch_git_path(".."))
issue_cmd("git add %s" % patch_path) issue_cmd("git add %s" % patch_path)
issue_cmd("git commit -m \"Modify status of patch '%s' to '%s'\"" % (patch_id, new_status)) issue_cmd("git commit -m \"Modify status of patch '%s' to '%s'\"" % (patch_id, new_status))
@ -1652,7 +1668,7 @@ def modify_patch():
human_release = "Titanium Cloud 4" human_release = "Titanium Cloud 4"
windshare_folder = "Titanium-Cloud-4" windshare_folder = "Titanium-Cloud-4"
if sw_version == "18.03" || sw_version == "18.03" if sw_version == "18.03" or sw_version == "18.03":
local_dest = "/folk/cgts/rel-ops/%s/patches/" % sw_version local_dest = "/folk/cgts/rel-ops/%s/patches/" % sw_version
deliver_dest = "/folk/prj-wrlinux/release/tis/tis-5/update/ti%s-%s/Titanium-Cloud-5/patches" % (ts, munged_patch_id) deliver_dest = "/folk/prj-wrlinux/release/tis/tis-5/update/ti%s-%s/Titanium-Cloud-5/patches" % (ts, munged_patch_id)
human_release = "Titanium Cloud 5" human_release = "Titanium Cloud 5"
@ -1700,7 +1716,7 @@ def modify_patch():
print "Failed to modify patch!" print "Failed to modify patch!"
finally: finally:
shutil.rmtree(workdir) shutil.rmtree(workdir)
def query_patch_usage(): def query_patch_usage():
msg = "query_patch [ --sw_version <version> --id <patch_id> | --file <patch_path.patch> ] [ --field <field_name> ]" msg = "query_patch [ --sw_version <version> --id <patch_id> | --file <patch_path.patch> ] [ --field <field_name> ]"
@ -1711,6 +1727,7 @@ def query_patch_usage():
print msg print msg
sys.exit(1) sys.exit(1)
def query_patch(): def query_patch():
global workdir global workdir
global temp_rpm_db_dir global temp_rpm_db_dir
@ -1727,12 +1744,11 @@ def query_patch():
'id=', 'id=',
'file=', 'file=',
'field=', 'field=',
]) ])
except getopt.GetoptError as e: except getopt.GetoptError as e:
print str(e) print str(e)
query_patch_usage() query_patch_usage()
patch_path = None patch_path = None
cwd = os.getcwd() cwd = os.getcwd()
field = None field = None
@ -1758,7 +1774,7 @@ def query_patch():
temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb") temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb")
if patch_path is not None: if patch_path is not None:
answer = PatchFile.query_patch(patch_path, field=field) answer = PatchFile.query_patch(patch_path, field=field)
field_order=['id', 'sw_version', 'status', 'cert', 'reboot_required', 'unremovable', 'summary', 'description', 'install_instructions', 'warnings'] field_order = ['id', 'sw_version', 'status', 'cert', 'reboot_required', 'unremovable', 'summary', 'description', 'install_instructions', 'warnings']
for k in field_order: for k in field_order:
if k in answer.keys(): if k in answer.keys():
print "%s: '%s'" % (k, answer[k]) print "%s: '%s'" % (k, answer[k])
@ -1794,6 +1810,7 @@ def make_patch_usage():
print msg print msg
sys.exit(1) sys.exit(1)
def make_patch(): def make_patch():
global workdir global workdir
global temp_rpm_db_dir global temp_rpm_db_dir
@ -1825,7 +1842,7 @@ def make_patch():
'srcdir=', 'srcdir=',
'branch=', 'branch=',
'sw_version=', 'sw_version=',
]) ])
except getopt.GetoptError as e: except getopt.GetoptError as e:
print str(e) print str(e)
make_patch_usage() make_patch_usage()
@ -1894,14 +1911,14 @@ def make_patch():
# TODO if branch is not None or workdir is not None or srcdir is not None: # TODO if branch is not None or workdir is not None or srcdir is not None:
# TODO print "If --formal is specified, then srcdir, workdir and branch are automatci and must not be specified" # TODO print "If --formal is specified, then srcdir, workdir and branch are automatci and must not be specified"
# TODO make_patch_usage() # TODO make_patch_usage()
if pre_compiled_flag and formal_flag: if pre_compiled_flag and formal_flag:
print "invalid options: --formal and --pre-compiled can't be used together." print "invalid options: --formal and --pre-compiled can't be used together."
make_patch_usage() make_patch_usage()
if workdir is not None: if workdir is not None:
if not os.path.isdir(workdir): if not os.path.isdir(workdir):
print "invalid directory: workdir = '%s'" % workdir print "invalid directory: workdir = '%s'" % workdir
make_patch_usage() make_patch_usage()
temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb") temp_rpm_db_dir = "%s/%s" % (workdir, ".rpmdb")
@ -1916,7 +1933,6 @@ def make_patch():
print "invalid patch file path: '%s'" % patch print "invalid patch file path: '%s'" % patch
make_patch_usage() make_patch_usage()
if 'MY_REPO' in os.environ: if 'MY_REPO' in os.environ:
MY_REPO = os.path.normpath(os.path.join(cwd, os.path.expanduser(os.environ['MY_REPO']))) MY_REPO = os.path.normpath(os.path.join(cwd, os.path.expanduser(os.environ['MY_REPO'])))
else: else:
@ -1952,7 +1968,7 @@ def make_patch():
else: else:
print "ERROR: environment variable 'MY_BUILD_CFG' is not defined" print "ERROR: environment variable 'MY_BUILD_CFG' is not defined"
sys.exit(1) sys.exit(1)
if 'MY_BUILD_DIR' in os.environ: if 'MY_BUILD_DIR' in os.environ:
MY_BUILD_DIR = os.path.normpath(os.path.join(cwd, os.path.expanduser(os.environ['MY_BUILD_DIR']))) MY_BUILD_DIR = os.path.normpath(os.path.join(cwd, os.path.expanduser(os.environ['MY_BUILD_DIR'])))
else: else:
@ -1989,6 +2005,5 @@ def make_patch():
# sign formal patch # sign formal patch
pl.sign_official_patches() pl.sign_official_patches()
# deliver to git repo # deliver to git repo
pl.deliver_official_patch() pl.deliver_official_patch()

View File

@ -4,4 +4,3 @@ Copyright (c) 2014-2017 Wind River Systems, Inc.
SPDX-License-Identifier: Apache-2.0 SPDX-License-Identifier: Apache-2.0
""" """

View File

@ -9,19 +9,19 @@ from oslo_config import cfg
API_SERVICE_OPTS = [ API_SERVICE_OPTS = [
cfg.StrOpt('api_bind_ip', cfg.StrOpt('api_bind_ip',
default='127.0.0.1', default='127.0.0.1',
help='IP for the Patching controller API server to bind to', help='IP for the Patching controller API server to bind to',
), ),
cfg.IntOpt('api_port', cfg.IntOpt('api_port',
default=5487, default=5487,
help='The port for the Patching controller API server', help='The port for the Patching controller API server',
), ),
cfg.IntOpt('api_limit_max', cfg.IntOpt('api_limit_max',
default=1000, default=1000,
help='the maximum number of items returned in a single ' help='the maximum number of items returned in a single '
'response from a collection resource'), 'response from a collection resource'),
] ]
CONF = cfg.CONF CONF = cfg.CONF
opt_group = cfg.OptGroup(name='api', opt_group = cfg.OptGroup(name='api',

View File

@ -5,12 +5,10 @@ SPDX-License-Identifier: Apache-2.0
""" """
#from oslo.config import cfg
import pecan import pecan
from cgcs_patch.api import config from cgcs_patch.api import config
#CONF = cfg.CONF
def get_pecan_config(): def get_pecan_config():
# Set up the pecan configuration # Set up the pecan configuration
@ -30,7 +28,7 @@ def setup_app(pecan_config=None):
template_path=pecan_config.app.template_path, template_path=pecan_config.app.template_path,
debug=False, debug=False,
force_canonical=getattr(pecan_config.app, 'force_canonical', True), force_canonical=getattr(pecan_config.app, 'force_canonical', True),
guess_content_type_from_ext=False, # Avoid mime-type lookup guess_content_type_from_ext=False, # Avoid mime-type lookup
) )
return app return app

View File

@ -4,4 +4,3 @@ Copyright (c) 2014-2017 Wind River Systems, Inc.
SPDX-License-Identifier: Apache-2.0 SPDX-License-Identifier: Apache-2.0
""" """

View File

@ -16,6 +16,7 @@ from cgcs_patch.patch_controller import pc
import logging import logging
from cgcs_patch.patch_functions import LOG from cgcs_patch.patch_functions import LOG
class PatchAPIController(object): class PatchAPIController(object):
@expose('json') @expose('json')
@ -107,7 +108,7 @@ class PatchAPIController(object):
# currently 64K chunk size is selected # currently 64K chunk size is selected
dst = os.open(fn, os.O_WRONLY | os.O_CREAT) dst = os.open(fn, os.O_WRONLY | os.O_CREAT)
src = fileitem.file.fileno() src = fileitem.file.fileno()
size = 64*1024 size = 64 * 1024
n = size n = size
while n >= size: while n >= size:
s = os.read(src, size) s = os.read(src, size)
@ -263,4 +264,3 @@ class RootController(object):
patch = PatchAPIController() patch = PatchAPIController()
v1 = PatchAPIController() v1 = PatchAPIController()

View File

@ -6,16 +6,16 @@
from oslo_config import cfg from oslo_config import cfg
API_SERVICE_OPTS = [ API_SERVICE_OPTS = [
cfg.StrOpt('auth_api_bind_ip', cfg.StrOpt('auth_api_bind_ip',
default=None, default=None,
help='IP for the authenticated Patching API server to bind to'), help='IP for the authenticated Patching API server to bind to'),
cfg.IntOpt('auth_api_port', cfg.IntOpt('auth_api_port',
default=5491, default=5491,
help='The port for the authenticated Patching API server'), help='The port for the authenticated Patching API server'),
cfg.IntOpt('api_limit_max', cfg.IntOpt('api_limit_max',
default=1000, default=1000,
help='the maximum number of items returned in a single ' help='the maximum number of items returned in a single '
'response from a collection resource') 'response from a collection resource')
] ]
CONF = cfg.CONF CONF = cfg.CONF

View File

@ -4,13 +4,15 @@ Copyright (c) 2014-2017 Wind River Systems, Inc.
SPDX-License-Identifier: Apache-2.0 SPDX-License-Identifier: Apache-2.0
""" """
"""Access Control Lists (ACL's) control access the API server."""
from cgcs_patch.authapi import auth_token from cgcs_patch.authapi import auth_token
OPT_GROUP_NAME = 'keystone_authtoken' OPT_GROUP_NAME = 'keystone_authtoken'
"""Access Control Lists (ACL's) control access the API server."""
def install(app, conf, public_routes): def install(app, conf, public_routes):
"""Install ACL check on application. """Install ACL check on application.
@ -21,7 +23,7 @@ def install(app, conf, public_routes):
:return: The same WSGI application with ACL installed. :return: The same WSGI application with ACL installed.
""" """
keystone_config = dict(conf.items(OPT_GROUP_NAME)) keystone_config = dict(conf.items(OPT_GROUP_NAME))
return auth_token.AuthTokenMiddleware(app, return auth_token.AuthTokenMiddleware(app,
conf=keystone_config, conf=keystone_config,

View File

@ -17,9 +17,9 @@ import ConfigParser
auth_opts = [ auth_opts = [
cfg.StrOpt('auth_strategy', cfg.StrOpt('auth_strategy',
default='keystone', default='keystone',
help='Method to use for auth: noauth or keystone.'), help='Method to use for auth: noauth or keystone.'),
] ]
CONF = cfg.CONF CONF = cfg.CONF
CONF.register_opts(auth_opts) CONF.register_opts(auth_opts)
@ -58,7 +58,7 @@ def setup_app(pecan_config=None, extra_hooks=None):
debug=False, debug=False,
force_canonical=getattr(pecan_config.app, 'force_canonical', True), force_canonical=getattr(pecan_config.app, 'force_canonical', True),
hooks=app_hooks, hooks=app_hooks,
guess_content_type_from_ext=False, # Avoid mime-type lookup guess_content_type_from_ext=False, # Avoid mime-type lookup
) )
if pecan_config.app.enable_acl: if pecan_config.app.enable_acl:

View File

@ -30,6 +30,7 @@ from sysinv.openstack.common import policy
_POLICY_PATH = None _POLICY_PATH = None
_POLICY_CACHE = {} _POLICY_CACHE = {}
def reset(): def reset():
global _POLICY_PATH global _POLICY_PATH
global _POLICY_CACHE global _POLICY_CACHE

View File

@ -146,7 +146,7 @@ class PatchService:
def audit_socket(self): def audit_socket(self):
# Ensure multicast address is still allocated # Ensure multicast address is still allocated
cmd = "ip maddr show %s | awk 'BEGIN { ORS=\"\" }; {if ($2 == \"%s\") print $2}'" % \ cmd = "ip maddr show %s | awk 'BEGIN {ORS=\"\"}; {if ($2 == \"%s\") print $2}'" % \
(cfg.get_mgmt_iface(), self.mcast_addr) (cfg.get_mgmt_iface(), self.mcast_addr)
try: try:
result = subprocess.check_output(cmd, shell=True) result = subprocess.check_output(cmd, shell=True)
@ -163,4 +163,3 @@ class PatchService:
LOG.info("Unable to setup sockets. Waiting to retry") LOG.info("Unable to setup sockets. Waiting to retry")
time.sleep(5) time.sleep(5)
LOG.info("Multicast address reconfigured") LOG.info("Multicast address reconfigured")

View File

@ -27,7 +27,7 @@ dev_certificate = b"""-----BEGIN CERTIFICATE-----
Ss6CHAMK42aZ/+MWQlZEzNK49PtomGMjn5SuoK8u Ss6CHAMK42aZ/+MWQlZEzNK49PtomGMjn5SuoK8u
-----END CERTIFICATE-----""" -----END CERTIFICATE-----"""
formal_certificate=b"""-----BEGIN CERTIFICATE----- formal_certificate = b"""-----BEGIN CERTIFICATE-----
MIIDezCCAmOgAwIBAgICEAMwDQYJKoZIhvcNAQELBQAwQjELMAkGA1UEBhMCQ0Ex MIIDezCCAmOgAwIBAgICEAMwDQYJKoZIhvcNAQELBQAwQjELMAkGA1UEBhMCQ0Ex
EDAOBgNVBAgMB09udGFyaW8xITAfBgNVBAoMGFdpbmQgUml2ZXIgU3lzdGVtcywg EDAOBgNVBAgMB09udGFyaW8xITAfBgNVBAoMGFdpbmQgUml2ZXIgU3lzdGVtcywg
SW5jLjAeFw0xNzA4MTgxNDM1MTJaFw0yNzA4MTYxNDM1MTJaMEIxCzAJBgNVBAYT SW5jLjAeFw0xNzA4MTgxNDM1MTJaFw0yNzA4MTYxNDM1MTJaMEIxCzAJBgNVBAYT

View File

@ -26,6 +26,7 @@ platform_conf_mtime = 0
patching_conf_mtime = 0 patching_conf_mtime = 0
patching_conf = '/etc/patching/patching.conf' patching_conf = '/etc/patching/patching.conf'
def read_config(): def read_config():
global patching_conf_mtime global patching_conf_mtime
global patching_conf global patching_conf
@ -122,5 +123,3 @@ def get_mgmt_iface():
logging.exception("Failed to read management_interface from config") logging.exception("Failed to read management_interface from config")
return None return None
return mgmt_if return mgmt_if

View File

@ -5,6 +5,7 @@ SPDX-License-Identifier: Apache-2.0
""" """
class PatchError(Exception): class PatchError(Exception):
"""Base class for patching exceptions.""" """Base class for patching exceptions."""
@ -42,4 +43,4 @@ class PatchValidationFailure(PatchError):
class PatchMismatchFailure(PatchError): class PatchMismatchFailure(PatchError):
"""Patch validation error.""" """Patch validation error."""
pass pass

View File

@ -46,18 +46,18 @@ run_insvc_patch_scripts_cmd = "/usr/sbin/run-patch-scripts"
pa = None pa = None
# Smart commands # Smart commands
smart_cmd = [ "/usr/bin/smart" ] smart_cmd = ["/usr/bin/smart"]
smart_quiet = smart_cmd + [ "--quiet" ] smart_quiet = smart_cmd + ["--quiet"]
smart_update = smart_quiet + [ "update" ] smart_update = smart_quiet + ["update"]
smart_newer = smart_quiet + [ "newer" ] smart_newer = smart_quiet + ["newer"]
smart_orphans = smart_quiet + [ "query", "--orphans", "--show-format", "$name\n" ] smart_orphans = smart_quiet + ["query", "--orphans", "--show-format", "$name\n"]
smart_query = smart_quiet + [ "query" ] smart_query = smart_quiet + ["query"]
smart_query_repos = smart_quiet + [ "query", "--channel=base", "--channel=updates" ] smart_query_repos = smart_quiet + ["query", "--channel=base", "--channel=updates"]
smart_install_cmd = smart_cmd + [ "install", "--yes", "--explain" ] smart_install_cmd = smart_cmd + ["install", "--yes", "--explain"]
smart_remove_cmd = smart_cmd + [ "remove", "--yes", "--explain" ] smart_remove_cmd = smart_cmd + ["remove", "--yes", "--explain"]
smart_query_installed = smart_quiet + [ "query", "--installed", "--show-format", "$name $version\n" ] smart_query_installed = smart_quiet + ["query", "--installed", "--show-format", "$name $version\n"]
smart_query_base = smart_quiet + [ "query", "--channel=base", "--show-format", "$name $version\n" ] smart_query_base = smart_quiet + ["query", "--channel=base", "--show-format", "$name $version\n"]
smart_query_updates = smart_quiet + [ "query", "--channel=updates", "--show-format", "$name $version\n" ] smart_query_updates = smart_quiet + ["query", "--channel=updates", "--show-format", "$name $version\n"]
def setflag(fname): def setflag(fname):
@ -334,7 +334,7 @@ class PatchAgent(PatchService):
# Get the current channel config # Get the current channel config
try: try:
output = subprocess.check_output(smart_cmd + output = subprocess.check_output(smart_cmd +
[ "channel", "--yaml" ], ["channel", "--yaml"],
stderr=subprocess.STDOUT) stderr=subprocess.STDOUT)
config = yaml.load(output) config = yaml.load(output)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
@ -345,18 +345,18 @@ class PatchAgent(PatchService):
LOG.exception("Failed to query channels") LOG.exception("Failed to query channels")
return False return False
expected = [ { 'channel': 'rpmdb', expected = [{'channel': 'rpmdb',
'type': 'rpm-sys', 'type': 'rpm-sys',
'name': 'RPM Database', 'name': 'RPM Database',
'baseurl': None }, 'baseurl': None},
{ 'channel': 'base', {'channel': 'base',
'type': 'rpm-md', 'type': 'rpm-md',
'name': 'Base', 'name': 'Base',
'baseurl': "http://controller/feed/rel-%s" % SW_VERSION}, 'baseurl': "http://controller/feed/rel-%s" % SW_VERSION},
{ 'channel': 'updates', {'channel': 'updates',
'type': 'rpm-md', 'type': 'rpm-md',
'name': 'Patches', 'name': 'Patches',
'baseurl': "http://controller/updates/rel-%s" % SW_VERSION} ] 'baseurl': "http://controller/updates/rel-%s" % SW_VERSION}]
updated = False updated = False
@ -367,7 +367,7 @@ class PatchAgent(PatchService):
ch_baseurl = item['baseurl'] ch_baseurl = item['baseurl']
add_channel = False add_channel = False
if channel in config: if channel in config:
# Verify existing channel config # Verify existing channel config
if (config[channel].get('type') != ch_type or if (config[channel].get('type') != ch_type or
@ -378,8 +378,8 @@ class PatchAgent(PatchService):
LOG.warning("Invalid smart config found for %s" % channel) LOG.warning("Invalid smart config found for %s" % channel)
try: try:
output = subprocess.check_output(smart_cmd + output = subprocess.check_output(smart_cmd +
[ "channel", "--yes", ["channel", "--yes",
"--remove", channel ], "--remove", channel],
stderr=subprocess.STDOUT) stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
LOG.exception("Failed to configure %s channel" % channel) LOG.exception("Failed to configure %s channel" % channel)
@ -392,11 +392,11 @@ class PatchAgent(PatchService):
if add_channel: if add_channel:
LOG.info("Adding channel %s" % channel) LOG.info("Adding channel %s" % channel)
cmd_args = [ "channel", "--yes", "--add", channel, cmd_args = ["channel", "--yes", "--add", channel,
"type=%s" % ch_type, "type=%s" % ch_type,
"name=%s" % ch_name ] "name=%s" % ch_name]
if ch_baseurl is not None: if ch_baseurl is not None:
cmd_args += [ "baseurl=%s" % ch_baseurl ] cmd_args += ["baseurl=%s" % ch_baseurl]
try: try:
output = subprocess.check_output(smart_cmd + cmd_args, output = subprocess.check_output(smart_cmd + cmd_args,
@ -411,7 +411,7 @@ class PatchAgent(PatchService):
# Validate the smart config # Validate the smart config
try: try:
output = subprocess.check_output(smart_cmd + output = subprocess.check_output(smart_cmd +
[ "config", "--yaml" ], ["config", "--yaml"],
stderr=subprocess.STDOUT) stderr=subprocess.STDOUT)
config = yaml.load(output) config = yaml.load(output)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
@ -429,8 +429,8 @@ class PatchAgent(PatchService):
LOG.warning("Setting %s option" % nolinktos) LOG.warning("Setting %s option" % nolinktos)
try: try:
output = subprocess.check_output(smart_cmd + output = subprocess.check_output(smart_cmd +
[ "config", "--set", ["config", "--set",
"%s=true" % nolinktos ], "%s=true" % nolinktos],
stderr=subprocess.STDOUT) stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
LOG.exception("Failed to configure %s option" % nolinktos) LOG.exception("Failed to configure %s option" % nolinktos)
@ -446,8 +446,8 @@ class PatchAgent(PatchService):
LOG.warning("Setting %s option" % nosignature) LOG.warning("Setting %s option" % nosignature)
try: try:
output = subprocess.check_output(smart_cmd + output = subprocess.check_output(smart_cmd +
[ "config", "--set", ["config", "--set",
"%s=false" % nosignature], "%s=false" % nosignature],
stderr=subprocess.STDOUT) stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
LOG.exception("Failed to configure %s option" % nosignature) LOG.exception("Failed to configure %s option" % nosignature)
@ -542,7 +542,7 @@ class PatchAgent(PatchService):
highest_version = None highest_version = None
try: try:
query = subprocess.check_output(smart_query_repos + [ "--show-format", '$version\n', pkgname ]) query = subprocess.check_output(smart_query_repos + ["--show-format", '$version\n', pkgname])
# The last non-blank version is the highest # The last non-blank version is the highest
for version in query.splitlines(): for version in query.splitlines():
if version == '': if version == '':
@ -562,7 +562,7 @@ class PatchAgent(PatchService):
# Get the installed version # Get the installed version
try: try:
query = subprocess.check_output(smart_query + [ "--installed", "--show-format", '$version\n', pkgname ]) query = subprocess.check_output(smart_query + ["--installed", "--show-format", '$version\n', pkgname])
for version in query.splitlines(): for version in query.splitlines():
if version == '': if version == '':
continue continue
@ -932,8 +932,8 @@ class PatchAgent(PatchService):
remaining = 30 remaining = 30
while True: while True:
inputs = [ self.sock_in, self.listener ] + connections inputs = [self.sock_in, self.listener] + connections
outputs = [ ] outputs = []
rlist, wlist, xlist = select.select(inputs, outputs, inputs, remaining) rlist, wlist, xlist = select.select(inputs, outputs, inputs, remaining)

View File

@ -84,58 +84,58 @@ def print_help():
print "Subcomands:" print "Subcomands:"
print "" print ""
print textwrap.fill(" {0:<15} ".format("upload:") + help_upload, print textwrap.fill(" {0:<15} ".format("upload:") + help_upload,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("upload-dir:") + help_upload_dir, print textwrap.fill(" {0:<15} ".format("upload-dir:") + help_upload_dir,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("apply:") + help_apply, print textwrap.fill(" {0:<15} ".format("apply:") + help_apply,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print textwrap.fill(help_patch_args, print textwrap.fill(help_patch_args,
width=TERM_WIDTH, initial_indent=' '*20, subsequent_indent=' '*20) width=TERM_WIDTH, initial_indent=' ' * 20, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("remove:") + help_remove, print textwrap.fill(" {0:<15} ".format("remove:") + help_remove,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print textwrap.fill(help_patch_args, print textwrap.fill(help_patch_args,
width=TERM_WIDTH, initial_indent=' '*20, subsequent_indent=' '*20) width=TERM_WIDTH, initial_indent=' ' * 20, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("delete:") + help_delete, print textwrap.fill(" {0:<15} ".format("delete:") + help_delete,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print textwrap.fill(help_patch_args, print textwrap.fill(help_patch_args,
width=TERM_WIDTH, initial_indent=' '*20, subsequent_indent=' '*20) width=TERM_WIDTH, initial_indent=' ' * 20, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("query:") + help_query, print textwrap.fill(" {0:<15} ".format("query:") + help_query,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("show:") + help_show, print textwrap.fill(" {0:<15} ".format("show:") + help_show,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("what-requires:") + help_what_requires, print textwrap.fill(" {0:<15} ".format("what-requires:") + help_what_requires,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("query-hosts:") + help_query_hosts, print textwrap.fill(" {0:<15} ".format("query-hosts:") + help_query_hosts,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("host-install:") + help_host_install, print textwrap.fill(" {0:<15} ".format("host-install:") + help_host_install,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("host-install-async:") + help_host_install_async, print textwrap.fill(" {0:<15} ".format("host-install-async:") + help_host_install_async,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("install-local:") + help_install_local, print textwrap.fill(" {0:<15} ".format("install-local:") + help_install_local,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("drop-host:") + help_drop_host, print textwrap.fill(" {0:<15} ".format("drop-host:") + help_drop_host,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("query-dependencies:") + help_query_dependencies, print textwrap.fill(" {0:<15} ".format("query-dependencies:") + help_query_dependencies,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("commit:") + help_commit, print textwrap.fill(" {0:<15} ".format("commit:") + help_commit,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
print textwrap.fill(" {0:<15} ".format("--os-region-name:") + help_region_name, print textwrap.fill(" {0:<15} ".format("--os-region-name:") + help_region_name,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
print "" print ""
exit(1) exit(1)
@ -284,73 +284,73 @@ def print_patch_show_result(req):
if "sw_version" in pd[patch_id] and pd[patch_id]["sw_version"] != "": if "sw_version" in pd[patch_id] and pd[patch_id]["sw_version"] != "":
print textwrap.fill(" {0:<15} ".format("Release:") + pd[patch_id]["sw_version"], print textwrap.fill(" {0:<15} ".format("Release:") + pd[patch_id]["sw_version"],
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
if "patchstate" in pd[patch_id] and pd[patch_id]["patchstate"] != "": if "patchstate" in pd[patch_id] and pd[patch_id]["patchstate"] != "":
print textwrap.fill(" {0:<15} ".format("Patch State:") + pd[patch_id]["patchstate"], print textwrap.fill(" {0:<15} ".format("Patch State:") + pd[patch_id]["patchstate"],
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
if pd[patch_id]["patchstate"] == "n/a": if pd[patch_id]["patchstate"] == "n/a":
if "repostate" in pd[patch_id] and pd[patch_id]["repostate"] != "": if "repostate" in pd[patch_id] and pd[patch_id]["repostate"] != "":
print textwrap.fill(" {0:<15} ".format("Repo State:") + pd[patch_id]["repostate"], print textwrap.fill(" {0:<15} ".format("Repo State:") + pd[patch_id]["repostate"],
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
if "status" in pd[patch_id] and pd[patch_id]["status"] != "": if "status" in pd[patch_id] and pd[patch_id]["status"] != "":
print textwrap.fill(" {0:<15} ".format("Status:") + pd[patch_id]["status"], print textwrap.fill(" {0:<15} ".format("Status:") + pd[patch_id]["status"],
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
if "unremovable" in pd[patch_id] and pd[patch_id]["unremovable"] != "": if "unremovable" in pd[patch_id] and pd[patch_id]["unremovable"] != "":
print textwrap.fill(" {0:<15} ".format("Unremovable:") + pd[patch_id]["unremovable"], print textwrap.fill(" {0:<15} ".format("Unremovable:") + pd[patch_id]["unremovable"],
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
if "reboot_required" in pd[patch_id] and pd[patch_id]["reboot_required"] != "": if "reboot_required" in pd[patch_id] and pd[patch_id]["reboot_required"] != "":
print textwrap.fill(" {0:<15} ".format("RR:") + pd[patch_id]["reboot_required"], print textwrap.fill(" {0:<15} ".format("RR:") + pd[patch_id]["reboot_required"],
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
if "summary" in pd[patch_id] and pd[patch_id]["summary"] != "": if "summary" in pd[patch_id] and pd[patch_id]["summary"] != "":
print textwrap.fill(" {0:<15} ".format("Summary:") + pd[patch_id]["summary"], print textwrap.fill(" {0:<15} ".format("Summary:") + pd[patch_id]["summary"],
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
if "description" in pd[patch_id] and pd[patch_id]["description"] != "": if "description" in pd[patch_id] and pd[patch_id]["description"] != "":
first_line = True first_line = True
for line in pd[patch_id]["description"].split('\n'): for line in pd[patch_id]["description"].split('\n'):
if first_line: if first_line:
print textwrap.fill(" {0:<15} ".format("Description:") + line, print textwrap.fill(" {0:<15} ".format("Description:") + line,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
first_line = False first_line = False
else: else:
print textwrap.fill(line, print textwrap.fill(line,
width=TERM_WIDTH, subsequent_indent=' '*20, width=TERM_WIDTH, subsequent_indent=' ' * 20,
initial_indent=' '*20) initial_indent=' ' * 20)
if "install_instructions" in pd[patch_id] and pd[patch_id]["install_instructions"] != "": if "install_instructions" in pd[patch_id] and pd[patch_id]["install_instructions"] != "":
print " Install Instructions:" print " Install Instructions:"
for line in pd[patch_id]["install_instructions"].split('\n'): for line in pd[patch_id]["install_instructions"].split('\n'):
print textwrap.fill(line, print textwrap.fill(line,
width=TERM_WIDTH, subsequent_indent=' '*20, width=TERM_WIDTH, subsequent_indent=' ' * 20,
initial_indent=' '*20) initial_indent=' ' * 20)
if "warnings" in pd[patch_id] and pd[patch_id]["warnings"] != "": if "warnings" in pd[patch_id] and pd[patch_id]["warnings"] != "":
first_line = True first_line = True
for line in pd[patch_id]["warnings"].split('\n'): for line in pd[patch_id]["warnings"].split('\n'):
if first_line: if first_line:
print textwrap.fill(" {0:<15} ".format("Warnings:") + line, print textwrap.fill(" {0:<15} ".format("Warnings:") + line,
width=TERM_WIDTH, subsequent_indent=' '*20) width=TERM_WIDTH, subsequent_indent=' ' * 20)
first_line = False first_line = False
else: else:
print textwrap.fill(line, print textwrap.fill(line,
width=TERM_WIDTH, subsequent_indent=' '*20, width=TERM_WIDTH, subsequent_indent=' ' * 20,
initial_indent=' '*20) initial_indent=' ' * 20)
if "requires" in pd[patch_id] and len(pd[patch_id]["requires"]) > 0: if "requires" in pd[patch_id] and len(pd[patch_id]["requires"]) > 0:
print " Requires:" print " Requires:"
for req_patch in sorted(pd[patch_id]["requires"]): for req_patch in sorted(pd[patch_id]["requires"]):
print ' '*20 + req_patch print ' ' * 20 + req_patch
if "contents" in data and patch_id in data["contents"]: if "contents" in data and patch_id in data["contents"]:
print " Contents:" print " Contents:"
for pkg in sorted(data["contents"][patch_id]): for pkg in sorted(data["contents"][patch_id]):
print ' '*20 + pkg print ' ' * 20 + pkg
print "\n" print "\n"
@ -597,7 +597,7 @@ def patch_commit_req(debug, args):
print print
commit_warning = "WARNING: Committing a patch is an irreversible operation. " + \ commit_warning = "WARNING: Committing a patch is an irreversible operation. " + \
"Committed patches cannot be removed." "Committed patches cannot be removed."
print textwrap.fill(commit_warning, width=TERM_WIDTH, subsequent_indent=' '*9) print textwrap.fill(commit_warning, width=TERM_WIDTH, subsequent_indent=' ' * 9)
print print
user_input = raw_input("Would you like to continue? [y/N]: ") user_input = raw_input("Would you like to continue? [y/N]: ")
@ -699,19 +699,19 @@ def print_query_hosts_result(req):
for agent in sorted(agents, key=lambda a: a["hostname"]): for agent in sorted(agents, key=lambda a: a["hostname"]):
patch_current_field = "Yes" if agent["patch_current"] else "No" patch_current_field = "Yes" if agent["patch_current"] else "No"
if agent.get("interim_state") == True: if agent.get("interim_state") is True:
patch_current_field = "Pending" patch_current_field = "Pending"
if agent["patch_failed"]: if agent["patch_failed"]:
patch_current_field = "Failed" patch_current_field = "Failed"
print "{0:<{width_hn}} {1:<{width_ip}} {2:^{width_pc}} {3:^{width_rr}} {4:^{width_rel}} {5:^{width_state}}".format( print "{0:<{width_hn}} {1:<{width_ip}} {2:^{width_pc}} {3:^{width_rr}} {4:^{width_rel}} {5:^{width_state}}".format(
agent["hostname"], agent["hostname"],
agent["ip"], agent["ip"],
patch_current_field, patch_current_field,
"Yes" if agent["requires_reboot"] else "No", "Yes" if agent["requires_reboot"] else "No",
agent["sw_version"], agent["sw_version"],
agent["state"], agent["state"],
width_hn=width_hn, width_ip=width_ip, width_pc=width_pc, width_rr=width_rr, width_rel=width_rel, width_state=width_state) width_hn=width_hn, width_ip=width_ip, width_pc=width_pc, width_rr=width_rr, width_rel=width_rel, width_state=width_state)
elif req.status_code == 500: elif req.status_code == 500:
@ -862,7 +862,7 @@ def wait_for_install_complete(agent_ip):
break break
if state == constants.PATCH_AGENT_STATE_INSTALLING or \ if state == constants.PATCH_AGENT_STATE_INSTALLING or \
interim_state == True: interim_state is True:
# Still installing # Still installing
sys.stdout.write(".") sys.stdout.write(".")
sys.stdout.flush() sys.stdout.flush()
@ -1206,7 +1206,7 @@ def check_for_os_region_name():
try: try:
subprocess.check_output("pgrep -f dcorch-api-proxy", shell=True) subprocess.check_output("pgrep -f dcorch-api-proxy", shell=True)
except subprocess.CalledProcessError: except subprocess.CalledProcessError:
print ("Command must be run from the active controller.") print("Command must be run from the active controller.")
exit(1) exit(1)
# get a token and fetch the internal endpoint in SystemController # get a token and fetch the internal endpoint in SystemController

View File

@ -537,7 +537,6 @@ class PatchMessageDropHostReq(messages.PatchMessage):
sock.sendto(message, (cfg.controller_mcast_group, cfg.controller_port)) sock.sendto(message, (cfg.controller_mcast_group, cfg.controller_port))
class PatchController(PatchService): class PatchController(PatchService):
def __init__(self): def __init__(self):
PatchService.__init__(self) PatchService.__init__(self)
@ -1561,7 +1560,7 @@ class PatchController(PatchService):
self.inc_patch_op_counter() self.inc_patch_op_counter()
self.patch_data_lock.acquire() self.patch_data_lock.acquire()
#self.patch_data.load_all() # self.patch_data.load_all()
self.check_patch_states() self.check_patch_states()
self.patch_data_lock.release() self.patch_data_lock.release()
@ -1813,7 +1812,7 @@ class PatchController(PatchService):
raise e raise e
if pkgname not in keep[patch_sw_version]: if pkgname not in keep[patch_sw_version]:
keep[patch_sw_version][pkgname] = { arch: pkgver } keep[patch_sw_version][pkgname] = {arch: pkgver}
continue continue
elif arch not in keep[patch_sw_version][pkgname]: elif arch not in keep[patch_sw_version][pkgname]:
keep[patch_sw_version][pkgname][arch] = pkgver keep[patch_sw_version][pkgname][arch] = pkgver
@ -1823,9 +1822,9 @@ class PatchController(PatchService):
keep_pkgver = keep[patch_sw_version][pkgname][arch] keep_pkgver = keep[patch_sw_version][pkgname][arch]
if pkgver > keep_pkgver: if pkgver > keep_pkgver:
if pkgname not in cleanup[patch_sw_version]: if pkgname not in cleanup[patch_sw_version]:
cleanup[patch_sw_version][pkgname] = { arch: [ keep_pkgver ] } cleanup[patch_sw_version][pkgname] = {arch: [keep_pkgver]}
elif arch not in cleanup[patch_sw_version][pkgname]: elif arch not in cleanup[patch_sw_version][pkgname]:
cleanup[patch_sw_version][pkgname][arch] = [ keep_pkgver ] cleanup[patch_sw_version][pkgname][arch] = [keep_pkgver]
else: else:
cleanup[patch_sw_version][pkgname][arch].append(keep_pkgver) cleanup[patch_sw_version][pkgname][arch].append(keep_pkgver)
@ -1845,9 +1844,9 @@ class PatchController(PatchService):
else: else:
# Put this pkg in the cleanup list # Put this pkg in the cleanup list
if pkgname not in cleanup[patch_sw_version]: if pkgname not in cleanup[patch_sw_version]:
cleanup[patch_sw_version][pkgname] = { arch: [ pkgver ] } cleanup[patch_sw_version][pkgname] = {arch: [pkgver]}
elif arch not in cleanup[patch_sw_version][pkgname]: elif arch not in cleanup[patch_sw_version][pkgname]:
cleanup[patch_sw_version][pkgname][arch] = [ pkgver ] cleanup[patch_sw_version][pkgname][arch] = [pkgver]
else: else:
cleanup[patch_sw_version][pkgname][arch].append(pkgver) cleanup[patch_sw_version][pkgname][arch].append(pkgver)
@ -1868,7 +1867,7 @@ class PatchController(PatchService):
disk_space += statinfo.st_size disk_space += statinfo.st_size
if dry_run: if dry_run:
results["info"] = "This commit operation would free %0.2f MiB" % (disk_space/(1024.0*1024.0)) results["info"] = "This commit operation would free %0.2f MiB" % (disk_space / (1024.0 * 1024.0))
return results return results
# Do the commit # Do the commit

View File

@ -410,7 +410,7 @@ class PatchData:
else: else:
outfile.write(minidom.parseString(rough_xml).toprettyxml(indent=" ")) outfile.write(minidom.parseString(rough_xml).toprettyxml(indent=" "))
outfile.close() outfile.close()
os.rename(new_filename, filename) os.rename(new_filename, filename)
def parse_metadata(self, def parse_metadata(self,
filename, filename,
@ -773,16 +773,16 @@ class PatchFile:
for rpmfile in self.rpmlist.keys(): for rpmfile in self.rpmlist.keys():
shutil.copy(rpmfile, tmpdir) shutil.copy(rpmfile, tmpdir)
# add file signatures to RPMs # add file signatures to RPMs
try: try:
subprocess.check_call(["sign-rpms", "-d", tmpdir]) subprocess.check_call(["sign-rpms", "-d", tmpdir])
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
print "Failed to to add file signatures to RPMs. Call to sign-rpms process returned non-zero exit status %i" % e.returncode print "Failed to to add file signatures to RPMs. Call to sign-rpms process returned non-zero exit status %i" % e.returncode
os.chdir(orig_wd) os.chdir(orig_wd)
shutil.rmtree(tmpdir) shutil.rmtree(tmpdir)
raise SystemExit(e.returncode) raise SystemExit(e.returncode)
# generate tar file # generate tar file
tar = tarfile.open("software.tar", "w") tar = tarfile.open("software.tar", "w")
for rpmfile in self.rpmlist.keys(): for rpmfile in self.rpmlist.keys():
tar.add(os.path.basename(rpmfile)) tar.add(os.path.basename(rpmfile))
@ -824,7 +824,8 @@ class PatchFile:
# Note: if cert_type requests a formal signature, but the signing key # Note: if cert_type requests a formal signature, but the signing key
# is not found, we'll instead sign with the 'dev' key and # is not found, we'll instead sign with the 'dev' key and
# need_resign_with_formal is set to True. # need_resign_with_formal is set to True.
need_resign_with_formal = sign_files(['metadata.tar', 'software.tar'], need_resign_with_formal = sign_files(
['metadata.tar', 'software.tar'],
detached_signature_file, detached_signature_file,
cert_type=cert_type) cert_type=cert_type)
@ -897,7 +898,7 @@ class PatchFile:
if verify_signature: if verify_signature:
# If there should be a detached signature, verify it # If there should be a detached signature, verify it
if os.path.exists(detached_signature_file): if os.path.exists(detached_signature_file):
filenames=["metadata.tar", "software.tar"] filenames = ["metadata.tar", "software.tar"]
sig_valid = verify_files( sig_valid = verify_files(
filenames, filenames,
detached_signature_file, detached_signature_file,
@ -947,11 +948,11 @@ class PatchFile:
try: try:
PatchFile.read_patch(abs_patch, metadata_only=True, cert_type=[cert_type_str]) PatchFile.read_patch(abs_patch, metadata_only=True, cert_type=[cert_type_str])
except PatchValidationFailure as e: except PatchValidationFailure as e:
pass; pass
else: else:
# Successfully opened the file for reading, and we have discovered the cert_type # Successfully opened the file for reading, and we have discovered the cert_type
r["cert"] = cert_type_str r["cert"] = cert_type_str
break; break
if "cert" not in r: if "cert" not in r:
# If cert is unknown, then file is not yet open for reading. # If cert is unknown, then file is not yet open for reading.
@ -973,7 +974,7 @@ class PatchFile:
"warnings", "reboot_required"]: "warnings", "reboot_required"]:
r[f] = thispatch.query_line(patch_id, f) r[f] = thispatch.query_line(patch_id, f)
else: else:
if field not in [ 'id', 'cert' ]: if field not in ['id', 'cert']:
r[field] = thispatch.query_line(patch_id, field) r[field] = thispatch.query_line(patch_id, field)
except PatchValidationFailure as e: except PatchValidationFailure as e:
@ -1185,7 +1186,7 @@ def patch_build():
'all-nodes=']) 'all-nodes='])
except getopt.GetoptError: except getopt.GetoptError:
print "Usage: %s [ <args> ] ... <rpm list>" \ print "Usage: %s [ <args> ] ... <rpm list>" \
% os.path.basename(sys.argv[0]) % os.path.basename(sys.argv[0])
print "Options:" print "Options:"
print "\t--id <id> Patch ID" print "\t--id <id> Patch ID"
print "\t--release <version> Platform release version" print "\t--release <version> Platform release version"

View File

@ -14,15 +14,15 @@ from binascii import a2b_base64
from cgcs_patch.patch_verify import read_RSA_key, cert_type_formal_str, cert_type_dev_str from cgcs_patch.patch_verify import read_RSA_key, cert_type_formal_str, cert_type_dev_str
# To save memory, read and hash 1M of files at a time # To save memory, read and hash 1M of files at a time
default_blocksize=1*1024*1024 default_blocksize = 1 * 1024 * 1024
# When we sign patches, look for private keys in the following paths # When we sign patches, look for private keys in the following paths
# #
# The (currently hardcoded) path on the signing server will be replaced # The (currently hardcoded) path on the signing server will be replaced
# by the capability to specify filename from calling function. # by the capability to specify filename from calling function.
private_key_files={cert_type_formal_str: '/signing/keys/formal-private-key.pem', private_key_files = {cert_type_formal_str: '/signing/keys/formal-private-key.pem',
cert_type_dev_str: os.path.expandvars('$MY_REPO/build-tools/signing/dev-private-key.pem') cert_type_dev_str: os.path.expandvars('$MY_REPO/build-tools/signing/dev-private-key.pem')
} }
def sign_files(filenames, signature_file, private_key=None, cert_type=None): def sign_files(filenames, signature_file, private_key=None, cert_type=None):
@ -39,21 +39,21 @@ def sign_files(filenames, signature_file, private_key=None, cert_type=None):
""" """
# Hash the data across all files # Hash the data across all files
blocksize=default_blocksize blocksize = default_blocksize
data_hash = SHA256.new() data_hash = SHA256.new()
for filename in filenames: for filename in filenames:
with open(filename, 'rb') as infile: with open(filename, 'rb') as infile:
data=infile.read(blocksize) data = infile.read(blocksize)
while len(data) > 0: while len(data) > 0:
data_hash.update(data) data_hash.update(data)
data=infile.read(blocksize) data = infile.read(blocksize)
# Find a private key to use, if not already provided # Find a private key to use, if not already provided
need_resign_with_formal = False need_resign_with_formal = False
if private_key is None: if private_key is None:
if cert_type is not None: if cert_type is not None:
# A Specific key is asked for # A Specific key is asked for
assert (cert_type in private_key_files.keys()),"cert_type=%s is not a known cert type" % cert_type assert (cert_type in private_key_files.keys()), "cert_type=%s is not a known cert type" % cert_type
dict_key = cert_type dict_key = cert_type
filename = private_key_files[dict_key] filename = private_key_files[dict_key]
# print 'cert_type given: Checking to see if ' + filename + ' exists\n' # print 'cert_type given: Checking to see if ' + filename + ' exists\n'
@ -75,7 +75,7 @@ def sign_files(filenames, signature_file, private_key=None, cert_type=None):
# print 'Getting private key from ' + filename + '\n' # print 'Getting private key from ' + filename + '\n'
private_key = read_RSA_key(open(filename, 'rb').read()) private_key = read_RSA_key(open(filename, 'rb').read())
assert (private_key is not None),"Could not find signing key" assert (private_key is not None), "Could not find signing key"
# Encrypt the hash (sign the data) with the key we find # Encrypt the hash (sign the data) with the key we find
signer = PKCS1_PSS.new(private_key) signer = PKCS1_PSS.new(private_key)

View File

@ -18,16 +18,17 @@ from binascii import a2b_base64
from cgcs_patch.certificates import dev_certificate, formal_certificate from cgcs_patch.certificates import dev_certificate, formal_certificate
# To save memory, read and hash 1M of files at a time # To save memory, read and hash 1M of files at a time
default_blocksize=1*1024*1024 default_blocksize = 1 * 1024 * 1024
dev_certificate_marker='/etc/pki/wrs/dev_certificate_enable.bin' dev_certificate_marker = '/etc/pki/wrs/dev_certificate_enable.bin'
LOG = logging.getLogger('main_logger') LOG = logging.getLogger('main_logger')
cert_type_dev_str='dev' cert_type_dev_str = 'dev'
cert_type_formal_str='formal' cert_type_formal_str = 'formal'
cert_type_dev=[cert_type_dev_str] cert_type_dev = [cert_type_dev_str]
cert_type_formal=[cert_type_formal_str] cert_type_formal = [cert_type_formal_str]
cert_type_all=[cert_type_dev_str, cert_type_formal_str] cert_type_all = [cert_type_dev_str, cert_type_formal_str]
def verify_hash(data_hash, signature_bytes, certificate_list): def verify_hash(data_hash, signature_bytes, certificate_list):
""" """
@ -74,7 +75,7 @@ def get_public_certificates_by_type(cert_type=cert_type_all):
""" """
Builds a list of accepted certificates which can be used to validate Builds a list of accepted certificates which can be used to validate
further things. This list may contain multiple certificates depending on further things. This list may contain multiple certificates depending on
the configuration of the system and the value of cert_type. the configuration of the system and the value of cert_type.
:param cert_type: A list of strings, certificate types to include in list :param cert_type: A list of strings, certificate types to include in list
'formal' - include formal certificate if available 'formal' - include formal certificate if available
@ -137,7 +138,7 @@ def read_RSA_key(key_data):
# the key object # the key object
# #
# We need to strip the BEGIN and END lines from PEM first # We need to strip the BEGIN and END lines from PEM first
x509lines = key_data.replace(' ','').split() x509lines = key_data.replace(' ', '').split()
x509text = ''.join(x509lines[1:-1]) x509text = ''.join(x509lines[1:-1])
x509data = DerSequence() x509data = DerSequence()
x509data.decode(a2b_base64(x509text)) x509data.decode(a2b_base64(x509text))
@ -166,14 +167,14 @@ def verify_files(filenames, signature_file, cert_type=None):
""" """
# Hash the data across all files # Hash the data across all files
blocksize=default_blocksize blocksize = default_blocksize
data_hash = SHA256.new() data_hash = SHA256.new()
for filename in filenames: for filename in filenames:
with open(filename, 'rb') as infile: with open(filename, 'rb') as infile:
data=infile.read(blocksize) data = infile.read(blocksize)
while len(data) > 0: while len(data) > 0:
data_hash.update(data) data_hash.update(data)
data=infile.read(blocksize) data = infile.read(blocksize)
# Get the signature # Get the signature
with open(signature_file, 'rb') as sig_file: with open(signature_file, 'rb') as sig_file:
@ -185,4 +186,3 @@ def verify_files(filenames, signature_file, cert_type=None):
else: else:
certificate_list = get_public_certificates_by_type(cert_type=cert_type) certificate_list = get_public_certificates_by_type(cert_type=cert_type)
return verify_hash(data_hash, signature_bytes, certificate_list) return verify_hash(data_hash, signature_bytes, certificate_list)

View File

@ -10,7 +10,8 @@ import posixfile
import string import string
import time import time
directory="/localdisk/designer/jenkins/patch_ids" directory = "/localdisk/designer/jenkins/patch_ids"
def get_unique_id(filename, digits=4): def get_unique_id(filename, digits=4):
counter = 1 counter = 1
@ -29,8 +30,8 @@ def get_unique_id(filename, digits=4):
print "creation of file '%s' failed" % path print "creation of file '%s' failed" % path
return -1 return -1
file.seek(0) # rewind file.seek(0) # rewind
format = "%%0%dd" % digits format = "%%0%dd" % digits
file.write(format % counter) file.write(format % counter)
# Note: close releases lock # Note: close releases lock
@ -38,6 +39,7 @@ def get_unique_id(filename, digits=4):
return counter return counter
def get_patch_id(version, prefix="CGCS", digits=4): def get_patch_id(version, prefix="CGCS", digits=4):
filename = "%s_%s_patchid" % (prefix, version) filename = "%s_%s_patchid" % (prefix, version)
id = get_unique_id(filename) id = get_unique_id(filename)
@ -46,4 +48,3 @@ def get_patch_id(version, prefix="CGCS", digits=4):
patch_id_format = "%%s_%%s_PATCH_%%0%dd" % digits patch_id_format = "%%s_%%s_PATCH_%%0%dd" % digits
patch_id = patch_id_format % (prefix, version, id) patch_id = patch_id_format % (prefix, version, id)
return patch_id return patch_id

View File

@ -11,11 +11,12 @@ import getopt
import sys import sys
opts = ['sw_version=', 'prefix=' ] opts = ['sw_version=', 'prefix=']
server = 'yow-cgts2-lx.wrs.com' server = 'yow-cgts2-lx.wrs.com'
port = 8888 port = 8888
def request_patch_id(sw_version="1.01", prefix="CGCS"): def request_patch_id(sw_version="1.01", prefix="CGCS"):
raw_parms = {} raw_parms = {}
raw_parms['sw_version'] = sw_version raw_parms['sw_version'] = sw_version
@ -27,6 +28,7 @@ def request_patch_id(sw_version="1.01", prefix="CGCS"):
response = urllib2.urlopen(url, params).read() response = urllib2.urlopen(url, params).read()
return response return response
def main(): def main():
optlist, remainder = getopt.getopt(sys.argv[1:], '', opts) optlist, remainder = getopt.getopt(sys.argv[1:], '', opts)

View File

@ -11,13 +11,13 @@ import web
import patch_id_allocator as pida import patch_id_allocator as pida
port = 8888 port = 8888
urls = ( urls = (
'/get_patch_id', 'get_patch_id', '/get_patch_id', 'get_patch_id',
) )
class get_patch_id: class get_patch_id:
def GET(self): def GET(self):
data = web.input(sw_version=None, prefix="CGCS") data = web.input(sw_version=None, prefix="CGCS")
@ -28,16 +28,17 @@ class get_patch_id:
data = web.input(sw_version=None, prefix="CGCS") data = web.input(sw_version=None, prefix="CGCS")
output = pida.get_patch_id(data.sw_version, data.prefix) output = pida.get_patch_id(data.sw_version, data.prefix)
return output return output
class MyApplication(web.application): class MyApplication(web.application):
def run(self, port=8080, *middleware): def run(self, port=8080, *middleware):
func = self.wsgifunc(*middleware) func = self.wsgifunc(*middleware)
return web.httpserver.runsimple(func, ('0.0.0.0', port)) return web.httpserver.runsimple(func, ('0.0.0.0', port))
def main(): def main():
app = MyApplication(urls, globals()) app = MyApplication(urls, globals())
app.run(port=port) app.run(port=port)
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@ -8,13 +8,13 @@
import setuptools import setuptools
setuptools.setup(name='cgcs_patch', setuptools.setup(
name='cgcs_patch',
version='1.0', version='1.0',
description='CGCS Patch', description='CGCS Patch',
packages=setuptools.find_packages(), packages=setuptools.find_packages(),
package_data = { package_data={
# Include templates # Include templates
'': ['templates/*'], '': ['templates/*'],
} }
) )

View File

@ -28,8 +28,6 @@ from cgcs_patch.constants import ENABLE_DEV_CERTIFICATE_PATCH_IDENTIFIER
LOG_FILE = '/var/log/patch-alarms.log' LOG_FILE = '/var/log/patch-alarms.log'
PID_FILE = '/var/run/patch-alarm-manager.pid' PID_FILE = '/var/run/patch-alarm-manager.pid'
#logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG)
################### ###################
# METHODS # METHODS
@ -97,10 +95,10 @@ class PatchAlarmDaemon():
if 'pd' in data: if 'pd' in data:
for patch_id, metadata in data['pd'].iteritems(): for patch_id, metadata in data['pd'].iteritems():
if 'patchstate' in metadata and \ if 'patchstate' in metadata and \
(metadata['patchstate'] == 'Partial-Apply' or metadata['patchstate'] == 'Partial-Remove'): (metadata['patchstate'] == 'Partial-Apply' or metadata['patchstate'] == 'Partial-Remove'):
raise_pip_alarm = True raise_pip_alarm = True
if 'status' in metadata and \ if 'status' in metadata and \
(metadata['status'] == 'OBS' or metadata['status'] == 'Obsolete'): (metadata['status'] == 'OBS' or metadata['status'] == 'Obsolete'):
raise_obs_alarm = True raise_obs_alarm = True
# If there is a patch in the system (in any state) that is # If there is a patch in the system (in any state) that is
# named some variation of "enable-dev-certificate", raise # named some variation of "enable-dev-certificate", raise
@ -113,16 +111,16 @@ class PatchAlarmDaemon():
entity_instance_id) entity_instance_id)
if raise_pip_alarm and pip_alarm is None: if raise_pip_alarm and pip_alarm is None:
logging.info("Raising patch-in-progress alarm") logging.info("Raising patch-in-progress alarm")
fault = fm_api.Fault(alarm_id = fm_constants.FM_ALARM_ID_PATCH_IN_PROGRESS, fault = fm_api.Fault(alarm_id=fm_constants.FM_ALARM_ID_PATCH_IN_PROGRESS,
alarm_type = fm_constants.FM_ALARM_TYPE_5, alarm_type=fm_constants.FM_ALARM_TYPE_5,
alarm_state = fm_constants.FM_ALARM_STATE_SET, alarm_state=fm_constants.FM_ALARM_STATE_SET,
entity_type_id = fm_constants.FM_ENTITY_TYPE_HOST, entity_type_id=fm_constants.FM_ENTITY_TYPE_HOST,
entity_instance_id = entity_instance_id, entity_instance_id=entity_instance_id,
severity = fm_constants.FM_ALARM_SEVERITY_MINOR, severity=fm_constants.FM_ALARM_SEVERITY_MINOR,
reason_text = 'Patching operation in progress', reason_text='Patching operation in progress',
probable_cause = fm_constants.ALARM_PROBABLE_CAUSE_65, probable_cause=fm_constants.ALARM_PROBABLE_CAUSE_65,
proposed_repair_action = 'Complete reboots of affected hosts', proposed_repair_action='Complete reboots of affected hosts',
service_affecting = False) service_affecting=False)
self.fm_api.set_fault(fault) self.fm_api.set_fault(fault)
elif not raise_pip_alarm and pip_alarm is not None: elif not raise_pip_alarm and pip_alarm is not None:
@ -134,16 +132,16 @@ class PatchAlarmDaemon():
entity_instance_id) entity_instance_id)
if raise_obs_alarm and obs_alarm is None: if raise_obs_alarm and obs_alarm is None:
logging.info("Raising obsolete-patch-in-system alarm") logging.info("Raising obsolete-patch-in-system alarm")
fault = fm_api.Fault(alarm_id = fm_constants.FM_ALARM_ID_PATCH_OBS_IN_SYSTEM, fault = fm_api.Fault(alarm_id=fm_constants.FM_ALARM_ID_PATCH_OBS_IN_SYSTEM,
alarm_type = fm_constants.FM_ALARM_TYPE_5, alarm_type=fm_constants.FM_ALARM_TYPE_5,
alarm_state = fm_constants.FM_ALARM_STATE_SET, alarm_state=fm_constants.FM_ALARM_STATE_SET,
entity_type_id = fm_constants.FM_ENTITY_TYPE_HOST, entity_type_id=fm_constants.FM_ENTITY_TYPE_HOST,
entity_instance_id = entity_instance_id, entity_instance_id=entity_instance_id,
severity = fm_constants.FM_ALARM_SEVERITY_WARNING, severity=fm_constants.FM_ALARM_SEVERITY_WARNING,
reason_text = 'Obsolete patch in system', reason_text='Obsolete patch in system',
probable_cause = fm_constants.ALARM_PROBABLE_CAUSE_65, probable_cause=fm_constants.ALARM_PROBABLE_CAUSE_65,
proposed_repair_action = 'Remove and delete obsolete patches', proposed_repair_action='Remove and delete obsolete patches',
service_affecting = False) service_affecting=False)
self.fm_api.set_fault(fault) self.fm_api.set_fault(fault)
elif not raise_obs_alarm and obs_alarm is not None: elif not raise_obs_alarm and obs_alarm is not None:
@ -155,21 +153,20 @@ class PatchAlarmDaemon():
entity_instance_id) entity_instance_id)
if raise_cert_alarm and cert_alarm is None: if raise_cert_alarm and cert_alarm is None:
logging.info("Raising developer-certificate-enabled alarm") logging.info("Raising developer-certificate-enabled alarm")
fault = fm_api.Fault(alarm_id = fm_constants.FM_ALARM_ID_NONSTANDARD_CERT_PATCH, fault = fm_api.Fault(alarm_id=fm_constants.FM_ALARM_ID_NONSTANDARD_CERT_PATCH,
alarm_type = fm_constants.FM_ALARM_TYPE_9, alarm_type=fm_constants.FM_ALARM_TYPE_9,
alarm_state = fm_constants.FM_ALARM_STATE_SET, alarm_state=fm_constants.FM_ALARM_STATE_SET,
entity_type_id = fm_constants.FM_ENTITY_TYPE_HOST, entity_type_id=fm_constants.FM_ENTITY_TYPE_HOST,
entity_instance_id = entity_instance_id, entity_instance_id=entity_instance_id,
severity = fm_constants.FM_ALARM_SEVERITY_CRITICAL, severity=fm_constants.FM_ALARM_SEVERITY_CRITICAL,
reason_text = 'Developer patch certificate is enabled', reason_text='Developer patch certificate is enabled',
probable_cause = fm_constants.ALARM_PROBABLE_CAUSE_65, probable_cause=fm_constants.ALARM_PROBABLE_CAUSE_65,
proposed_repair_action = 'Reinstall system to disable certificate and remove untrusted patches', proposed_repair_action='Reinstall system to disable certificate and remove untrusted patches',
suppression = False, suppression=False,
service_affecting = False) service_affecting=False)
self.fm_api.set_fault(fault) self.fm_api.set_fault(fault)
def _get_handle_failed_hosts(self): def _get_handle_failed_hosts(self):
url = "http://%s/patch/query_hosts" % self.api_addr url = "http://%s/patch/query_hosts" % self.api_addr
@ -202,16 +199,16 @@ class PatchAlarmDaemon():
else: else:
logging.info("Updating patch-host-install-failure alarm") logging.info("Updating patch-host-install-failure alarm")
fault = fm_api.Fault(alarm_id = fm_constants.FM_ALARM_ID_PATCH_HOST_INSTALL_FAILED, fault = fm_api.Fault(alarm_id=fm_constants.FM_ALARM_ID_PATCH_HOST_INSTALL_FAILED,
alarm_type = fm_constants.FM_ALARM_TYPE_5, alarm_type=fm_constants.FM_ALARM_TYPE_5,
alarm_state = fm_constants.FM_ALARM_STATE_SET, alarm_state=fm_constants.FM_ALARM_STATE_SET,
entity_type_id = fm_constants.FM_ENTITY_TYPE_HOST, entity_type_id=fm_constants.FM_ENTITY_TYPE_HOST,
entity_instance_id = entity_instance_id, entity_instance_id=entity_instance_id,
severity = fm_constants.FM_ALARM_SEVERITY_MAJOR, severity=fm_constants.FM_ALARM_SEVERITY_MAJOR,
reason_text = reason_text, reason_text=reason_text,
probable_cause = fm_constants.ALARM_PROBABLE_CAUSE_65, probable_cause=fm_constants.ALARM_PROBABLE_CAUSE_65,
proposed_repair_action = 'Undo patching operation', proposed_repair_action='Undo patching operation',
service_affecting = False) service_affecting=False)
self.fm_api.set_fault(fault) self.fm_api.set_fault(fault)
elif patch_failed_alarm is not None: elif patch_failed_alarm is not None:
@ -220,4 +217,3 @@ class PatchAlarmDaemon():
entity_instance_id) entity_instance_id)
return False return False

View File

@ -9,11 +9,11 @@ SPDX-License-Identifier: Apache-2.0
import setuptools import setuptools
setuptools.setup(name='patch_alarm', setuptools.setup(
version='1.0.0', name='patch_alarm',
description='CEPH alarm', version='1.0.0',
license='Apache-2.0', description='CEPH alarm',
packages=['patch_alarm'], license='Apache-2.0',
entry_points={ packages=['patch_alarm'],
} entry_points={}
) )

29
tox.ini
View File

@ -23,36 +23,13 @@ commands =
-o \( -name .tox -prune \) \ -o \( -name .tox -prune \) \
-o -type f -name '*.yaml' \ -o -type f -name '*.yaml' \
-print0 | xargs -0 yamllint" -print0 | xargs -0 yamllint"
bash -c "ls cgcs-patch/bin/*.sh | xargs bashate -v {posargs}" bash -c "ls cgcs-patch/bin/*.sh | xargs bashate -v -iE006 {posargs}"
[pep8] [pep8]
# Temporarily ignoring these warnings # Ignoring these warnings
# E101 indentation contains mixed spaces and tabs
# E116 unexpected indentation (comment)
# E121 continuation line under-indented for hanging indent
# E122 continuation line missing indentation or outdented
# E123 closing bracket does not match indentation of opening bracket
# E124 closing bracket does not match visual indentation
# E126 continuation line over-indented for hanging indent
# E127 continuation line over-indented for visual indent
# E128 continuation line under-indented for visual indent
# E129 visually indented line with same indent as next logical line
# E203 whitespace before ':'
# E211 whitespace before '('
# E225 missing whitespace around operator
# E226 missing whitespace around arithmetic operator
# E228 missing whitespace around modulo operator
# E231 missing whitespace after ':'
# E241 multiple spaces after
# E261 at least two spaces before inline comment
# E265 block comment should start with '# '
# E251 unexpected spaces around keyword / parameter equals
# E302 expected 2 blank lines, found 1
# E303 too many blank lines
# E501 line too long # E501 line too long
# E712 comparison to bool should be reworded
ignore = E101,E116,E121,E123,E122,E124,E126,E127,E128,E129,E203,E211,E225,E226,E228,E231,E241,E251,E261,E265,E302,E303,E501,E712 ignore = E501
[testenv:pep8] [testenv:pep8]
usedevelop = False usedevelop = False

View File

@ -9,11 +9,11 @@ SPDX-License-Identifier: Apache-2.0
import setuptools import setuptools
setuptools.setup(name='tsconfig', setuptools.setup(
version='1.0.0', name='tsconfig',
description='tsconfig', version='1.0.0',
license='Apache-2.0', description='tsconfig',
packages=['tsconfig'], license='Apache-2.0',
entry_points={ packages=['tsconfig'],
} entry_points={}
) )