Merge "Fix flake8 errors and disable ignore case"

This commit is contained in:
Zuul 2018-11-16 18:35:28 +00:00 committed by Gerrit Code Review
commit 0c142d12b7
88 changed files with 354 additions and 356 deletions

View File

@ -6,19 +6,19 @@
from oslo_config import cfg from oslo_config import cfg
API_SERVICE_OPTS = [ API_SERVICE_OPTS = [
cfg.StrOpt('sm_api_bind_ip', cfg.StrOpt('sm_api_bind_ip',
default='0.0.0.0', default='0.0.0.0',
help='IP for the Service Management API server to bind to', help='IP for the Service Management API server to bind to',
), ),
cfg.IntOpt('sm_api_port', cfg.IntOpt('sm_api_port',
default=7777, default=7777,
help='The port for the Service Management API server', help='The port for the Service Management API server',
), ),
cfg.IntOpt('api_limit_max', cfg.IntOpt('api_limit_max',
default=1000, default=1000,
help='the maximum number of items returned in a single ' help='the maximum number of items returned in a single '
'response from a collection resource'), 'response from a collection resource'),
] ]
CONF = cfg.CONF CONF = cfg.CONF
opt_group = cfg.OptGroup(name='api', opt_group = cfg.OptGroup(name='api',

View File

@ -39,7 +39,7 @@ def register_opts(conf):
:param conf: SmApi settings. :param conf: SmApi settings.
""" """
#conf.register_opts(keystone_auth_token._OPTS, group=OPT_GROUP_NAME) # conf.register_opts(keystone_auth_token._OPTS, group=OPT_GROUP_NAME)
keystone_auth_token.CONF = conf keystone_auth_token.CONF = conf
@ -69,6 +69,7 @@ class AdminAuthHook(hooks.PecanHook):
rejects the request otherwise. rejects the request otherwise.
""" """
def before(self, state): def before(self, state):
ctx = state.request.context ctx = state.request.context

View File

@ -64,4 +64,5 @@ def main():
print(e) print(e)
sys.exit(-4) sys.exit(-4)
main() main()

View File

@ -20,9 +20,9 @@ from sm_api.common import policy
auth_opts = [ auth_opts = [
cfg.StrOpt('auth_strategy', cfg.StrOpt('auth_strategy',
default='keystone', default='keystone',
help='Method to use for auth: noauth or keystone.'), help='Method to use for auth: noauth or keystone.'),
] ]
CONF = cfg.CONF CONF = cfg.CONF
CONF.register_opts(auth_opts) CONF.register_opts(auth_opts)

View File

@ -14,4 +14,4 @@ app = {'root': 'sm_api.api.controllers.root.RootController',
'debug': False, 'debug': False,
'enable_acl': True, 'enable_acl': True,
'acl_public_routes': ['/', '/v1'] 'acl_public_routes': ['/', '/v1']
} }

View File

@ -75,10 +75,10 @@ class Version1(wsme_types.Base):
v1.servicenode = [link.Link.make_link('self', v1.servicenode = [link.Link.make_link('self',
pecan.request.host_url, pecan.request.host_url,
'servicenode', ''), 'servicenode', ''),
link.Link.make_link('bookmark', link.Link.make_link('bookmark',
pecan.request.host_url, pecan.request.host_url,
'servicenode', '', 'servicenode', '',
bookmark=True)] bookmark=True)]
v1.sm_sda = [link.Link.make_link('self', v1.sm_sda = [link.Link.make_link('self',
pecan.request.host_url, pecan.request.host_url,
'sm_sda', ''), 'sm_sda', ''),
@ -103,4 +103,5 @@ class Controller(rest.RestController):
def get(self): def get(self):
return Version1.convert() return Version1.convert()
__all__ = Controller __all__ = Controller

View File

@ -9,8 +9,6 @@
# All Rights Reserved. # All Rights Reserved.
# #
import datetime
import wsme import wsme
from wsme import types as wsme_types from wsme import types as wsme_types
@ -50,6 +48,6 @@ class APIBase(wsme_types.Base):
# Unset non-required fields so they do not appear # Unset non-required fields so they do not appear
# in the message body # in the message body
obj_dict.update(dict((k, wsme.Unset) obj_dict.update(dict((k, wsme.Unset)
for k in obj_dict.keys() for k in obj_dict.keys()
if fields and k not in fields)) if fields and k not in fields))
return cls(**obj_dict) return cls(**obj_dict)

View File

@ -48,8 +48,8 @@ class Collection(base.APIBase):
resource_url = url or self._type resource_url = url or self._type
q_args = ''.join(['%s=%s&' % (key, kwargs[key]) for key in kwargs]) q_args = ''.join(['%s=%s&' % (key, kwargs[key]) for key in kwargs])
next_args = '?%(args)slimit=%(limit)d&marker=%(marker)s' % { next_args = '?%(args)slimit=%(limit)d&marker=%(marker)s' % {
'args': q_args, 'limit': limit, 'args': q_args, 'limit': limit,
'marker': self.collection[-1].uuid} 'marker': self.collection[-1].uuid}
return link.Link.make_link('next', pecan.request.host_url, return link.Link.make_link('next', pecan.request.host_url,
resource_url, next_args).href resource_url, next_args).href

View File

@ -4,8 +4,6 @@
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #
import json
import wsme
from wsme import types as wsme_types from wsme import types as wsme_types
import wsmeext.pecan as wsme_pecan import wsmeext.pecan as wsme_pecan
import pecan import pecan
@ -66,7 +64,7 @@ class Nodes(base.APIBase):
'ready_state'] 'ready_state']
fields = minimum_fields if not expand else None fields = minimum_fields if not expand else None
nodes = Nodes.from_rpc_object( nodes = Nodes.from_rpc_object(
rpc_nodes, fields) rpc_nodes, fields)
return nodes return nodes
@ -85,8 +83,8 @@ class NodesCollection(collection.Collection):
expand=False, **kwargs): expand=False, **kwargs):
collection = NodesCollection() collection = NodesCollection()
collection.nodes = [ collection.nodes = [
Nodes.convert_with_links(ch, expand) Nodes.convert_with_links(ch, expand)
for ch in nodes] for ch in nodes]
url = url or None url = url or None
collection.next = collection.get_next(limit, url=url, **kwargs) collection.next = collection.get_next(limit, url=url, **kwargs)
return collection return collection
@ -103,7 +101,7 @@ class NodesController(rest.RestController):
marker_obj = None marker_obj = None
if marker: if marker:
marker_obj = objects.sm_node.get_by_uuid( marker_obj = objects.sm_node.get_by_uuid(
pecan.request.context, marker) pecan.request.context, marker)
nodes = pecan.request.dbapi.sm_node_get_list(limit, nodes = pecan.request.dbapi.sm_node_get_list(limit,
marker_obj, marker_obj,
@ -118,7 +116,7 @@ class NodesController(rest.RestController):
except exception.ServerNotFound: except exception.ServerNotFound:
return None return None
return Nodes.convert_with_links(rpc_sg) return Nodes.convert_with_links(rpc_sg)
@wsme_pecan.wsexpose(NodesCollection, six.text_type, int, @wsme_pecan.wsexpose(NodesCollection, six.text_type, int,
six.text_type, six.text_type) six.text_type, six.text_type)
@ -132,8 +130,8 @@ class NodesController(rest.RestController):
sort_dir) sort_dir)
return NodesCollection.convert_with_links(nodes, limit, return NodesCollection.convert_with_links(nodes, limit,
sort_key=sort_key, sort_key=sort_key,
sort_dir=sort_dir) sort_dir=sort_dir)
@wsme_pecan.wsexpose(NodesCommandResult, six.text_type, @wsme_pecan.wsexpose(NodesCommandResult, six.text_type,
body=NodesCommand) body=NodesCommand)

View File

@ -4,8 +4,6 @@
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #
import json
import wsme
from wsme import types as wsme_types from wsme import types as wsme_types
import wsmeext.pecan as wsme_pecan import wsmeext.pecan as wsme_pecan
import pecan import pecan
@ -72,7 +70,7 @@ class ServiceGroup(base.APIBase):
fields = minimum_fields if not expand else None fields = minimum_fields if not expand else None
service_groups = ServiceGroup.from_rpc_object( service_groups = ServiceGroup.from_rpc_object(
rpc_service_groups, fields) rpc_service_groups, fields)
return service_groups return service_groups
@ -91,8 +89,8 @@ class ServiceGroupCollection(collection.Collection):
expand=False, **kwargs): expand=False, **kwargs):
collection = ServiceGroupCollection() collection = ServiceGroupCollection()
collection.service_groups = [ collection.service_groups = [
ServiceGroup.convert_with_links(ch, expand) ServiceGroup.convert_with_links(ch, expand)
for ch in service_groups] for ch in service_groups]
url = url or None url = url or None
collection.next = collection.get_next(limit, url=url, **kwargs) collection.next = collection.get_next(limit, url=url, **kwargs)
return collection return collection
@ -141,13 +139,13 @@ class ServiceGroupController(rest.RestController):
"""Retrieve list of servicegroups.""" """Retrieve list of servicegroups."""
service_groups = self._get_service_groups(marker, service_groups = self._get_service_groups(marker,
limit, limit,
sort_key, sort_key,
sort_dir) sort_dir)
return ServiceGroupCollection.convert_with_links(service_groups, limit, return ServiceGroupCollection.convert_with_links(service_groups, limit,
sort_key=sort_key, sort_key=sort_key,
sort_dir=sort_dir) sort_dir=sort_dir)
@wsme_pecan.wsexpose(ServiceGroupCommandResult, six.text_type, @wsme_pecan.wsexpose(ServiceGroupCommandResult, six.text_type,
body=ServiceGroupCommand) body=ServiceGroupCommand)

View File

@ -33,7 +33,6 @@ import json
from sm_api.api.controllers.v1 import base from sm_api.api.controllers.v1 import base
from sm_api.api.controllers.v1 import smc_api from sm_api.api.controllers.v1 import smc_api
from sm_api.openstack.common import log from sm_api.openstack.common import log
from sm_api.api.controllers.v1 import services
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
@ -226,7 +225,7 @@ class ServiceNodeController(rest.RestController):
# check whether hostname exists in nodes table # check whether hostname exists in nodes table
node_exists = False node_exists = False
sm_nodes = pecan.request.dbapi.sm_node_get_by_name(hostname) sm_nodes = pecan.request.dbapi.sm_node_get_by_name(hostname)
for sm_node in sm_nodes: for _sm_node in sm_nodes:
node_exists = True node_exists = True
return node_exists return node_exists
@ -339,7 +338,7 @@ class ServiceNodeController(rest.RestController):
# degraded or failure of A/A service on the target host # degraded or failure of A/A service on the target host
# would not stop swact # would not stop swact
sdm = self._sm_sdm_get(sm_sda.name, sdm = self._sm_sdm_get(sm_sda.name,
sm_sda.service_group_name) sm_sda.service_group_name)
if (self._is_aa_service_group(sdm)): if (self._is_aa_service_group(sdm)):
continue continue
@ -349,14 +348,14 @@ class ServiceNodeController(rest.RestController):
# or service only provisioned in the other host # or service only provisioned in the other host
# or service state are the same on both hosts # or service state are the same on both hosts
if SM_SERVICE_GROUP_STATE_ACTIVE != sm_sda.state \ if SM_SERVICE_GROUP_STATE_ACTIVE != sm_sda.state \
and SM_SERVICE_GROUP_STATE_STANDBY != sm_sda.state \ and SM_SERVICE_GROUP_STATE_STANDBY != sm_sda.state \
and sm_sda.service_group_name in origin_state \ and sm_sda.service_group_name in origin_state \
and origin_state[sm_sda.service_group_name] != sm_sda.state: and origin_state[sm_sda.service_group_name] != sm_sda.state:
check_result = ( check_result = (
"%s on %s is not ready to take service, " "%s on %s is not ready to take service, "
"service not in the active or standby " "service not in the active or standby "
"state" % (sm_sda.service_group_name, "state" % (sm_sda.service_group_name,
sm_sda.node_name)) sm_sda.node_name))
break break
# Verify that all the services are in the desired state on # Verify that all the services are in the desired state on
@ -481,7 +480,7 @@ class ServiceNodeController(rest.RestController):
rsvc = self.get_remote_svc(sda.node_name, service_name) rsvc = self.get_remote_svc(sda.node_name, service_name)
if (SM_SERVICE_STATE_ENABLED_ACTIVE == if (SM_SERVICE_STATE_ENABLED_ACTIVE ==
rsvc['desired_state'] and rsvc['desired_state'] and
SM_SERVICE_STATE_ENABLED_ACTIVE == rsvc['state']): SM_SERVICE_STATE_ENABLED_ACTIVE == rsvc['state']):
chk_list[sda.service_group_name].remove(service_name) chk_list[sda.service_group_name].remove(service_name)
all_good = True all_good = True
@ -495,7 +494,7 @@ class ServiceNodeController(rest.RestController):
target_services = [] target_services = []
for sda in sdas: for sda in sdas:
if (sda.node_name == hostname and if (sda.node_name == hostname and
sda.service_group_name in chk_list): sda.service_group_name in chk_list):
for service_name in chk_list[sda.service_group_name]: for service_name in chk_list[sda.service_group_name]:
LOG.info("checking %s on %s" % (service_name, hostname)) LOG.info("checking %s on %s" % (service_name, hostname))
rsvc = self.get_remote_svc(sda.node_name, service_name) rsvc = self.get_remote_svc(sda.node_name, service_name)
@ -503,7 +502,7 @@ class ServiceNodeController(rest.RestController):
continue continue
if (SM_SERVICE_STATE_ENABLED_ACTIVE == if (SM_SERVICE_STATE_ENABLED_ACTIVE ==
rsvc['desired_state'] and rsvc['desired_state'] and
SM_SERVICE_STATE_ENABLED_ACTIVE == rsvc['state']): SM_SERVICE_STATE_ENABLED_ACTIVE == rsvc['state']):
LOG.info("which is %s %s" % (rsvc['desired_state'], rsvc['state'])) LOG.info("which is %s %s" % (rsvc['desired_state'], rsvc['state']))
target_services.append(service_name) target_services.append(service_name)
LOG.info("services %s solely running on %s" % (','.join(target_services), hostname)) LOG.info("services %s solely running on %s" % (','.join(target_services), hostname))
@ -579,30 +578,30 @@ class ServiceNodeController(rest.RestController):
ack_avail = sm_ack_dict['SM_API_MSG_NODE_AVAIL'].lower() ack_avail = sm_ack_dict['SM_API_MSG_NODE_AVAIL'].lower()
LOG.info("sm-api _do_modify_command sm_ack_dict: %s ACK admin: " LOG.info("sm-api _do_modify_command sm_ack_dict: %s ACK admin: "
"%s oper: %s avail: %s." % (sm_ack_dict, ack_admin, "%s oper: %s avail: %s." % (sm_ack_dict, ack_admin,
ack_oper, ack_avail)) ack_oper, ack_avail))
# loose check on admin and oper only # loose check on admin and oper only
if (command.admin == ack_admin) and (command.oper == ack_oper): if (command.admin == ack_admin) and (command.oper == ack_oper):
return ServiceNodeCommandResult( return ServiceNodeCommandResult(
origin=sm_ack_dict['SM_API_MSG_ORIGIN'], origin=sm_ack_dict['SM_API_MSG_ORIGIN'],
hostname=sm_ack_dict['SM_API_MSG_NODE_NAME'], hostname=sm_ack_dict['SM_API_MSG_NODE_NAME'],
action=sm_ack_dict['SM_API_MSG_NODE_ACTION'], action=sm_ack_dict['SM_API_MSG_NODE_ACTION'],
admin=ack_admin, admin=ack_admin,
oper=ack_oper, oper=ack_oper,
avail=ack_avail, avail=ack_avail,
error_code=ERR_CODE_SUCCESS, error_code=ERR_CODE_SUCCESS,
error_msg="success") error_msg="success")
else: else:
result = ServiceNodeCommandResult( result = ServiceNodeCommandResult(
origin="sm", origin="sm",
hostname=hostname, hostname=hostname,
action=sm_ack_dict['SM_API_MSG_NODE_ACTION'], action=sm_ack_dict['SM_API_MSG_NODE_ACTION'],
admin=ack_admin, admin=ack_admin,
oper=ack_oper, oper=ack_oper,
avail=ack_avail, avail=ack_avail,
error_code=ERR_CODE_ACTION_FAILED, error_code=ERR_CODE_ACTION_FAILED,
error_details="action failed") error_details="action failed")
return wsme.api.Response(result, status_code=500) return wsme.api.Response(result, status_code=500)
else: else:
@ -613,7 +612,7 @@ class ServiceNodeController(rest.RestController):
try: try:
data = self._get_sm_node_state(hostname) data = self._get_sm_node_state(hostname)
except: except Exception as e:
LOG.exception("No entry in database for %s:" % hostname) LOG.exception("No entry in database for %s:" % hostname)
return ServiceNode(origin="sm", return ServiceNode(origin="sm",
hostname=hostname, hostname=hostname,

View File

@ -4,8 +4,6 @@
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #
import json
import wsme
from wsme import types as wsme_types from wsme import types as wsme_types
import wsmeext.pecan as wsme_pecan import wsmeext.pecan as wsme_pecan
import pecan import pecan
@ -65,7 +63,7 @@ class Services(base.APIBase):
minimum_fields = ['id', 'name', 'desired_state', 'state', 'status'] minimum_fields = ['id', 'name', 'desired_state', 'state', 'status']
fields = minimum_fields if not expand else None fields = minimum_fields if not expand else None
services = Services.from_rpc_object( services = Services.from_rpc_object(
rpc_services, fields) rpc_services, fields)
return services return services
@ -84,8 +82,8 @@ class ServicesCollection(collection.Collection):
expand=False, **kwargs): expand=False, **kwargs):
collection = ServicesCollection() collection = ServicesCollection()
collection.services = [ collection.services = [
Services.convert_with_links(ch, expand) Services.convert_with_links(ch, expand)
for ch in services] for ch in services]
url = url or None url = url or None
collection.next = collection.get_next(limit, url=url, **kwargs) collection.next = collection.get_next(limit, url=url, **kwargs)
return collection return collection
@ -102,12 +100,12 @@ class ServicesController(rest.RestController):
marker_obj = None marker_obj = None
if marker: if marker:
marker_obj = objects.service.get_by_uuid( marker_obj = objects.service.get_by_uuid(
pecan.request.context, marker) pecan.request.context, marker)
services = pecan.request.dbapi.sm_service_get_list(limit, services = pecan.request.dbapi.sm_service_get_list(limit,
marker_obj, marker_obj,
sort_key=sort_key, sort_key=sort_key,
sort_dir=sort_dir) sort_dir=sort_dir)
return services return services
@wsme_pecan.wsexpose(Services, six.text_type) @wsme_pecan.wsexpose(Services, six.text_type)
@ -134,9 +132,9 @@ class ServicesController(rest.RestController):
"""Retrieve list of services.""" """Retrieve list of services."""
services = self._get_services(marker, services = self._get_services(marker,
limit, limit,
sort_key, sort_key,
sort_dir) sort_dir)
return ServicesCollection.convert_with_links(services, limit, return ServicesCollection.convert_with_links(services, limit,
sort_key=sort_key, sort_key=sort_key,

View File

@ -4,8 +4,6 @@
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #
import json
import wsme
from wsme import types as wsme_types from wsme import types as wsme_types
import wsmeext.pecan as wsme_pecan import wsmeext.pecan as wsme_pecan
import pecan import pecan
@ -71,7 +69,7 @@ class SmSda(base.APIBase):
'state', 'status', 'condition'] 'state', 'status', 'condition']
fields = minimum_fields if not expand else None fields = minimum_fields if not expand else None
sm_sda = SmSda.from_rpc_object( sm_sda = SmSda.from_rpc_object(
rpc_sm_sda, fields) rpc_sm_sda, fields)
return sm_sda return sm_sda
@ -90,8 +88,8 @@ class SmSdaCollection(collection.Collection):
expand=False, **kwargs): expand=False, **kwargs):
collection = SmSdaCollection() collection = SmSdaCollection()
collection.sm_sda = [ collection.sm_sda = [
SmSda.convert_with_links(ch, expand) SmSda.convert_with_links(ch, expand)
for ch in sm_sda] for ch in sm_sda]
url = url or None url = url or None
collection.next = collection.get_next(limit, url=url, **kwargs) collection.next = collection.get_next(limit, url=url, **kwargs)
return collection return collection

View File

@ -91,19 +91,19 @@ def sm_api_notify(sm_dict):
break break
else: else:
LOG.debug(_("sm-api mismatch seqno tx message: %s rx message: %s " % (sm_buf, sm_ack))) LOG.debug(_("sm-api mismatch seqno tx message: %s rx message: %s " % (sm_buf, sm_ack)))
except: except Exception as e:
LOG.exception(_("sm-api bad rx message: %s" % sm_ack)) LOG.exception(_("sm-api bad rx message: %s" % sm_ack))
except socket.error as e: except socket.error as e:
LOG.exception(_("sm-api socket error: %s on %s") % (e, sm_buf)) LOG.exception(_("sm-api socket error: %s on %s") % (e, sm_buf))
sm_ack_dict = { sm_ack_dict = {
'SM_API_MSG_TYPE': "unknown_set_node", 'SM_API_MSG_TYPE': "unknown_set_node",
'SM_API_MSG_NODE_ACTION': sm_dict['SM_API_MSG_NODE_ACTION'], 'SM_API_MSG_NODE_ACTION': sm_dict['SM_API_MSG_NODE_ACTION'],
'SM_API_MSG_ORIGIN': "sm", 'SM_API_MSG_ORIGIN': "sm",
'SM_API_MSG_NODE_NAME': sm_dict['SM_API_MSG_NODE_NAME'], 'SM_API_MSG_NODE_NAME': sm_dict['SM_API_MSG_NODE_NAME'],
'SM_API_MSG_NODE_ADMIN': "unknown", 'SM_API_MSG_NODE_ADMIN': "unknown",
'SM_API_MSG_NODE_OPER': "unknown", 'SM_API_MSG_NODE_OPER': "unknown",
'SM_API_MSG_NODE_AVAIL': "unknown"} 'SM_API_MSG_NODE_AVAIL': "unknown"}
return sm_ack_dict return sm_ack_dict
@ -116,46 +116,46 @@ def sm_api_notify(sm_dict):
try: try:
sm_ack_list = sm_ack.split(",") sm_ack_list = sm_ack.split(",")
sm_ack_dict = { sm_ack_dict = {
'SM_API_MSG_VERSION': sm_ack_list[SM_API_MSG_VERSION_FIELD], 'SM_API_MSG_VERSION': sm_ack_list[SM_API_MSG_VERSION_FIELD],
'SM_API_MSG_REVISION': sm_ack_list[SM_API_MSG_REVISION_FIELD], 'SM_API_MSG_REVISION': sm_ack_list[SM_API_MSG_REVISION_FIELD],
'SM_API_MSG_SEQNO': sm_ack_list[SM_API_MSG_SEQNO_FIELD], 'SM_API_MSG_SEQNO': sm_ack_list[SM_API_MSG_SEQNO_FIELD],
'SM_API_MSG_TYPE': sm_ack_list[SM_API_MSG_TYPE_FIELD], 'SM_API_MSG_TYPE': sm_ack_list[SM_API_MSG_TYPE_FIELD],
'SM_API_MSG_NODE_ACTION': sm_ack_list[SM_API_MSG_NODE_ACTION_FIELD], 'SM_API_MSG_NODE_ACTION': sm_ack_list[SM_API_MSG_NODE_ACTION_FIELD],
'SM_API_MSG_ORIGIN': sm_ack_list[SM_API_MSG_ORIGIN_FIELD], 'SM_API_MSG_ORIGIN': sm_ack_list[SM_API_MSG_ORIGIN_FIELD],
'SM_API_MSG_NODE_NAME': sm_ack_list[SM_API_MSG_NODE_NAME_FIELD], 'SM_API_MSG_NODE_NAME': sm_ack_list[SM_API_MSG_NODE_NAME_FIELD],
'SM_API_MSG_NODE_ADMIN': sm_ack_list[SM_API_MSG_NODE_ADMIN_FIELD], 'SM_API_MSG_NODE_ADMIN': sm_ack_list[SM_API_MSG_NODE_ADMIN_FIELD],
'SM_API_MSG_NODE_OPER': sm_ack_list[SM_API_MSG_NODE_OPER_FIELD], 'SM_API_MSG_NODE_OPER': sm_ack_list[SM_API_MSG_NODE_OPER_FIELD],
'SM_API_MSG_NODE_AVAIL': sm_ack_list[SM_API_MSG_NODE_AVAIL_FIELD] 'SM_API_MSG_NODE_AVAIL': sm_ack_list[SM_API_MSG_NODE_AVAIL_FIELD]
} }
except: except Exception as e:
LOG.exception(_("sm-api ack message error: %s" % sm_ack)) LOG.exception(_("sm-api ack message error: %s" % sm_ack))
sm_ack_dict = { sm_ack_dict = {
'SM_API_MSG_TYPE': "unknown_set_node", 'SM_API_MSG_TYPE': "unknown_set_node",
'SM_API_MSG_ORIGIN': "sm", 'SM_API_MSG_ORIGIN': "sm",
'SM_API_MSG_NODE_NAME': sm_dict['SM_API_MSG_NODE_NAME'], 'SM_API_MSG_NODE_NAME': sm_dict['SM_API_MSG_NODE_NAME'],
'SM_API_MSG_NODE_ADMIN': "unknown", 'SM_API_MSG_NODE_ADMIN': "unknown",
'SM_API_MSG_NODE_OPEsR': "unknown", 'SM_API_MSG_NODE_OPEsR': "unknown",
'SM_API_MSG_NODE_AVAIL': "unknown" 'SM_API_MSG_NODE_AVAIL': "unknown"
} }
return sm_ack_dict return sm_ack_dict
def sm_api_set_node_state(origin, hostname, action, admin, avail, oper, seqno): def sm_api_set_node_state(origin, hostname, action, admin, avail, oper, seqno):
sm_ack_dict = {} sm_ack_dict = {}
sm_dict = {'SM_API_MSG_TYPE': SM_API_MSG_TYPE_SET_NODE, sm_dict = {'SM_API_MSG_TYPE': SM_API_MSG_TYPE_SET_NODE,
'SM_API_MSG_ORIGIN': origin, 'SM_API_MSG_ORIGIN': origin,
'SM_API_MSG_NODE_NAME': hostname, 'SM_API_MSG_NODE_NAME': hostname,
'SM_API_MSG_NODE_ACTION': action, 'SM_API_MSG_NODE_ACTION': action,
'SM_API_MSG_NODE_ADMIN': admin, 'SM_API_MSG_NODE_ADMIN': admin,
'SM_API_MSG_NODE_OPER': oper, 'SM_API_MSG_NODE_OPER': oper,
'SM_API_MSG_NODE_AVAIL': avail, 'SM_API_MSG_NODE_AVAIL': avail,
'SM_API_MSG_SEQNO': seqno, 'SM_API_MSG_SEQNO': seqno,
} }
sm_ack_dict = sm_api_notify(sm_dict) sm_ack_dict = sm_api_notify(sm_dict)

View File

@ -61,14 +61,14 @@ def validate_patch(patch):
if not isinstance(p, dict) or \ if not isinstance(p, dict) or \
any(key for key in ["path", "op"] if key not in p): any(key for key in ["path", "op"] if key not in p):
raise wsme.exc.ClientSideError(_("Invalid patch format: %s") raise wsme.exc.ClientSideError(_("Invalid patch format: %s")
% str(p)) % str(p))
path = p["path"] path = p["path"]
op = p["op"] op = p["op"]
if op not in ["add", "replace", "remove"]: if op not in ["add", "replace", "remove"]:
raise wsme.exc.ClientSideError(_("Operation not supported: %s") raise wsme.exc.ClientSideError(_("Operation not supported: %s")
% op) % op)
if not path_pattern.match(path): if not path_pattern.match(path):
raise wsme.exc.ClientSideError(_("Invalid path: %s") % path) raise wsme.exc.ClientSideError(_("Invalid path: %s") % path)
@ -78,7 +78,7 @@ def validate_patch(patch):
raise wsme.exc.ClientSideError(_("Adding an additional " raise wsme.exc.ClientSideError(_("Adding an additional "
"attribute (%s) to the " "attribute (%s) to the "
"resource is not allowed") "resource is not allowed")
% path) % path)
class ValidTypes(wsme.types.UserType): class ValidTypes(wsme.types.UserType):

View File

@ -8,7 +8,6 @@
Hooks Hooks
""" """
import sqlite3
from pecan import hooks from pecan import hooks
from sm_api.common import context from sm_api.common import context
@ -60,6 +59,7 @@ class ContextHook(hooks.PecanHook):
or admin substring. Otherwise it is set to False. or admin substring. Otherwise it is set to False.
""" """
def __init__(self, public_api_routes): def __init__(self, public_api_routes):
self.public_api_routes = public_api_routes self.public_api_routes = public_api_routes
super(ContextHook, self).__init__() super(ContextHook, self).__init__()

View File

@ -20,6 +20,7 @@ class AuthTokenMiddleware(auth_token.AuthProtocol):
for public routes in the API. for public routes in the API.
""" """
def __init__(self, app, conf, public_api_routes=[]): def __init__(self, app, conf, public_api_routes=[]):
self.smapi_app = app self.smapi_app = app
self.public_api_routes = set(public_api_routes) self.public_api_routes = set(public_api_routes)

View File

@ -38,6 +38,7 @@ LOG = log.getLogger(__name__)
class ParsableErrorMiddleware(object): class ParsableErrorMiddleware(object):
"""Replace error body with something the client can parse. """Replace error body with something the client can parse.
""" """
def __init__(self, app): def __init__(self, app):
self.app = app self.app = app
@ -75,7 +76,7 @@ class ParsableErrorMiddleware(object):
req = webob.Request(environ) req = webob.Request(environ)
if (req.accept.best_match( if (req.accept.best_match(
['application/json', 'application/xml']) == ['application/json', 'application/xml']) ==
'application/xml'): 'application/xml'):
try: try:
# simple check xml is valid # simple check xml is valid
body = [et.ElementTree.tostring( body = [et.ElementTree.tostring(

View File

@ -73,7 +73,7 @@ class ProcessExecutionError(IOError):
def _cleanse_dict(original): def _cleanse_dict(original):
"""Strip all admin_password, new_pass, rescue_pass keys from a dict.""" """Strip all admin_password, new_pass, rescue_pass keys from a dict."""
return dict((k, v) for k, v in original.items() if not "_pass" in k) return dict((k, v) for k, v in original.items() if "_pass" not in k)
def wrap_exception(notifier=None, publisher_id=None, event_type=None, def wrap_exception(notifier=None, publisher_id=None, event_type=None,
@ -319,12 +319,12 @@ class NodeInUse(SmApiException):
class NodeInWrongPowerState(SmApiException): class NodeInWrongPowerState(SmApiException):
message = _("Can not change instance association while node " message = _("Can not change instance association while node "
"%(node)s is in power state %(pstate)s.") "%(node)s is in power state %(pstate)s.")
class NodeNotConfigured(SmApiException): class NodeNotConfigured(SmApiException):
message = _("Can not change power state because node %(node)s " message = _("Can not change power state because node %(node)s "
"is not fully configured.") "is not fully configured.")
class ChassisNotEmpty(SmApiException): class ChassisNotEmpty(SmApiException):

View File

@ -37,7 +37,7 @@ policy_opts = [
cfg.StrOpt('policy_default_rule', cfg.StrOpt('policy_default_rule',
default='default', default='default',
help=_('Rule checked when requested rule is not found')), help=_('Rule checked when requested rule is not found')),
] ]
CONF = cfg.CONF CONF = cfg.CONF
CONF.register_opts(policy_opts) CONF.register_opts(policy_opts)

View File

@ -36,7 +36,7 @@ def getcallargs(function, *args, **kwargs):
keyed_args.update(kwargs) keyed_args.update(kwargs)
#NOTE(alaski) the implicit 'self' or 'cls' argument shows up in # NOTE(alaski) the implicit 'self' or 'cls' argument shows up in
# argnames but not in args or kwargs. Uses 'in' rather than '==' because # argnames but not in args or kwargs. Uses 'in' rather than '==' because
# some tests use 'self2'. # some tests use 'self2'.
if 'self' in argnames[0] or 'cls' == argnames[0]: if 'self' in argnames[0] or 'cls' == argnames[0]:

View File

@ -29,7 +29,7 @@ import errno
import hashlib import hashlib
import json import json
import os import os
#import paramiko # import paramiko
import random import random
import re import re
import shutil import shutil
@ -120,7 +120,7 @@ def execute(*cmd, **kwargs):
if len(kwargs): if len(kwargs):
raise exception.SmApiException(_('Got unknown keyword args ' raise exception.SmApiException(_('Got unknown keyword args '
'to utils.execute: %r') % kwargs) 'to utils.execute: %r') % kwargs)
if run_as_root and os.geteuid() != 0: if run_as_root and os.geteuid() != 0:
cmd = ['sudo', 'sm_api-rootwrap', CONF.rootwrap_config] + list(cmd) cmd = ['sudo', 'sm_api-rootwrap', CONF.rootwrap_config] + list(cmd)
@ -158,10 +158,10 @@ def execute(*cmd, **kwargs):
if not ignore_exit_code and _returncode not in check_exit_code: if not ignore_exit_code and _returncode not in check_exit_code:
(stdout, stderr) = result (stdout, stderr) = result
raise exception.ProcessExecutionError( raise exception.ProcessExecutionError(
exit_code=_returncode, exit_code=_returncode,
stdout=stdout, stdout=stdout,
stderr=stderr, stderr=stderr,
cmd=' '.join(cmd)) cmd=' '.join(cmd))
return result return result
except exception.ProcessExecutionError: except exception.ProcessExecutionError:
if not attempts: if not attempts:
@ -376,7 +376,7 @@ def is_valid_cidr(address):
ip_segment = address.split('/') ip_segment = address.split('/')
if (len(ip_segment) <= 1 or if (len(ip_segment) <= 1 or
ip_segment[1] == ''): ip_segment[1] == ''):
return False return False
return True return True
@ -530,7 +530,7 @@ def mkfs(fs, path, label=None):
args = ['mkswap'] args = ['mkswap']
else: else:
args = ['mkfs', '-t', fs] args = ['mkfs', '-t', fs]
#add -F to force no interactive execute on non-block device. # add -F to force no interactive execute on non-block device.
if fs in ('ext3', 'ext4'): if fs in ('ext3', 'ext4'):
args.extend(['-F']) args.extend(['-F'])
if label: if label:
@ -548,13 +548,13 @@ def mkfs(fs, path, label=None):
# or reimplement as a common lib, # or reimplement as a common lib,
# or make a driver that doesn't need to do this. # or make a driver that doesn't need to do this.
# #
#def cache_image(context, target, image_id, user_id, project_id): # def cache_image(context, target, image_id, user_id, project_id):
# if not os.path.exists(target): # if not os.path.exists(target):
# libvirt_utils.fetch_image(context, target, image_id, # libvirt_utils.fetch_image(context, target, image_id,
# user_id, project_id) # user_id, project_id)
# #
# #
#def inject_into_image(image, key, net, metadata, admin_password, # def inject_into_image(image, key, net, metadata, admin_password,
# files, partition, use_cow=False): # files, partition, use_cow=False):
# try: # try:
# disk_api.inject_data(image, key, net, metadata, admin_password, # disk_api.inject_data(image, key, net, metadata, admin_password,
@ -572,7 +572,7 @@ def unlink_without_raise(path):
return return
else: else:
LOG.warn(_("Failed to unlink %(path)s, error: %(e)s") % LOG.warn(_("Failed to unlink %(path)s, error: %(e)s") %
{'path': path, 'e': e}) {'path': path, 'e': e})
def rmtree_without_raise(path): def rmtree_without_raise(path):
@ -581,7 +581,7 @@ def rmtree_without_raise(path):
shutil.rmtree(path) shutil.rmtree(path)
except OSError as e: except OSError as e:
LOG.warn(_("Failed to remove dir %(path)s, error: %(e)s") % LOG.warn(_("Failed to remove dir %(path)s, error: %(e)s") %
{'path': path, 'e': e}) {'path': path, 'e': e})
def write_to_file(path, contents): def write_to_file(path, contents):
@ -598,7 +598,7 @@ def create_link_without_raise(source, link):
else: else:
LOG.warn(_("Failed to create symlink from %(source)s to %(link)s" LOG.warn(_("Failed to create symlink from %(source)s to %(link)s"
", error: %(e)s") % ", error: %(e)s") %
{'source': source, 'link': link, 'e': e}) {'source': source, 'link': link, 'e': e})
def safe_rstrip(value, chars=None): def safe_rstrip(value, chars=None):
@ -659,13 +659,13 @@ def notify_mtc_and_recv(mtc_address, mtc_port, idict):
try: try:
mtc_response_dict = json.loads(mtc_response) mtc_response_dict = json.loads(mtc_response)
LOG.warning("Mtc Response: %s" % mtc_response_dict) LOG.warning("Mtc Response: %s" % mtc_response_dict)
except: except Exception as e:
LOG.exception("Mtc Response Error: %s" % mtc_response) LOG.exception("Mtc Response Error: %s" % mtc_response)
pass pass
except socket.error as e: except socket.error as e:
LOG.exception(_("Socket Error: %s on %s:%s for %s") % (e, LOG.exception(_("Socket Error: %s on %s:%s for %s") % (e,
mtc_address, mtc_port, serialized_idict)) mtc_address, mtc_port, serialized_idict))
# if e not in [errno.EWOULDBLOCK, errno.EINTR]: # if e not in [errno.EWOULDBLOCK, errno.EINTR]:
# raise exception.CommunicationError(_( # raise exception.CommunicationError(_(
# "Socket error: address=%s port=%s error=%s ") % ( # "Socket error: address=%s port=%s error=%s ") % (

View File

@ -157,7 +157,7 @@ class Connection(object):
@abc.abstractmethod @abc.abstractmethod
def sm_service_get_list(self, limit=None, marker=None, def sm_service_get_list(self, limit=None, marker=None,
sort_key=None, sort_dir=None): sort_key=None, sort_dir=None):
"""Return a list of services. """Return a list of services.
:param limit: Maximum number of services to return. :param limit: Maximum number of services to return.
@ -174,7 +174,6 @@ class Connection(object):
:param name: The name of the services. :param name: The name of the services.
""" """
@abc.abstractmethod @abc.abstractmethod
def sm_service_group_members_get_list(self, service_group_name): def sm_service_group_members_get_list(self, service_group_name):
"""Return service group members in a service group """Return service group members in a service group

View File

@ -32,9 +32,9 @@ CONF.import_opt('backend',
group='database') group='database')
IMPL = utils.LazyPluggable( IMPL = utils.LazyPluggable(
pivot='backend', pivot='backend',
config_group='database', config_group='database',
sqlalchemy='sm_api.db.sqlalchemy.migration') sqlalchemy='sm_api.db.sqlalchemy.migration')
INIT_VERSION = 0 INIT_VERSION = 0

View File

@ -31,7 +31,6 @@ from sm_api.common import utils
from sm_api.db import api from sm_api.db import api
from sm_api.db.sqlalchemy import models from sm_api.db.sqlalchemy import models
from sm_api import objects from sm_api import objects
from sm_api.openstack.common.db import exception as db_exc
from sm_api.openstack.common.db.sqlalchemy import session as db_session from sm_api.openstack.common.db.sqlalchemy import session as db_session
from sm_api.openstack.common.db.sqlalchemy import utils as db_utils from sm_api.openstack.common.db.sqlalchemy import utils as db_utils
from sm_api.openstack.common import log from sm_api.openstack.common import log
@ -169,8 +168,7 @@ class Connection(api.Connection):
@objects.objectify(objects.service) @objects.objectify(objects.service)
def iservice_get_by_name(self, name): def iservice_get_by_name(self, name):
result = model_query(models.service, read_deleted="no").\ result = model_query(models.service, read_deleted="no").\
filter_by(name=name) filter_by(name=name)
# first() since want a list
if not result: if not result:
raise exception.NodeNotFound(node=name) raise exception.NodeNotFound(node=name)
@ -211,7 +209,7 @@ class Connection(api.Connection):
@objects.objectify(objects.sm_node) @objects.objectify(objects.sm_node)
def sm_node_get_list(self, limit=None, marker=None, def sm_node_get_list(self, limit=None, marker=None,
sort_key=None, sort_dir=None): sort_key=None, sort_dir=None):
return _paginate_query(models.sm_node, limit, marker, return _paginate_query(models.sm_node, limit, marker,
sort_key, sort_dir) sort_key, sort_dir)
@ -231,8 +229,7 @@ class Connection(api.Connection):
@objects.objectify(objects.sm_node) @objects.objectify(objects.sm_node)
def sm_node_get_by_name(self, name): def sm_node_get_by_name(self, name):
result = model_query(models.sm_node, read_deleted="no").\ result = model_query(models.sm_node, read_deleted="no").\
filter_by(name=name) filter_by(name=name)
# first() since want a list
if not result: if not result:
raise exception.NodeNotFound(node=name) raise exception.NodeNotFound(node=name)
@ -256,21 +253,20 @@ class Connection(api.Connection):
@objects.objectify(objects.service) @objects.objectify(objects.service)
def sm_service_get_list(self, limit=None, marker=None, def sm_service_get_list(self, limit=None, marker=None,
sort_key=None, sort_dir=None): sort_key=None, sort_dir=None):
return _paginate_query(models.service, limit, marker, return _paginate_query(models.service, limit, marker,
sort_key, sort_dir) sort_key, sort_dir)
@objects.objectify(objects.service) @objects.objectify(objects.service)
def sm_service_get_by_name(self, name): def sm_service_get_by_name(self, name):
result = model_query(models.service, read_deleted="no").\ result = model_query(models.service, read_deleted="no").\
filter_by(name=name) filter_by(name=name)
if not result: if not result:
raise exception.ServiceNotFound(service=name) raise exception.ServiceNotFound(service=name)
return result return result
@objects.objectify(objects.service_group_member) @objects.objectify(objects.service_group_member)
def sm_service_group_members_get_list(self, service_group_name): def sm_service_group_members_get_list(self, service_group_name):
result = model_query(models.sm_service_group_member, result = model_query(models.sm_service_group_member,

View File

@ -9,9 +9,7 @@
# #
from sqlalchemy import Column, MetaData, String, Table, UniqueConstraint from sqlalchemy import Column, MetaData, String, Table, UniqueConstraint
from sqlalchemy import Boolean, Integer, Enum, Text, ForeignKey, DateTime from sqlalchemy import Integer, Text, ForeignKey, DateTime
from sqlalchemy import Index
from sqlalchemy.dialects import postgresql
ENGINE = 'InnoDB' ENGINE = 'InnoDB'
CHARSET = 'utf8' CHARSET = 'utf8'
@ -70,5 +68,5 @@ def upgrade(migrate_engine):
def downgrade(migrate_engine): def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from Initial is unsupported.') raise NotImplementedError('Downgrade from Initial is unsupported.')
#t = Table('i_disk', meta, autoload=True) # t = Table('i_disk', meta, autoload=True)
#t.drop() # t.drop()

View File

@ -50,7 +50,7 @@ def patched_with_engine(f, *a, **kw):
# on that version or higher, this can be removed # on that version or higher, this can be removed
MIN_PKG_VERSION = dist_version.StrictVersion('0.7.3') MIN_PKG_VERSION = dist_version.StrictVersion('0.7.3')
if (not hasattr(migrate, '__version__') or if (not hasattr(migrate, '__version__') or
dist_version.StrictVersion(migrate.__version__) < MIN_PKG_VERSION): dist_version.StrictVersion(migrate.__version__) < MIN_PKG_VERSION):
migrate_util.with_engine = patched_with_engine migrate_util.with_engine = patched_with_engine

View File

@ -29,9 +29,8 @@ import json
from six.moves.urllib.parse import urlparse from six.moves.urllib.parse import urlparse
from oslo_config import cfg from oslo_config import cfg
from sqlalchemy import Column, ForeignKey, Integer, Boolean from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy import Enum, UniqueConstraint, String from sqlalchemy import String
from sqlalchemy import Index
from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.types import TypeDecorator, VARCHAR from sqlalchemy.types import TypeDecorator, VARCHAR

View File

@ -50,10 +50,10 @@ sm_node = smo_node.sm_node
service_group_member = smo_sgm.service_group_member service_group_member = smo_sgm.service_group_member
__all__ = ( __all__ = (
service_groups, service_groups,
service_group_member, service_group_member,
service, service,
sm_sdm, sm_sdm,
sm_sda, sm_sda,
sm_node, sm_node,
objectify) objectify)

View File

@ -106,6 +106,7 @@ def remotable_classmethod(fn):
# "orphaned" and remotable methods cannot be called. # "orphaned" and remotable methods cannot be called.
def remotable(fn): def remotable(fn):
"""Decorator for remotable object methods.""" """Decorator for remotable object methods."""
def wrapper(self, *args, **kwargs): def wrapper(self, *args, **kwargs):
ctxt = self._context ctxt = self._context
try: try:
@ -337,7 +338,8 @@ class Sm_apiObject(object):
name in self.obj_extra_fields): name in self.obj_extra_fields):
yield name, getattr(self, name) yield name, getattr(self, name)
items = lambda self: list(self.items()) def items(self):
return list(self.items())
def __getitem__(self, name): def __getitem__(self, name):
"""For backwards-compatibility with dict-based objects. """For backwards-compatibility with dict-based objects.
@ -377,8 +379,8 @@ class Sm_apiObject(object):
def as_dict(self): def as_dict(self):
return dict((k, getattr(self, k)) return dict((k, getattr(self, k))
for k in self.fields for k in self.fields
if hasattr(self, k)) if hasattr(self, k))
@classmethod @classmethod
def get_defaults(cls): def get_defaults(cls):
@ -398,7 +400,7 @@ class ObjectListBase(object):
""" """
fields = { fields = {
'objects': list, 'objects': list,
} }
def __iter__(self): def __iter__(self):
"""List iterator interface.""" """List iterator interface."""

View File

@ -12,18 +12,19 @@ from sm_api.db import api as db_api
from sm_api.objects import base from sm_api.objects import base
from sm_api.objects import utils from sm_api.objects import utils
class sm_node(base.Sm_apiObject): class sm_node(base.Sm_apiObject):
dbapi = db_api.get_instance() dbapi = db_api.get_instance()
fields = { fields = {
'id': int, 'id': int,
'name': utils.str_or_none, 'name': utils.str_or_none,
'administrative_state': utils.str_or_none, 'administrative_state': utils.str_or_none,
'operational_state': utils.str_or_none, 'operational_state': utils.str_or_none,
'availability_status': utils.str_or_none, 'availability_status': utils.str_or_none,
'ready_state': utils.str_or_none, 'ready_state': utils.str_or_none,
} }
@staticmethod @staticmethod
def _from_db_object(server, db_server): def _from_db_object(server, db_server):

View File

@ -18,20 +18,20 @@ class sm_sda(base.Sm_apiObject):
dbapi = db_api.get_instance() dbapi = db_api.get_instance()
fields = { fields = {
'id': int, 'id': int,
'uuid': utils.str_or_none, 'uuid': utils.str_or_none,
# 'deleted': utils.str_or_none, # 'deleted': utils.str_or_none,
# 'created_at': utils.datetime_str_or_none, # 'created_at': utils.datetime_str_or_none,
# 'updated_at': utils.datetime_str_or_none, # 'updated_at': utils.datetime_str_or_none,
'name': utils.str_or_none, 'name': utils.str_or_none,
'node_name': utils.str_or_none, 'node_name': utils.str_or_none,
'service_group_name': utils.str_or_none, 'service_group_name': utils.str_or_none,
'state': utils.str_or_none, 'state': utils.str_or_none,
'desired_state': utils.str_or_none, 'desired_state': utils.str_or_none,
'status': utils.str_or_none, 'status': utils.str_or_none,
'condition': utils.str_or_none, 'condition': utils.str_or_none,
} }
@staticmethod @staticmethod
def _from_db_object(server, db_server): def _from_db_object(server, db_server):

View File

@ -18,13 +18,13 @@ class sm_sdm(base.Sm_apiObject):
dbapi = db_api.get_instance() dbapi = db_api.get_instance()
fields = { fields = {
'id': int, 'id': int,
'name': utils.str_or_none, 'name': utils.str_or_none,
'service_group_name': utils.str_or_none, 'service_group_name': utils.str_or_none,
'redundancy_model': utils.str_or_none, 'redundancy_model': utils.str_or_none,
'n_active': int, 'n_active': int,
'm_standby': int, 'm_standby': int,
} }
@staticmethod @staticmethod
def _from_db_object(server, db_server): def _from_db_object(server, db_server):

View File

@ -18,12 +18,12 @@ class service(base.Sm_apiObject):
dbapi = db_api.get_instance() dbapi = db_api.get_instance()
fields = { fields = {
'id': int, 'id': int,
'name': utils.str_or_none, 'name': utils.str_or_none,
'desired_state': utils.str_or_none, 'desired_state': utils.str_or_none,
'state': utils.str_or_none, 'state': utils.str_or_none,
'status': utils.str_or_none, 'status': utils.str_or_none,
} }
@staticmethod @staticmethod
def _from_db_object(server, db_server): def _from_db_object(server, db_server):

View File

@ -18,16 +18,16 @@ class service_groups(base.Sm_apiObject):
dbapi = db_api.get_instance() dbapi = db_api.get_instance()
fields = { fields = {
'id': utils.int_or_none, 'id': utils.int_or_none,
# 'uuid': utils.str_or_none, # 'uuid': utils.str_or_none,
# 'deleted': utils.str_or_none, # 'deleted': utils.str_or_none,
# 'created_at': utils.datetime_str_or_none, # 'created_at': utils.datetime_str_or_none,
# 'updated_at': utils.datetime_str_or_none, # 'updated_at': utils.datetime_str_or_none,
'name': utils.str_or_none, 'name': utils.str_or_none,
'state': utils.str_or_none, 'state': utils.str_or_none,
'status': utils.str_or_none, 'status': utils.str_or_none,
} }
@staticmethod @staticmethod
def _from_db_object(server, db_server): def _from_db_object(server, db_server):

View File

@ -18,11 +18,11 @@ class service_group_member(base.Sm_apiObject):
dbapi = db_api.get_instance() dbapi = db_api.get_instance()
fields = { fields = {
'id': utils.int_or_none, 'id': utils.int_or_none,
'name': utils.str_or_none, 'name': utils.str_or_none,
'service_name': utils.str_or_none, 'service_name': utils.str_or_none,
'service_failure_impact': utils.str_or_none 'service_failure_impact': utils.str_or_none
} }
@staticmethod @staticmethod
def _from_db_object(server, db_server): def _from_db_object(server, db_server):

View File

@ -105,6 +105,7 @@ def nested_object_or_none(objclass):
def dt_serializer(name): def dt_serializer(name):
"""Return a datetime serializer for a named attribute.""" """Return a datetime serializer for a named attribute."""
def serializer(self, name=name): def serializer(self, name=name):
if getattr(self, name) is not None: if getattr(self, name) is not None:
return timeutils.isotime(getattr(self, name)) return timeutils.isotime(getattr(self, name))

View File

@ -120,7 +120,7 @@ def _import_module(mod_str):
def _is_in_group(opt, group): def _is_in_group(opt, group):
"Check if opt is in group." "Check if opt is in group."
for key, value in group._opts.items(): for _key, value in group._opts.items():
if value['opt'] == opt: if value['opt'] == opt:
return True return True
return False return False
@ -132,7 +132,7 @@ def _guess_groups(opt, mod_obj):
return 'DEFAULT' return 'DEFAULT'
# what other groups is it in? # what other groups is it in?
for key, value in cfg.CONF.items(): for _key, value in cfg.CONF.items():
if isinstance(value, cfg.CONF.GroupAttr): if isinstance(value, cfg.CONF.GroupAttr):
if _is_in_group(opt, value._group): if _is_in_group(opt, value._group):
return value._group.name return value._group.name
@ -254,5 +254,6 @@ def main():
sys.exit(0) sys.exit(0)
generate(sys.argv[1:]) generate(sys.argv[1:])
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@ -100,7 +100,7 @@ class DBAPI(object):
def __getattr__(self, key): def __getattr__(self, key):
backend = self.__backend or self.__get_backend() backend = self.__backend or self.__get_backend()
attr = getattr(backend, key) attr = getattr(backend, key)
if not self.__use_tpool or not hasattr(attr, '__call__'): if not self.__use_tpool or not callable(attr):
return attr return attr
def tpool_wrapper(*args, **kwargs): def tpool_wrapper(*args, **kwargs):

View File

@ -25,6 +25,7 @@ from sm_api.openstack.common.gettextutils import _ # noqa
class DBError(Exception): class DBError(Exception):
"""Wraps an implementation specific exception.""" """Wraps an implementation specific exception."""
def __init__(self, inner_exception=None): def __init__(self, inner_exception=None):
self.inner_exception = inner_exception self.inner_exception = inner_exception
super(DBError, self).__init__(str(inner_exception)) super(DBError, self).__init__(str(inner_exception))
@ -32,6 +33,7 @@ class DBError(Exception):
class DBDuplicateEntry(DBError): class DBDuplicateEntry(DBError):
"""Wraps an implementation specific exception.""" """Wraps an implementation specific exception."""
def __init__(self, columns=[], inner_exception=None): def __init__(self, columns=[], inner_exception=None):
self.columns = columns self.columns = columns
super(DBDuplicateEntry, self).__init__(inner_exception) super(DBDuplicateEntry, self).__init__(inner_exception)
@ -49,6 +51,7 @@ class DBInvalidUnicodeParameter(Exception):
class DbMigrationError(DBError): class DbMigrationError(DBError):
"""Wraps migration specific exception.""" """Wraps migration specific exception."""
def __init__(self, message=None): def __init__(self, message=None):
super(DbMigrationError, self).__init__(str(message)) super(DbMigrationError, self).__init__(str(message))

View File

@ -91,7 +91,7 @@ class ModelBase(object):
Includes attributes from joins.""" Includes attributes from joins."""
local = dict(self) local = dict(self)
joined = dict([(k, v) for k, v in self.__dict__.items() joined = dict([(k, v) for k, v in self.__dict__.items()
if not k[0] == '_']) if not k[0] == '_'])
local.update(joined) local.update(joined)
return iter(local.items()) return iter(local.items())

View File

@ -246,7 +246,6 @@ Efficient use of soft deletes:
""" """
import os.path
import re import re
import time import time
@ -277,13 +276,13 @@ sqlite_db_opts = [
] ]
database_opts = [ database_opts = [
cfg.StrOpt('connection', cfg.StrOpt('connection',
default='sqlite:////var/run/sm/sm.db', default='sqlite:////var/run/sm/sm.db',
help='The SQLAlchemy connection string used to connect to the ' help='The SQLAlchemy connection string used to connect to the '
'database', 'database',
deprecated_name='sql_connection', deprecated_name='sql_connection',
deprecated_group=DEFAULT, deprecated_group=DEFAULT,
secret=True), secret=True),
cfg.IntOpt('idle_timeout', cfg.IntOpt('idle_timeout',
default=3600, default=3600,
deprecated_name='sql_idle_timeout', deprecated_name='sql_idle_timeout',
@ -366,6 +365,7 @@ class SqliteForeignKeysListener(PoolListener):
so the foreign key constraints will be enabled here for every so the foreign key constraints will be enabled here for every
database connection database connection
""" """
def connect(self, dbapi_con, con_record): def connect(self, dbapi_con, con_record):
dbapi_con.execute('pragma foreign_keys=ON') dbapi_con.execute('pragma foreign_keys=ON')
@ -627,6 +627,7 @@ def create_engine(sql_connection, sqlite_fk=False):
class Query(sqlalchemy.orm.query.Query): class Query(sqlalchemy.orm.query.Query):
"""Subclass of sqlalchemy.query with soft_delete() method.""" """Subclass of sqlalchemy.query with soft_delete() method."""
def soft_delete(self, synchronize_session='evaluate'): def soft_delete(self, synchronize_session='evaluate'):
return self.update({'deleted': literal_column('id'), return self.update({'deleted': literal_column('id'),
'updated_at': literal_column('updated_at'), 'updated_at': literal_column('updated_at'),

View File

@ -52,4 +52,4 @@ def save_and_reraise_exception():
logging.error(_('Original exception being dropped: %s'), logging.error(_('Original exception being dropped: %s'),
traceback.format_exception(type_, value, tb)) traceback.format_exception(type_, value, tb))
raise raise
raise_(type_,value,tb) raise_(type_, value, tb)

View File

@ -19,7 +19,6 @@
# #
import contextlib import contextlib
import errno import errno
import os import os

View File

@ -19,7 +19,6 @@
# #
import errno import errno
import functools import functools
import os import os

View File

@ -367,8 +367,7 @@ def setup(product_name):
def set_defaults(logging_context_format_string): def set_defaults(logging_context_format_string):
cfg.set_defaults(log_opts, cfg.set_defaults(log_opts,
logging_context_format_string= logging_context_format_string=logging_context_format_string)
logging_context_format_string)
def _find_facility_from_conf(): def _find_facility_from_conf():
@ -452,6 +451,7 @@ def _setup_logging_from_conf():
logger = logging.getLogger(mod) logger = logging.getLogger(mod)
logger.setLevel(level) logger.setLevel(level)
_loggers = {} _loggers = {}

View File

@ -17,7 +17,6 @@
# #
def notify(_context, message): def notify(_context, message):
"""Notifies the recipient of the desired event given the model""" """Notifies the recipient of the desired event given the model"""
pass pass

View File

@ -17,7 +17,6 @@
# #
NOTIFICATIONS = [] NOTIFICATIONS = []

View File

@ -470,7 +470,7 @@ def _parse_tokenize(rule):
# Handle leading parens on the token # Handle leading parens on the token
clean = tok.lstrip('(') clean = tok.lstrip('(')
for i in range(len(tok) - len(clean)): for _i in range(len(tok) - len(clean)):
yield '(', '(' yield '(', '('
# If it was only parentheses, continue # If it was only parentheses, continue
@ -498,7 +498,7 @@ def _parse_tokenize(rule):
yield 'check', _parse_check(clean) yield 'check', _parse_check(clean)
# Yield the trailing parens # Yield the trailing parens
for i in range(trail): for _i in range(trail):
yield ')', ')' yield ')', ')'

View File

@ -90,7 +90,7 @@ def main():
rawconfig.read(configfile) rawconfig.read(configfile)
config = wrapper.RootwrapConfig(rawconfig) config = wrapper.RootwrapConfig(rawconfig)
except ValueError as exc: except ValueError as exc:
msg = "Incorrect value in %s: %s" % (configfile, exc.message) msg = "Incorrect value in %s: %s" % (configfile, str(exc))
_exit_error(execname, msg, RC_BADCONFIG, log=False) _exit_error(execname, msg, RC_BADCONFIG, log=False)
except configparser.Error: except configparser.Error:
_exit_error(execname, "Incorrect configuration file: %s" % configfile, _exit_error(execname, "Incorrect configuration file: %s" % configfile,

View File

@ -105,7 +105,7 @@ class PathFilter(CommandFilter):
""" """
def match(self, userargs): def match(self, userargs):
command, arguments = userargs[0], userargs[1:] arguments = userargs[1:]
equal_args_num = len(self.args) == len(arguments) equal_args_num = len(self.args) == len(arguments)
exec_is_valid = super(PathFilter, self).match(userargs) exec_is_valid = super(PathFilter, self).match(userargs)

View File

@ -19,7 +19,6 @@
# #
from six.moves import configparser from six.moves import configparser
import logging import logging
import logging.handlers import logging.handlers
@ -39,6 +38,7 @@ class FilterMatchNotExecutable(Exception):
This exception is raised when a filter matched but no executable was This exception is raised when a filter matched but no executable was
found. found.
""" """
def __init__(self, match=None, **kwargs): def __init__(self, match=None, **kwargs):
self.match = match self.match = match

View File

@ -66,6 +66,7 @@ LOG = logging.getLogger(__name__)
class Pool(pools.Pool): class Pool(pools.Pool):
"""Class that implements a Pool of Connections.""" """Class that implements a Pool of Connections."""
def __init__(self, conf, connection_cls, *args, **kwargs): def __init__(self, conf, connection_cls, *args, **kwargs):
self.connection_cls = connection_cls self.connection_cls = connection_cls
self.conf = conf self.conf = conf
@ -188,6 +189,7 @@ class ConnectionContext(rpc_common.Connection):
class ReplyProxy(ConnectionContext): class ReplyProxy(ConnectionContext):
""" Connection class for RPC replies / callbacks """ """ Connection class for RPC replies / callbacks """
def __init__(self, conf, connection_pool): def __init__(self, conf, connection_pool):
self._call_waiters = {} self._call_waiters = {}
self._num_call_waiters = 0 self._num_call_waiters = 0
@ -240,7 +242,7 @@ def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None,
msg = {'result': reply, 'failure': failure} msg = {'result': reply, 'failure': failure}
except TypeError: except TypeError:
msg = {'result': dict((k, repr(v)) msg = {'result': dict((k, repr(v))
for k, v in reply.__dict__.items()), for k, v in reply.__dict__.items()),
'failure': failure} 'failure': failure}
if ending: if ending:
msg['ending'] = True msg['ending'] = True
@ -257,6 +259,7 @@ def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None,
class RpcContext(rpc_common.CommonRpcContext): class RpcContext(rpc_common.CommonRpcContext):
"""Context that supports replying to a rpc.call""" """Context that supports replying to a rpc.call"""
def __init__(self, **kwargs): def __init__(self, **kwargs):
self.msg_id = kwargs.pop('msg_id', None) self.msg_id = kwargs.pop('msg_id', None)
self.reply_q = kwargs.pop('reply_q', None) self.reply_q = kwargs.pop('reply_q', None)
@ -517,7 +520,7 @@ class MulticallProxyWaiter(object):
yield result yield result
#TODO(pekowski): Remove MulticallWaiter() in Havana. # TODO(pekowski): Remove MulticallWaiter() in Havana.
class MulticallWaiter(object): class MulticallWaiter(object):
def __init__(self, conf, connection, timeout): def __init__(self, conf, connection, timeout):
self._connection = connection self._connection = connection

View File

@ -173,6 +173,7 @@ class Connection(object):
An instance of this class should never be created by users of the rpc API. An instance of this class should never be created by users of the rpc API.
Use rpc.create_connection() instead. Use rpc.create_connection() instead.
""" """
def close(self): def close(self):
"""Close the connection. """Close the connection.
@ -354,7 +355,9 @@ def deserialize_remote_exception(conf, data):
return RemoteError(name, failure.get('message'), trace) return RemoteError(name, failure.get('message'), trace)
ex_type = type(failure) ex_type = type(failure)
str_override = lambda self: message
def str_override(self):
return message
new_ex_type = type(ex_type.__name__ + "_Remote", (ex_type,), new_ex_type = type(ex_type.__name__ + "_Remote", (ex_type,),
{'__str__': str_override, '__unicode__': str_override}) {'__str__': str_override, '__unicode__': str_override})
try: try:
@ -422,6 +425,7 @@ class ClientException(Exception):
hit by an RPC proxy object. Merely instantiating it records the hit by an RPC proxy object. Merely instantiating it records the
current exception information, which will be passed back to the current exception information, which will be passed back to the
RPC client without exceptional logging.""" RPC client without exceptional logging."""
def __init__(self): def __init__(self):
self._exc_info = sys.exc_info() self._exc_info = sys.exc_info()

View File

@ -74,7 +74,7 @@ class Consumer(object):
# Caller might have called ctxt.reply() manually # Caller might have called ctxt.reply() manually
for (reply, failure) in ctxt._response: for (reply, failure) in ctxt._response:
if failure: if failure:
raise_(failure[0],failure[1],failure[2]) raise_(failure[0], failure[1], failure[2])
res.append(reply) res.append(reply)
# if ending not 'sent'...we might have more data to # if ending not 'sent'...we might have more data to
# return from the function itself # return from the function itself

View File

@ -322,6 +322,7 @@ class Publisher(object):
class DirectPublisher(Publisher): class DirectPublisher(Publisher):
"""Publisher class for 'direct'""" """Publisher class for 'direct'"""
def __init__(self, conf, channel, msg_id, **kwargs): def __init__(self, conf, channel, msg_id, **kwargs):
"""init a 'direct' publisher. """init a 'direct' publisher.
@ -338,6 +339,7 @@ class DirectPublisher(Publisher):
class TopicPublisher(Publisher): class TopicPublisher(Publisher):
"""Publisher class for 'topic'""" """Publisher class for 'topic'"""
def __init__(self, conf, channel, topic, **kwargs): def __init__(self, conf, channel, topic, **kwargs):
"""init a 'topic' publisher. """init a 'topic' publisher.
@ -357,6 +359,7 @@ class TopicPublisher(Publisher):
class FanoutPublisher(Publisher): class FanoutPublisher(Publisher):
"""Publisher class for 'fanout'""" """Publisher class for 'fanout'"""
def __init__(self, conf, channel, topic, **kwargs): def __init__(self, conf, channel, topic, **kwargs):
"""init a 'fanout' publisher. """init a 'fanout' publisher.
@ -482,7 +485,7 @@ class Connection(object):
""" """
if self.connection: if self.connection:
LOG.info(_("Reconnecting to AMQP server on " LOG.info(_("Reconnecting to AMQP server on "
"%(hostname)s:%(port)d") % params) "%(hostname)s:%(port)d") % params)
try: try:
self.connection.release() self.connection.release()
except self.connection_errors: except self.connection_errors:
@ -612,7 +615,7 @@ class Connection(object):
def _connect_error(exc): def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)} log_info = {'topic': topic, 'err_str': str(exc)}
LOG.error(_("Failed to declare consumer for topic '%(topic)s': " LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
"%(err_str)s") % log_info) "%(err_str)s") % log_info)
def _declare_consumer(): def _declare_consumer():
consumer = consumer_cls(self.conf, self.channel, topic, callback, consumer = consumer_cls(self.conf, self.channel, topic, callback,
@ -673,7 +676,7 @@ class Connection(object):
def _error_callback(exc): def _error_callback(exc):
log_info = {'topic': topic, 'err_str': str(exc)} log_info = {'topic': topic, 'err_str': str(exc)}
LOG.exception(_("Failed to publish message to topic " LOG.exception(_("Failed to publish message to topic "
"'%(topic)s': %(err_str)s") % log_info) "'%(topic)s': %(err_str)s") % log_info)
def _publish(): def _publish():
publisher = cls(self.conf, self.channel, topic, **kwargs) publisher = cls(self.conf, self.channel, topic, **kwargs)

View File

@ -239,6 +239,7 @@ class Publisher(object):
class DirectPublisher(Publisher): class DirectPublisher(Publisher):
"""Publisher class for 'direct'""" """Publisher class for 'direct'"""
def __init__(self, conf, session, msg_id): def __init__(self, conf, session, msg_id):
"""Init a 'direct' publisher.""" """Init a 'direct' publisher."""
super(DirectPublisher, self).__init__(session, msg_id, super(DirectPublisher, self).__init__(session, msg_id,
@ -247,6 +248,7 @@ class DirectPublisher(Publisher):
class TopicPublisher(Publisher): class TopicPublisher(Publisher):
"""Publisher class for 'topic'""" """Publisher class for 'topic'"""
def __init__(self, conf, session, topic): def __init__(self, conf, session, topic):
"""init a 'topic' publisher. """init a 'topic' publisher.
""" """
@ -257,6 +259,7 @@ class TopicPublisher(Publisher):
class FanoutPublisher(Publisher): class FanoutPublisher(Publisher):
"""Publisher class for 'fanout'""" """Publisher class for 'fanout'"""
def __init__(self, conf, session, topic): def __init__(self, conf, session, topic):
"""init a 'fanout' publisher. """init a 'fanout' publisher.
""" """
@ -267,6 +270,7 @@ class FanoutPublisher(Publisher):
class NotifyPublisher(Publisher): class NotifyPublisher(Publisher):
"""Publisher class for notifications""" """Publisher class for notifications"""
def __init__(self, conf, session, topic): def __init__(self, conf, session, topic):
"""init a 'topic' publisher. """init a 'topic' publisher.
""" """
@ -406,7 +410,7 @@ class Connection(object):
def _connect_error(exc): def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)} log_info = {'topic': topic, 'err_str': str(exc)}
LOG.error(_("Failed to declare consumer for topic '%(topic)s': " LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
"%(err_str)s") % log_info) "%(err_str)s") % log_info)
def _declare_consumer(): def _declare_consumer():
consumer = consumer_cls(self.conf, self.session, topic, callback) consumer = consumer_cls(self.conf, self.session, topic, callback)
@ -460,7 +464,7 @@ class Connection(object):
def _connect_error(exc): def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)} log_info = {'topic': topic, 'err_str': str(exc)}
LOG.exception(_("Failed to publish message to topic " LOG.exception(_("Failed to publish message to topic "
"'%(topic)s': %(err_str)s") % log_info) "'%(topic)s': %(err_str)s") % log_info)
def _publisher_send(): def _publisher_send():
publisher = cls(self.conf, self.session, topic) publisher = cls(self.conf, self.session, topic)

View File

@ -228,13 +228,13 @@ class ZmqClient(object):
if not envelope: if not envelope:
self.outq.send(map(bytes, self.outq.send(map(bytes,
(msg_id, topic, 'cast', _serialize(data)))) (msg_id, topic, 'cast', _serialize(data))))
return return
rpc_envelope = rpc_common.serialize_msg(data[1], envelope) rpc_envelope = rpc_common.serialize_msg(data[1], envelope)
zmq_msg = reduce(lambda x, y: x + y, rpc_envelope.items()) zmq_msg = reduce(lambda x, y: x + y, rpc_envelope.items())
self.outq.send(map(bytes, self.outq.send(map(bytes,
(msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg)) (msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg))
def close(self): def close(self):
self.outq.close() self.outq.close()
@ -242,6 +242,7 @@ class ZmqClient(object):
class RpcContext(rpc_common.CommonRpcContext): class RpcContext(rpc_common.CommonRpcContext):
"""Context that supports replying to a rpc.call.""" """Context that supports replying to a rpc.call."""
def __init__(self, **kwargs): def __init__(self, **kwargs):
self.replies = [] self.replies = []
super(RpcContext, self).__init__(**kwargs) super(RpcContext, self).__init__(**kwargs)
@ -331,7 +332,7 @@ class ConsumerBase(object):
@classmethod @classmethod
def normalize_reply(self, result, replies): def normalize_reply(self, result, replies):
#TODO(ewindisch): re-evaluate and document this method. # TODO(ewindisch): re-evaluate and document this method.
if isinstance(result, types.GeneratorType): if isinstance(result, types.GeneratorType):
return list(result) return list(result)
elif replies: elif replies:
@ -451,7 +452,7 @@ class ZmqProxy(ZmqBaseReactor):
def consume(self, sock): def consume(self, sock):
ipc_dir = CONF.rpc_zmq_ipc_dir ipc_dir = CONF.rpc_zmq_ipc_dir
#TODO(ewindisch): use zero-copy (i.e. references, not copying) # TODO(ewindisch): use zero-copy (i.e. references, not copying)
data = sock.recv() data = sock.recv()
topic = data[1] topic = data[1]
@ -576,7 +577,7 @@ class ZmqReactor(ZmqBaseReactor):
super(ZmqReactor, self).__init__(conf) super(ZmqReactor, self).__init__(conf)
def consume(self, sock): def consume(self, sock):
#TODO(ewindisch): use zero-copy (i.e. references, not copying) # TODO(ewindisch): use zero-copy (i.e. references, not copying)
data = sock.recv() data = sock.recv()
LOG.debug(_("CONSUMER RECEIVED DATA: %s"), data) LOG.debug(_("CONSUMER RECEIVED DATA: %s"), data)
if sock in self.mapping: if sock in self.mapping:

View File

@ -56,6 +56,7 @@ class Exchange(object):
Implements lookups. Implements lookups.
Subclass this to support hashtables, dns, etc. Subclass this to support hashtables, dns, etc.
""" """
def __init__(self): def __init__(self):
pass pass
@ -67,6 +68,7 @@ class Binding(object):
""" """
A binding on which to perform a lookup. A binding on which to perform a lookup.
""" """
def __init__(self): def __init__(self):
pass pass
@ -80,6 +82,7 @@ class MatchMakerBase(object):
Build off HeartbeatMatchMakerBase if building a Build off HeartbeatMatchMakerBase if building a
heartbeat-capable MatchMaker. heartbeat-capable MatchMaker.
""" """
def __init__(self): def __init__(self):
# Array of tuples. Index [2] toggles negation, [3] is last-if-true # Array of tuples. Index [2] toggles negation, [3] is last-if-true
self.bindings = [] self.bindings = []
@ -145,9 +148,9 @@ class MatchMakerBase(object):
def add_binding(self, binding, rule, last=True): def add_binding(self, binding, rule, last=True):
self.bindings.append((binding, rule, False, last)) self.bindings.append((binding, rule, False, last))
#NOTE(ewindisch): kept the following method in case we implement the # NOTE(ewindisch): kept the following method in case we implement the
# underlying support. # underlying support.
#def add_negate_binding(self, binding, rule, last=True): # def add_negate_binding(self, binding, rule, last=True):
# self.bindings.append((binding, rule, True, last)) # self.bindings.append((binding, rule, True, last))
def queues(self, key): def queues(self, key):
@ -155,7 +158,7 @@ class MatchMakerBase(object):
# bit is for negate bindings - if we choose to implement it. # bit is for negate bindings - if we choose to implement it.
# last stops processing rules if this matches. # last stops processing rules if this matches.
for (binding, exchange, bit, last) in self.bindings: for (binding, exchange, _bit, last) in self.bindings:
if binding.test(key): if binding.test(key):
workers.extend(exchange.run(key)) workers.extend(exchange.run(key))
@ -171,6 +174,7 @@ class HeartbeatMatchMakerBase(MatchMakerBase):
Provides common methods for registering, Provides common methods for registering,
unregistering, and maintaining heartbeats. unregistering, and maintaining heartbeats.
""" """
def __init__(self): def __init__(self):
self.hosts = set() self.hosts = set()
self._heart = None self._heart = None
@ -269,6 +273,7 @@ class DirectBinding(Binding):
Although dots are used in the key, the behavior here is Although dots are used in the key, the behavior here is
that it maps directly to a host, thus direct. that it maps directly to a host, thus direct.
""" """
def test(self, key): def test(self, key):
if '.' in key: if '.' in key:
return True return True
@ -283,6 +288,7 @@ class TopicBinding(Binding):
that of a topic exchange (whereas where there are dots, behavior that of a topic exchange (whereas where there are dots, behavior
matches that of a direct exchange. matches that of a direct exchange.
""" """
def test(self, key): def test(self, key):
if '.' not in key: if '.' not in key:
return True return True
@ -291,6 +297,7 @@ class TopicBinding(Binding):
class FanoutBinding(Binding): class FanoutBinding(Binding):
"""Match on fanout keys, where key starts with 'fanout.' string.""" """Match on fanout keys, where key starts with 'fanout.' string."""
def test(self, key): def test(self, key):
if key.startswith('fanout~'): if key.startswith('fanout~'):
return True return True
@ -299,12 +306,14 @@ class FanoutBinding(Binding):
class StubExchange(Exchange): class StubExchange(Exchange):
"""Exchange that does nothing.""" """Exchange that does nothing."""
def run(self, key): def run(self, key):
return [(key, None)] return [(key, None)]
class LocalhostExchange(Exchange): class LocalhostExchange(Exchange):
"""Exchange where all direct topics are local.""" """Exchange where all direct topics are local."""
def __init__(self, host='localhost'): def __init__(self, host='localhost'):
self.host = host self.host = host
super(Exchange, self).__init__() super(Exchange, self).__init__()
@ -318,6 +327,7 @@ class DirectExchange(Exchange):
Exchange where all topic keys are split, sending to second half. Exchange where all topic keys are split, sending to second half.
i.e. "compute.host" sends a message to "compute.host" running on "host" i.e. "compute.host" sends a message to "compute.host" running on "host"
""" """
def __init__(self): def __init__(self):
super(Exchange, self).__init__() super(Exchange, self).__init__()
@ -331,6 +341,7 @@ class MatchMakerLocalhost(MatchMakerBase):
Match Maker where all bare topics resolve to localhost. Match Maker where all bare topics resolve to localhost.
Useful for testing. Useful for testing.
""" """
def __init__(self, host='localhost'): def __init__(self, host='localhost'):
super(MatchMakerLocalhost, self).__init__() super(MatchMakerLocalhost, self).__init__()
self.add_binding(FanoutBinding(), LocalhostExchange(host)) self.add_binding(FanoutBinding(), LocalhostExchange(host))
@ -344,6 +355,7 @@ class MatchMakerStub(MatchMakerBase):
Useful for testing, or for AMQP/brokered queues. Useful for testing, or for AMQP/brokered queues.
Will not work where knowledge of hosts is known (i.e. zeromq) Will not work where knowledge of hosts is known (i.e. zeromq)
""" """
def __init__(self): def __init__(self):
super(MatchMakerLocalhost, self).__init__() super(MatchMakerLocalhost, self).__init__()

View File

@ -63,6 +63,7 @@ class RedisTopicExchange(RedisExchange):
Exchange where all topic keys are split, sending to second half. Exchange where all topic keys are split, sending to second half.
i.e. "compute.host" sends a message to "compute" running on "host" i.e. "compute.host" sends a message to "compute" running on "host"
""" """
def run(self, topic): def run(self, topic):
while True: while True:
member_name = self.redis.srandmember(topic) member_name = self.redis.srandmember(topic)
@ -84,11 +85,12 @@ class RedisFanoutExchange(RedisExchange):
""" """
Return a list of all hosts. Return a list of all hosts.
""" """
def run(self, topic): def run(self, topic):
topic = topic.split('~', 1)[1] topic = topic.split('~', 1)[1]
hosts = self.redis.smembers(topic) hosts = self.redis.smembers(topic)
good_hosts = [host for host in hosts if good_hosts = [host for host in hosts if
self.matchmaker.is_alive(topic, host)] self.matchmaker.is_alive(topic, host)]
return [(x, x.split('.', 1)[1]) for x in good_hosts] return [(x, x.split('.', 1)[1]) for x in good_hosts]
@ -97,6 +99,7 @@ class MatchMakerRedis(mm_common.HeartbeatMatchMakerBase):
""" """
MatchMaker registering and looking-up hosts with a Redis server. MatchMaker registering and looking-up hosts with a Redis server.
""" """
def __init__(self): def __init__(self):
super(MatchMakerRedis, self).__init__() super(MatchMakerRedis, self).__init__()

View File

@ -54,6 +54,7 @@ class RingExchange(mm.Exchange):
__init__ takes optional ring dictionary argument, otherwise __init__ takes optional ring dictionary argument, otherwise
loads the ringfile from CONF.mathcmaker_ringfile. loads the ringfile from CONF.mathcmaker_ringfile.
""" """
def __init__(self, ring=None): def __init__(self, ring=None):
super(RingExchange, self).__init__() super(RingExchange, self).__init__()
@ -76,6 +77,7 @@ class RingExchange(mm.Exchange):
class RoundRobinRingExchange(RingExchange): class RoundRobinRingExchange(RingExchange):
"""A Topic Exchange based on a hashmap.""" """A Topic Exchange based on a hashmap."""
def __init__(self, ring=None): def __init__(self, ring=None):
super(RoundRobinRingExchange, self).__init__(ring) super(RoundRobinRingExchange, self).__init__(ring)
@ -92,6 +94,7 @@ class RoundRobinRingExchange(RingExchange):
class FanoutRingExchange(RingExchange): class FanoutRingExchange(RingExchange):
"""Fanout Exchange based on a hashmap.""" """Fanout Exchange based on a hashmap."""
def __init__(self, ring=None): def __init__(self, ring=None):
super(FanoutRingExchange, self).__init__(ring) super(FanoutRingExchange, self).__init__(ring)
@ -111,6 +114,7 @@ class MatchMakerRing(mm.MatchMakerBase):
""" """
Match Maker where hosts are loaded from a static hashmap. Match Maker where hosts are loaded from a static hashmap.
""" """
def __init__(self, ring=None): def __init__(self, ring=None):
super(MatchMakerRing, self).__init__() super(MatchMakerRing, self).__init__()
self.add_binding(mm.FanoutBinding(), FanoutRingExchange(ring)) self.add_binding(mm.FanoutBinding(), FanoutRingExchange(ring))

View File

@ -35,6 +35,7 @@ class Service(service.Service):
"""Service object for binaries running on hosts. """Service object for binaries running on hosts.
A service enables rpc by listening to queues based on topic and host.""" A service enables rpc by listening to queues based on topic and host."""
def __init__(self, host, topic, manager=None, serializer=None): def __init__(self, host, topic, manager=None, serializer=None):
super(Service, self).__init__() super(Service, self).__init__()
self.host = host self.host = host

View File

@ -82,12 +82,12 @@ def parse_requirements(requirements_files=['requirements.txt',
# -e git://github.com/openstack/nova/master#egg=nova # -e git://github.com/openstack/nova/master#egg=nova
if re.match(r'\s*-e\s+', line): if re.match(r'\s*-e\s+', line):
requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1', requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1',
line)) line))
# such as: # such as:
# http://github.com/openstack/nova/zipball/master#egg=nova # http://github.com/openstack/nova/zipball/master#egg=nova
elif re.match(r'\s*https?:', line): elif re.match(r'\s*https?:', line):
requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1', requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1',
line)) line))
# -f lines are for index locations, and don't get used here # -f lines are for index locations, and don't get used here
elif re.match(r'\s*-f\s+', line): elif re.match(r'\s*-f\s+', line):
pass pass

View File

@ -42,6 +42,7 @@ class Thread(object):
:class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when :class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when
it has done so it can be removed from the threads list. it has done so it can be removed from the threads list.
""" """
def __init__(self, thread, group): def __init__(self, thread, group):
self.thread = thread self.thread = thread
self.thread.link(_thread_done, group=group, thread=self) self.thread.link(_thread_done, group=group, thread=self)
@ -60,6 +61,7 @@ class ThreadGroup(object):
when need be). when need be).
* provide an easy API to add timers. * provide an easy API to add timers.
""" """
def __init__(self, thread_pool_size=10): def __init__(self, thread_pool_size=10):
self.pool = greenpool.GreenPool(thread_pool_size) self.pool = greenpool.GreenPool(thread_pool_size)
self.threads = [] self.threads = []

View File

@ -52,9 +52,9 @@ def parse_isotime(timestr):
try: try:
return iso8601.parse_date(timestr) return iso8601.parse_date(timestr)
except iso8601.ParseError as e: except iso8601.ParseError as e:
raise ValueError(e.message) raise ValueError(str(e))
except TypeError as e: except TypeError as e:
raise ValueError(e.message) raise ValueError(str(e))
def strtime(at=None, fmt=PERFECT_TIME_FORMAT): def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
@ -124,7 +124,7 @@ def set_time_override(override_time=datetime.datetime.utcnow()):
def advance_time_delta(timedelta): def advance_time_delta(timedelta):
"""Advance overridden time using a datetime.timedelta.""" """Advance overridden time using a datetime.timedelta."""
assert(not utcnow.override_time is None) assert(utcnow.override_time is not None)
try: try:
for dt in utcnow.override_time: for dt in utcnow.override_time:
dt += timedelta dt += timedelta

View File

@ -8,6 +8,7 @@
from sm_client.common import utils from sm_client.common import utils
from keystoneclient.v3 import client as ksclient from keystoneclient.v3 import client as ksclient
def _get_ksclient(**kwargs): def _get_ksclient(**kwargs):
"""Get an endpoint and auth token from Keystone. """Get an endpoint and auth token from Keystone.

View File

@ -92,6 +92,7 @@ class Resource(object):
:param info: dictionary representing resource attributes :param info: dictionary representing resource attributes
:param loaded: prevent lazy-loading if set to True :param loaded: prevent lazy-loading if set to True
""" """
def __init__(self, manager, info, loaded=False): def __init__(self, manager, info, loaded=False):
self.manager = manager self.manager = manager
self._info = info self._info = info

View File

@ -28,7 +28,7 @@ from six.moves.urllib.parse import urlparse
try: try:
import ssl import ssl
except ImportError: except ImportError:
#TODO(bcwaldon): Handle this failure more gracefully # TODO(bcwaldon): Handle this failure more gracefully
pass pass
try: try:

View File

@ -17,6 +17,7 @@
# #
from __future__ import print_function
import argparse import argparse
import os import os
import sys import sys
@ -98,7 +99,7 @@ def print_list(objs, fields, field_labels, formatters={}, sortby=0):
data = getattr(o, field, '') data = getattr(o, field, '')
row.append(data) row.append(data)
pt.add_row(row) pt.add_row(row)
print pt.get_string(sortby=field_labels[sortby]) print(pt.get_string(sortby=field_labels[sortby]))
def print_tuple_list(tuples, tuple_labels=[]): def print_tuple_list(tuples, tuple_labels=[]):
@ -111,11 +112,11 @@ def print_tuple_list(tuples, tuple_labels=[]):
if len(t) == 2: if len(t) == 2:
pt.add_row([t[0], t[1]]) pt.add_row([t[0], t[1]])
else: else:
for t,l in zip(tuples,tuple_labels): for t, l in zip(tuples, tuple_labels):
if len(t) == 2: if len(t) == 2:
pt.add_row([l, t[1]]) pt.add_row([l, t[1]])
print pt.get_string() print(pt.get_string())
def print_mapping(data, fields, dict_property="Property", wrap=0): def print_mapping(data, fields, dict_property="Property", wrap=0):
@ -142,7 +143,8 @@ def print_mapping(data, fields, dict_property="Property", wrap=0):
col1 = '' col1 = ''
else: else:
pt.add_row([k, v]) pt.add_row([k, v])
print pt.get_string() print(pt.get_string())
def print_dict(d, fields, dict_property="Property", wrap=0): def print_dict(d, fields, dict_property="Property", wrap=0):
pt = prettytable.PrettyTable([dict_property, 'Value'], pt = prettytable.PrettyTable([dict_property, 'Value'],
@ -169,7 +171,7 @@ def print_dict(d, fields, dict_property="Property", wrap=0):
col1 = '' col1 = ''
else: else:
pt.add_row([k, v]) pt.add_row([k, v])
print pt.get_string() print(pt.get_string())
def find_resource(manager, name_or_id): def find_resource(manager, name_or_id):
@ -258,5 +260,5 @@ def args_array_to_patch(op, attributes):
def exit(msg=''): def exit(msg=''):
if msg: if msg:
print >> sys.stderr, msg print(msg, file=sys.stderr)
sys.exit(1) sys.exit(1)

View File

@ -10,6 +10,7 @@ import sys
class BaseException(Exception): class BaseException(Exception):
"""An error occurred.""" """An error occurred."""
def __init__(self, message=None): def __init__(self, message=None):
self.message = message self.message = message
@ -164,6 +165,7 @@ class AmbiguousAuthSystem(ClientException):
"""Could not obtain token and endpoint using provided credentials.""" """Could not obtain token and endpoint using provided credentials."""
pass pass
# Alias for backwards compatibility # Alias for backwards compatibility
AmbigiousAuthSystem = AmbiguousAuthSystem AmbigiousAuthSystem = AmbiguousAuthSystem

View File

@ -120,7 +120,7 @@ def _import_module(mod_str):
def _is_in_group(opt, group): def _is_in_group(opt, group):
"Check if opt is in group." "Check if opt is in group."
for key, value in group._opts.items(): for _key, value in group._opts.items():
if value['opt'] == opt: if value['opt'] == opt:
return True return True
return False return False
@ -132,7 +132,7 @@ def _guess_groups(opt, mod_obj):
return 'DEFAULT' return 'DEFAULT'
# what other groups is it in? # what other groups is it in?
for key, value in cfg.CONF.items(): for _key, value in cfg.CONF.items():
if isinstance(value, cfg.CONF.GroupAttr): if isinstance(value, cfg.CONF.GroupAttr):
if _is_in_group(opt, value._group): if _is_in_group(opt, value._group):
return value._group.name return value._group.name
@ -254,5 +254,6 @@ def main():
sys.exit(0) sys.exit(0)
generate(sys.argv[1:]) generate(sys.argv[1:])
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@ -79,7 +79,7 @@ def main():
rawconfig.read(configfile) rawconfig.read(configfile)
config = wrapper.RootwrapConfig(rawconfig) config = wrapper.RootwrapConfig(rawconfig)
except ValueError as exc: except ValueError as exc:
msg = "Incorrect value in %s: %s" % (configfile, exc.message) msg = "Incorrect value in %s: %s" % (configfile, str(exc))
_exit_error(execname, msg, RC_BADCONFIG, log=False) _exit_error(execname, msg, RC_BADCONFIG, log=False)
except configparser.Error: except configparser.Error:
_exit_error(execname, "Incorrect configuration file: %s" % configfile, _exit_error(execname, "Incorrect configuration file: %s" % configfile,

View File

@ -105,7 +105,7 @@ class PathFilter(CommandFilter):
""" """
def match(self, userargs): def match(self, userargs):
command, arguments = userargs[0], userargs[1:] arguments = userargs[1:]
equal_args_num = len(self.args) == len(arguments) equal_args_num = len(self.args) == len(arguments)
exec_is_valid = super(PathFilter, self).match(userargs) exec_is_valid = super(PathFilter, self).match(userargs)

View File

@ -19,7 +19,6 @@
# #
from six.moves import configparser from six.moves import configparser
import logging import logging
import logging.handlers import logging.handlers
@ -40,6 +39,7 @@ class FilterMatchNotExecutable(Exception):
This exception is raised when a filter matched but no executable was This exception is raised when a filter matched but no executable was
found. found.
""" """
def __init__(self, match=None, **kwargs): def __init__(self, match=None, **kwargs):
self.match = match self.match = match

View File

@ -8,7 +8,7 @@
""" """
Command-line interface for Service Manager (SM) Command-line interface for Service Manager (SM)
""" """
from __future__ import print_function
import argparse import argparse
import httplib2 import httplib2
import logging import logging
@ -131,7 +131,7 @@ class SmcShell(object):
parser.add_argument('--smc-api-version', parser.add_argument('--smc-api-version',
default=utils.env( default=utils.env(
'SMC_API_VERSION', default='1'), 'SMC_API_VERSION', default='1'),
help='Defaults to env[SMC_API_VERSION] ' help='Defaults to env[SMC_API_VERSION] '
'or 1') 'or 1')
@ -203,8 +203,8 @@ class SmcShell(object):
httplib2.debuglevel = 1 httplib2.debuglevel = 1
else: else:
logging.basicConfig( logging.basicConfig(
format="%(levelname)s %(message)s", format="%(levelname)s %(message)s",
level=logging.CRITICAL) level=logging.CRITICAL)
def main(self, argv): def main(self, argv):
# Parse args once to find version # Parse args once to find version
@ -249,7 +249,7 @@ class SmcShell(object):
client = cgclient.get_client(api_version, **(args.__dict__)) client = cgclient.get_client(api_version, **(args.__dict__))
nargs = args.__dict__ # nargs = args.__dict__
# nargs['neutron_endpoint'] = client.neutron_endpoint # nargs['neutron_endpoint'] = client.neutron_endpoint
# client.neutronClient = get_neutron_client(**nargs) # client.neutronClient = get_neutron_client(**nargs)
@ -284,8 +284,9 @@ def main():
SmcShell().main(sys.argv[1:]) SmcShell().main(sys.argv[1:])
except Exception as e: except Exception as e:
print >> sys.stderr, e print(e, file=sys.stderr)
sys.exit(1) sys.exit(1)
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@ -16,7 +16,7 @@ COMMAND_MODULES = [
smc_service_shell, smc_service_shell,
smc_service_node_shell, smc_service_node_shell,
smc_servicegroup_shell, smc_servicegroup_shell,
] ]
def enhance_parser(parser, subparsers, cmd_mapper): def enhance_parser(parser, subparsers, cmd_mapper):

View File

@ -6,7 +6,6 @@
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #
# #
from sm_client import exc
from sm_client.common import base from sm_client.common import base

View File

@ -7,9 +7,9 @@
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #
# #
from sm_client import exc
from sm_client.common import base from sm_client.common import base
class SmcNode(base.Resource): class SmcNode(base.Resource):
def __repr__(self): def __repr__(self):
return "<SmcNode %s>" % self._info return "<SmcNode %s>" % self._info

View File

@ -16,6 +16,7 @@ def _print_sm_service_node_show(node):
'availability_status', 'ready_state'] 'availability_status', 'ready_state']
utils.print_mapping(node, fields, wrap=72) utils.print_mapping(node, fields, wrap=72)
def do_servicenode_list(cc, args): def do_servicenode_list(cc, args):
"""List Service Nodes.""" """List Service Nodes."""
try: try:
@ -44,6 +45,6 @@ def do_servicenode_show(cc, args):
"requires 'admin' level") "requires 'admin' level")
else: else:
if node is None: if node is None:
print "Service node %s could not be found" % args.node print("Service node %s could not be found" % args.node)
return return
_print_sm_service_node_show(node) _print_sm_service_node_show(node)

View File

@ -53,7 +53,7 @@ def do_service_show(cc, args):
"requires 'admin' level") "requires 'admin' level")
else: else:
if service is None: if service is None:
print "Service %s could not be found" % args.service print("Service %s could not be found" % args.service)
return return
if service.status: if service.status:
setattr(service, 'state', service.state + '-' + service.status) setattr(service, 'state', service.state + '-' + service.status)

View File

@ -7,7 +7,6 @@
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #
# #
from sm_client import exc
from sm_client.common import base from sm_client.common import base

View File

@ -45,7 +45,7 @@ def do_servicegroup_show(cc, args):
"requires 'admin' level") "requires 'admin' level")
else: else:
if servicegroup is None: if servicegroup is None:
print "Service group %s could not be found" % args.servicegroup print("Service group %s could not be found" % args.servicegroup)
return return
if servicegroup.status: if servicegroup.status:
setattr(servicegroup, 'state', servicegroup.state + '-' + setattr(servicegroup, 'state', servicegroup.state + '-' +

View File

@ -101,7 +101,7 @@ class InstallVenv(object):
print('Creating venv...', end=' ') print('Creating venv...', end=' ')
if no_site_packages: if no_site_packages:
self.run_command(['virtualenv', '-q', '--no-site-packages', self.run_command(['virtualenv', '-q', '--no-site-packages',
self.venv]) self.venv])
else: else:
self.run_command(['virtualenv', '-q', self.venv]) self.run_command(['virtualenv', '-q', self.venv])
print('done.') print('done.')
@ -111,7 +111,7 @@ class InstallVenv(object):
def pip_install(self, *args): def pip_install(self, *args):
self.run_command(['tools/with_venv.sh', self.run_command(['tools/with_venv.sh',
'pip', 'install', '--upgrade'] + list(args), 'pip', 'install', '--upgrade'] + list(args),
redirect_output=False) redirect_output=False)
def install_dependencies(self): def install_dependencies(self):
@ -142,7 +142,7 @@ class Distro(InstallVenv):
def check_cmd(self, cmd): def check_cmd(self, cmd):
return bool(self.run_command(['which', cmd], return bool(self.run_command(['which', cmd],
check_exit_code=False).strip()) check_exit_code=False).strip())
def install_virtualenv(self): def install_virtualenv(self):
if self.check_cmd('virtualenv'): if self.check_cmd('virtualenv'):

View File

@ -49,7 +49,7 @@ def main():
si_parser.add_argument('parameters', help='instance parameters') si_parser.add_argument('parameters', help='instance parameters')
sys_parser = subparsers.add_parser('system', sys_parser = subparsers.add_parser('system',
help='system Configuration') help='system Configuration')
sys_parser.set_defaults(which='system') sys_parser.set_defaults(which='system')
sys_parser.add_argument( sys_parser.add_argument(
"--cpe_mode", choices=[cpe_duplex, cpe_duplex_direct], "--cpe_mode", choices=[cpe_duplex, cpe_duplex_direct],

View File

@ -20,7 +20,7 @@ def get_pid(pid_file):
pid = f.readline().strip('\n ') pid = f.readline().strip('\n ')
try: try:
pid = int(pid) pid = int(pid)
except: except Exception as e:
pid = -1 pid = -1
return pid return pid
return -1 return -1
@ -39,7 +39,7 @@ def get_process_name(pid):
name = ntpath.basename(cmd_line[1]) name = ntpath.basename(cmd_line[1])
return name return name
except: except Exception as e:
# most likely it is a leftover pid # most likely it is a leftover pid
return '' return ''
@ -112,7 +112,7 @@ def main():
pid_file = row[5] pid_file = row[5]
pid = get_pid(pid_file) pid = get_pid(pid_file)
pn = get_process_name(pid) pn = get_process_name(pid)
msg = "%-32s %-20s %-20s " % (row[0], row[1],row[2]) msg = "%-32s %-20s %-20s " % (row[0], row[1], row[2])
if args.impact: if args.impact:
msg += "%-10s" % (row[6]) msg += "%-10s" % (row[6])
if args.pid: if args.pid:
@ -122,7 +122,7 @@ def main():
if args.pid_file: if args.pid_file:
msg += "%-25s" % (pid_file) msg += "%-25s" % (pid_file)
msg += "%-10s %20s" % (row[3], row[4]) msg += "%-10s %20s" % (row[3], row[4])
print msg print(msg)
print("%s" % ('-' * len)) print("%s" % ('-' * len))
@ -168,7 +168,7 @@ def main():
pid_file = row[4] pid_file = row[4]
pid = get_pid(pid_file) pid = get_pid(pid_file)
pn = get_process_name(pid) pn = get_process_name(pid)
msg = "%-32s %-20s %-20s " % (row[0], row[1],row[2]) msg = "%-32s %-20s %-20s " % (row[0], row[1], row[2])
if args.impact: if args.impact:
msg += "%-10s" % (row[5]) msg += "%-10s" % (row[5])
if args.pid: if args.pid:
@ -193,9 +193,9 @@ def main():
try: try:
sys.stdout.close() sys.stdout.close()
except: except Exception as e:
pass pass
try: try:
sys.stderr.close() sys.stderr.close()
except: except Exception as e:
pass pass

View File

@ -64,7 +64,7 @@ def main():
cursor.execute("SELECT NAME, DESIRED_STATE, STATE FROM " cursor.execute("SELECT NAME, DESIRED_STATE, STATE FROM "
"SERVICE_GROUPS WHERE NAME IN (%s) AND PROVISIONED='yes';" "SERVICE_GROUPS WHERE NAME IN (%s) AND PROVISIONED='yes';"
% ','.join("'%s'"%i for i in args.service_group_name)) % ','.join("'%s'" % i for i in args.service_group_name))
rows = cursor.fetchall() rows = cursor.fetchall()
@ -90,7 +90,7 @@ def main():
not_found_list.append(g) not_found_list.append(g)
if len(not_found_list) > 1: if len(not_found_list) > 1:
print("%s are not provisioned"%','.join( (g for g in not_found_list))) print("%s are not provisioned" % ','.join((g for g in not_found_list)))
elif len(not_found_list) == 1: elif len(not_found_list) == 1:
print("%s is not provisioned" % ','.join((g for g in not_found_list))) print("%s is not provisioned" % ','.join((g for g in not_found_list)))

View File

@ -11,7 +11,7 @@
# SERVICE_GROUP_AGGREGATE_DESIRED_STATE=<service-group-state> # SERVICE_GROUP_AGGREGATE_DESIRED_STATE=<service-group-state>
# SERVICE_GROUP_AGGREGATE_STATE=<service-group-state> # SERVICE_GROUP_AGGREGATE_STATE=<service-group-state>
# #
# SERVICE_GROUP_NAME=<service-group-name> # SERVICE_GROUP_NAME=<service-group-name>
# SERVICE_GROUP_DESIRED_STATE=<service-group-state> # SERVICE_GROUP_DESIRED_STATE=<service-group-state>
# SERVICE_GROUP_STATE=<service-group-state> # SERVICE_GROUP_STATE=<service-group-state>
# SERVICE_GROUP_NOTIFICATION=<service-group-notification> # SERVICE_GROUP_NOTIFICATION=<service-group-notification>

45
tox.ini
View File

@ -51,40 +51,11 @@ commands =
[flake8] [flake8]
# The following are being suppressed for now # The following are being suppressed for now
# E114 indentation is not a multiple of four (comment)
# E116 unexpected indentation (comment)
# E121 continuation line under-indented for hanging indent
# E122 continuation line missing indentation or outdented
# E123 closing bracket does not match indentation of opening bracket's line
# E124 closing bracket does not match visual indentation
# E125 continuation line with same indent as next logical line
# E126 continuation line over-indented for hanging indent
# E127 continuation line over-indented for visual indent
# E128 continuation line under-indented for visual indent
# E129 visually indented line with same indent as next logical line
# E131 continuation line unaligned for hanging indent
# E201 whitespace after '('
# E228 missing whitespace around modulo operator
# E231 missing whitespace after ','
# E241 multiple spaces after ':'
# E251 unexpected spaces around keyword / parameter equals
# E265 block comment should start with '#'
# E271 multiple spaces after keyword
# E302 expected 2 blank lines, found 1
# E303 too many blank lines
# E305 expected 2 blank lines after class or function definition, found 1
# E402 module level import not at top of file # E402 module level import not at top of file
# E713 test for membership should be 'not in'
# E714 test for object identity should be 'is not'
# E722 do not use bare except'
# E731 do not assign a lambda expression, use a def
# E999 SyntaxError: invalid syntax (this is likely python3)
# - hacking codes - # - hacking codes -
# H102: license header not found # H102: license header not found
# H104: File contains nothing but comments # H104: File contains nothing but comments
# H105: Don't use author tags # H105: Don't use author tags
# H201: no 'except:'
# H233: Python 3.x incompatible use of print operator
# H306: imports not in alphabetical order
# H401: docstring should not start with a space
# H403: multi line docstrings should end on a new line
# H404: multi line docstring should start without a leading new line
# H405: multi line docstring summary not separated with an empty line
# H501: Do not use locals() for string formatting
# - errors -
# F401 <foo> imported but unused
# F811 redefinition of unused '<foo>' from line <x>
# F821 undefined name 'e'
# F841 local variable 'foo' is assigned to but never used
# - bugbear -
# B001 Do not use bare `except:`. Prefer `except Exception:`. If you're sure what you're doing, be explicit and write `except BaseException:`.
# B004 Using `hasattr(x, '__call__')` to test if `x` is callable is unreliable.
# B006 Do not use mutable data structures for argument defaults. (python3)
# B007 Loop control variable 'key' not used within the loop body. If this is intended, start the name with an underscore.
# B008 Do not perform calls in argument defaults. The call is performed only once at function definition time.
# B305 `.next()` is not a thing on Python 3. Use the `next()` builtin. For Python 2 compatibility, use `six.next()`.
# B306 `BaseException.message` has been deprecated as of Python 2.6 and is removed in Python 3.
ignore= E402,
        H102,H104,H105,H106,H306,H401,H403,H404,H405,H501,
        F811,F821,
        B006,B008
# Enable checks which are off by default # Enable checks which are off by default
# H106 Dont put vim configuration in source files (off by default). SHOULD BE ENABLED. # H106 Dont put vim configuration in source files (off by default). SHOULD BE ENABLED.
# H203 Use assertIs(Not)None to check for None (off by default). # H203 Use assertIs(Not)None to check for None (off by default).