Fix flake8 errors and remove entries from the flake8 ignore list

The below issues are removed from the ignore list:
    E114 indentation is not a multiple of four (comment)
    E116 unexpected indentation (comment)
    E121 continuation line under-indented for hanging indent
    E122 continuation line missing indentation or outdented
    E123 closing bracket does not match indentation of opening bracket's line
    E124 closing bracket does not match visual indentation
    E125 continuation line with same indent as next logical line
    E126 continuation line over-indented for hanging indent
    E127 continuation line over-indented for visual indent
    E128 continuation line under-indented for visual indent
    E129 visually indented line with same indent as next logical line
    E131 continuation line unaligned for hanging indent
    E201 whitespace after '('
    E228 missing whitespace around modulo operator
    E231 missing whitespace after ','
    E241 multiple spaces after ':'
    E251 unexpected spaces around keyword / parameter equals
    E265 block comment should start with '#'
    E271 multiple spaces after keyword
    E302 expected 2 blank lines, found 1
    E303 too many blank lines
    E305 expected 2 blank lines after class or function definition, found 1
    E704 multiple statements on one line (def)
    E713 test for membership should be 'not in'
    E714 test for object identity should be 'is not'
    E722 do not use bare 'except'
    E731 do not assign a lambda expression, use a def
    E999 SyntaxError: invalid syntax (this is likely python3)
    F401 <foo> imported but unused
    F841 local variable 'foo' is assigned to but never used
    H201: no 'except:'
    H233: Python 3.x incompatible use of print operator
    B001 Do not use bare `except:`
    B004 Using `hasattr(x, '__call__')` to test
         if `x` is callable is unreliable.
    B305 `.next()` is not a thing on Python 3. Use the `next()` builtin.
    B306 `BaseException.message` has been deprecated as of Python 2.6
          and is removed in Python 3.
    B007 Loop control variable 'key' not used within the loop body.
The below issues remain in the ignore list:
    E402 module level import not at top of file
    ./service-mgmt-api/sm-api/sm_api/cmd/__init__.py:25

    Hxxx since which are related with document format

    F811 redefinition of unused '<foo>' from line <x>
    ./service-mgmt-tools/sm-tools/sm_tools/sm_configure.py:18

    F821 undefined name 'e'
    ./service-mgmt-api/sm-api/sm_api/common/utils.py:448

    B006 Do not use mutable data structures for argument defaults.
    ./service-mgmt-api/sm-api/sm_api/common/service.py:59

    B008 Do not perform calls in argument defaults.
    ./service-mgmt-api/sm-api/sm_api/openstack/common/timeutils.py:117

Tests have been done: Build, Deploy, and some smc commands, such as
smc service-list, smc service-show, sm-dump, etc.

Story: 2003430
Task: 26524

Change-Id: I3e2a4a31f87e3ff66cfce86f54285e830ee1c3dc
Signed-off-by: Sun Austin <austin.sun@intel.com>
This commit is contained in:
Sun Austin 2018-10-31 10:51:20 +08:00
parent fa29551d9e
commit da53f143ee
88 changed files with 354 additions and 356 deletions

View File

@ -6,19 +6,19 @@
from oslo_config import cfg
API_SERVICE_OPTS = [
cfg.StrOpt('sm_api_bind_ip',
default='0.0.0.0',
help='IP for the Service Management API server to bind to',
),
cfg.IntOpt('sm_api_port',
default=7777,
help='The port for the Service Management API server',
),
cfg.IntOpt('api_limit_max',
cfg.StrOpt('sm_api_bind_ip',
default='0.0.0.0',
help='IP for the Service Management API server to bind to',
),
cfg.IntOpt('sm_api_port',
default=7777,
help='The port for the Service Management API server',
),
cfg.IntOpt('api_limit_max',
default=1000,
help='the maximum number of items returned in a single '
'response from a collection resource'),
]
'response from a collection resource'),
]
CONF = cfg.CONF
opt_group = cfg.OptGroup(name='api',

View File

@ -39,7 +39,7 @@ def register_opts(conf):
:param conf: SmApi settings.
"""
#conf.register_opts(keystone_auth_token._OPTS, group=OPT_GROUP_NAME)
# conf.register_opts(keystone_auth_token._OPTS, group=OPT_GROUP_NAME)
keystone_auth_token.CONF = conf
@ -69,6 +69,7 @@ class AdminAuthHook(hooks.PecanHook):
rejects the request otherwise.
"""
def before(self, state):
ctx = state.request.context

View File

@ -64,4 +64,5 @@ def main():
print(e)
sys.exit(-4)
main()

View File

@ -20,9 +20,9 @@ from sm_api.common import policy
auth_opts = [
cfg.StrOpt('auth_strategy',
default='keystone',
help='Method to use for auth: noauth or keystone.'),
]
default='keystone',
help='Method to use for auth: noauth or keystone.'),
]
CONF = cfg.CONF
CONF.register_opts(auth_opts)

View File

@ -14,4 +14,4 @@ app = {'root': 'sm_api.api.controllers.root.RootController',
'debug': False,
'enable_acl': True,
'acl_public_routes': ['/', '/v1']
}
}

View File

@ -75,10 +75,10 @@ class Version1(wsme_types.Base):
v1.servicenode = [link.Link.make_link('self',
pecan.request.host_url,
'servicenode', ''),
link.Link.make_link('bookmark',
pecan.request.host_url,
'servicenode', '',
bookmark=True)]
link.Link.make_link('bookmark',
pecan.request.host_url,
'servicenode', '',
bookmark=True)]
v1.sm_sda = [link.Link.make_link('self',
pecan.request.host_url,
'sm_sda', ''),
@ -103,4 +103,5 @@ class Controller(rest.RestController):
def get(self):
return Version1.convert()
__all__ = Controller

View File

@ -9,8 +9,6 @@
# All Rights Reserved.
#
import datetime
import wsme
from wsme import types as wsme_types
@ -50,6 +48,6 @@ class APIBase(wsme_types.Base):
# Unset non-required fields so they do not appear
# in the message body
obj_dict.update(dict((k, wsme.Unset)
for k in obj_dict.keys()
if fields and k not in fields))
for k in obj_dict.keys()
if fields and k not in fields))
return cls(**obj_dict)

View File

@ -48,8 +48,8 @@ class Collection(base.APIBase):
resource_url = url or self._type
q_args = ''.join(['%s=%s&' % (key, kwargs[key]) for key in kwargs])
next_args = '?%(args)slimit=%(limit)d&marker=%(marker)s' % {
'args': q_args, 'limit': limit,
'marker': self.collection[-1].uuid}
'args': q_args, 'limit': limit,
'marker': self.collection[-1].uuid}
return link.Link.make_link('next', pecan.request.host_url,
resource_url, next_args).href

View File

@ -4,8 +4,6 @@
# SPDX-License-Identifier: Apache-2.0
#
import json
import wsme
from wsme import types as wsme_types
import wsmeext.pecan as wsme_pecan
import pecan
@ -66,7 +64,7 @@ class Nodes(base.APIBase):
'ready_state']
fields = minimum_fields if not expand else None
nodes = Nodes.from_rpc_object(
rpc_nodes, fields)
rpc_nodes, fields)
return nodes
@ -85,8 +83,8 @@ class NodesCollection(collection.Collection):
expand=False, **kwargs):
collection = NodesCollection()
collection.nodes = [
Nodes.convert_with_links(ch, expand)
for ch in nodes]
Nodes.convert_with_links(ch, expand)
for ch in nodes]
url = url or None
collection.next = collection.get_next(limit, url=url, **kwargs)
return collection
@ -103,7 +101,7 @@ class NodesController(rest.RestController):
marker_obj = None
if marker:
marker_obj = objects.sm_node.get_by_uuid(
pecan.request.context, marker)
pecan.request.context, marker)
nodes = pecan.request.dbapi.sm_node_get_list(limit,
marker_obj,
@ -118,7 +116,7 @@ class NodesController(rest.RestController):
except exception.ServerNotFound:
return None
return Nodes.convert_with_links(rpc_sg)
return Nodes.convert_with_links(rpc_sg)
@wsme_pecan.wsexpose(NodesCollection, six.text_type, int,
six.text_type, six.text_type)
@ -132,8 +130,8 @@ class NodesController(rest.RestController):
sort_dir)
return NodesCollection.convert_with_links(nodes, limit,
sort_key=sort_key,
sort_dir=sort_dir)
sort_key=sort_key,
sort_dir=sort_dir)
@wsme_pecan.wsexpose(NodesCommandResult, six.text_type,
body=NodesCommand)

View File

@ -4,8 +4,6 @@
# SPDX-License-Identifier: Apache-2.0
#
import json
import wsme
from wsme import types as wsme_types
import wsmeext.pecan as wsme_pecan
import pecan
@ -72,7 +70,7 @@ class ServiceGroup(base.APIBase):
fields = minimum_fields if not expand else None
service_groups = ServiceGroup.from_rpc_object(
rpc_service_groups, fields)
rpc_service_groups, fields)
return service_groups
@ -91,8 +89,8 @@ class ServiceGroupCollection(collection.Collection):
expand=False, **kwargs):
collection = ServiceGroupCollection()
collection.service_groups = [
ServiceGroup.convert_with_links(ch, expand)
for ch in service_groups]
ServiceGroup.convert_with_links(ch, expand)
for ch in service_groups]
url = url or None
collection.next = collection.get_next(limit, url=url, **kwargs)
return collection
@ -141,13 +139,13 @@ class ServiceGroupController(rest.RestController):
"""Retrieve list of servicegroups."""
service_groups = self._get_service_groups(marker,
limit,
sort_key,
sort_dir)
limit,
sort_key,
sort_dir)
return ServiceGroupCollection.convert_with_links(service_groups, limit,
sort_key=sort_key,
sort_dir=sort_dir)
sort_key=sort_key,
sort_dir=sort_dir)
@wsme_pecan.wsexpose(ServiceGroupCommandResult, six.text_type,
body=ServiceGroupCommand)

View File

@ -33,7 +33,6 @@ import json
from sm_api.api.controllers.v1 import base
from sm_api.api.controllers.v1 import smc_api
from sm_api.openstack.common import log
from sm_api.api.controllers.v1 import services
LOG = log.getLogger(__name__)
@ -226,7 +225,7 @@ class ServiceNodeController(rest.RestController):
# check whether hostname exists in nodes table
node_exists = False
sm_nodes = pecan.request.dbapi.sm_node_get_by_name(hostname)
for sm_node in sm_nodes:
for _sm_node in sm_nodes:
node_exists = True
return node_exists
@ -339,7 +338,7 @@ class ServiceNodeController(rest.RestController):
# degraded or failure of A/A service on the target host
# would not stop swact
sdm = self._sm_sdm_get(sm_sda.name,
sm_sda.service_group_name)
sm_sda.service_group_name)
if (self._is_aa_service_group(sdm)):
continue
@ -349,14 +348,14 @@ class ServiceNodeController(rest.RestController):
# or service only provisioned in the other host
# or service state are the same on both hosts
if SM_SERVICE_GROUP_STATE_ACTIVE != sm_sda.state \
and SM_SERVICE_GROUP_STATE_STANDBY != sm_sda.state \
and sm_sda.service_group_name in origin_state \
and origin_state[sm_sda.service_group_name] != sm_sda.state:
and SM_SERVICE_GROUP_STATE_STANDBY != sm_sda.state \
and sm_sda.service_group_name in origin_state \
and origin_state[sm_sda.service_group_name] != sm_sda.state:
check_result = (
"%s on %s is not ready to take service, "
"service not in the active or standby "
"state" % (sm_sda.service_group_name,
sm_sda.node_name))
sm_sda.node_name))
break
# Verify that all the services are in the desired state on
@ -481,7 +480,7 @@ class ServiceNodeController(rest.RestController):
rsvc = self.get_remote_svc(sda.node_name, service_name)
if (SM_SERVICE_STATE_ENABLED_ACTIVE ==
rsvc['desired_state'] and
SM_SERVICE_STATE_ENABLED_ACTIVE == rsvc['state']):
SM_SERVICE_STATE_ENABLED_ACTIVE == rsvc['state']):
chk_list[sda.service_group_name].remove(service_name)
all_good = True
@ -495,7 +494,7 @@ class ServiceNodeController(rest.RestController):
target_services = []
for sda in sdas:
if (sda.node_name == hostname and
sda.service_group_name in chk_list):
sda.service_group_name in chk_list):
for service_name in chk_list[sda.service_group_name]:
LOG.info("checking %s on %s" % (service_name, hostname))
rsvc = self.get_remote_svc(sda.node_name, service_name)
@ -503,7 +502,7 @@ class ServiceNodeController(rest.RestController):
continue
if (SM_SERVICE_STATE_ENABLED_ACTIVE ==
rsvc['desired_state'] and
SM_SERVICE_STATE_ENABLED_ACTIVE == rsvc['state']):
SM_SERVICE_STATE_ENABLED_ACTIVE == rsvc['state']):
LOG.info("which is %s %s" % (rsvc['desired_state'], rsvc['state']))
target_services.append(service_name)
LOG.info("services %s solely running on %s" % (','.join(target_services), hostname))
@ -579,30 +578,30 @@ class ServiceNodeController(rest.RestController):
ack_avail = sm_ack_dict['SM_API_MSG_NODE_AVAIL'].lower()
LOG.info("sm-api _do_modify_command sm_ack_dict: %s ACK admin: "
"%s oper: %s avail: %s." % (sm_ack_dict, ack_admin,
ack_oper, ack_avail))
"%s oper: %s avail: %s." % (sm_ack_dict, ack_admin,
ack_oper, ack_avail))
# loose check on admin and oper only
if (command.admin == ack_admin) and (command.oper == ack_oper):
return ServiceNodeCommandResult(
origin=sm_ack_dict['SM_API_MSG_ORIGIN'],
hostname=sm_ack_dict['SM_API_MSG_NODE_NAME'],
action=sm_ack_dict['SM_API_MSG_NODE_ACTION'],
admin=ack_admin,
oper=ack_oper,
avail=ack_avail,
error_code=ERR_CODE_SUCCESS,
error_msg="success")
origin=sm_ack_dict['SM_API_MSG_ORIGIN'],
hostname=sm_ack_dict['SM_API_MSG_NODE_NAME'],
action=sm_ack_dict['SM_API_MSG_NODE_ACTION'],
admin=ack_admin,
oper=ack_oper,
avail=ack_avail,
error_code=ERR_CODE_SUCCESS,
error_msg="success")
else:
result = ServiceNodeCommandResult(
origin="sm",
hostname=hostname,
action=sm_ack_dict['SM_API_MSG_NODE_ACTION'],
admin=ack_admin,
oper=ack_oper,
avail=ack_avail,
error_code=ERR_CODE_ACTION_FAILED,
error_details="action failed")
origin="sm",
hostname=hostname,
action=sm_ack_dict['SM_API_MSG_NODE_ACTION'],
admin=ack_admin,
oper=ack_oper,
avail=ack_avail,
error_code=ERR_CODE_ACTION_FAILED,
error_details="action failed")
return wsme.api.Response(result, status_code=500)
else:
@ -613,7 +612,7 @@ class ServiceNodeController(rest.RestController):
try:
data = self._get_sm_node_state(hostname)
except:
except Exception as e:
LOG.exception("No entry in database for %s:" % hostname)
return ServiceNode(origin="sm",
hostname=hostname,

View File

@ -4,8 +4,6 @@
# SPDX-License-Identifier: Apache-2.0
#
import json
import wsme
from wsme import types as wsme_types
import wsmeext.pecan as wsme_pecan
import pecan
@ -65,7 +63,7 @@ class Services(base.APIBase):
minimum_fields = ['id', 'name', 'desired_state', 'state', 'status']
fields = minimum_fields if not expand else None
services = Services.from_rpc_object(
rpc_services, fields)
rpc_services, fields)
return services
@ -84,8 +82,8 @@ class ServicesCollection(collection.Collection):
expand=False, **kwargs):
collection = ServicesCollection()
collection.services = [
Services.convert_with_links(ch, expand)
for ch in services]
Services.convert_with_links(ch, expand)
for ch in services]
url = url or None
collection.next = collection.get_next(limit, url=url, **kwargs)
return collection
@ -102,12 +100,12 @@ class ServicesController(rest.RestController):
marker_obj = None
if marker:
marker_obj = objects.service.get_by_uuid(
pecan.request.context, marker)
pecan.request.context, marker)
services = pecan.request.dbapi.sm_service_get_list(limit,
marker_obj,
sort_key=sort_key,
sort_dir=sort_dir)
marker_obj,
sort_key=sort_key,
sort_dir=sort_dir)
return services
@wsme_pecan.wsexpose(Services, six.text_type)
@ -134,9 +132,9 @@ class ServicesController(rest.RestController):
"""Retrieve list of services."""
services = self._get_services(marker,
limit,
sort_key,
sort_dir)
limit,
sort_key,
sort_dir)
return ServicesCollection.convert_with_links(services, limit,
sort_key=sort_key,

View File

@ -4,8 +4,6 @@
# SPDX-License-Identifier: Apache-2.0
#
import json
import wsme
from wsme import types as wsme_types
import wsmeext.pecan as wsme_pecan
import pecan
@ -71,7 +69,7 @@ class SmSda(base.APIBase):
'state', 'status', 'condition']
fields = minimum_fields if not expand else None
sm_sda = SmSda.from_rpc_object(
rpc_sm_sda, fields)
rpc_sm_sda, fields)
return sm_sda
@ -90,8 +88,8 @@ class SmSdaCollection(collection.Collection):
expand=False, **kwargs):
collection = SmSdaCollection()
collection.sm_sda = [
SmSda.convert_with_links(ch, expand)
for ch in sm_sda]
SmSda.convert_with_links(ch, expand)
for ch in sm_sda]
url = url or None
collection.next = collection.get_next(limit, url=url, **kwargs)
return collection

View File

@ -91,19 +91,19 @@ def sm_api_notify(sm_dict):
break
else:
LOG.debug(_("sm-api mismatch seqno tx message: %s rx message: %s " % (sm_buf, sm_ack)))
except:
except Exception as e:
LOG.exception(_("sm-api bad rx message: %s" % sm_ack))
except socket.error as e:
LOG.exception(_("sm-api socket error: %s on %s") % (e, sm_buf))
sm_ack_dict = {
'SM_API_MSG_TYPE': "unknown_set_node",
'SM_API_MSG_NODE_ACTION': sm_dict['SM_API_MSG_NODE_ACTION'],
'SM_API_MSG_ORIGIN': "sm",
'SM_API_MSG_NODE_NAME': sm_dict['SM_API_MSG_NODE_NAME'],
'SM_API_MSG_NODE_ADMIN': "unknown",
'SM_API_MSG_NODE_OPER': "unknown",
'SM_API_MSG_NODE_AVAIL': "unknown"}
'SM_API_MSG_TYPE': "unknown_set_node",
'SM_API_MSG_NODE_ACTION': sm_dict['SM_API_MSG_NODE_ACTION'],
'SM_API_MSG_ORIGIN': "sm",
'SM_API_MSG_NODE_NAME': sm_dict['SM_API_MSG_NODE_NAME'],
'SM_API_MSG_NODE_ADMIN': "unknown",
'SM_API_MSG_NODE_OPER': "unknown",
'SM_API_MSG_NODE_AVAIL': "unknown"}
return sm_ack_dict
@ -116,46 +116,46 @@ def sm_api_notify(sm_dict):
try:
sm_ack_list = sm_ack.split(",")
sm_ack_dict = {
'SM_API_MSG_VERSION': sm_ack_list[SM_API_MSG_VERSION_FIELD],
'SM_API_MSG_REVISION': sm_ack_list[SM_API_MSG_REVISION_FIELD],
'SM_API_MSG_SEQNO': sm_ack_list[SM_API_MSG_SEQNO_FIELD],
'SM_API_MSG_TYPE': sm_ack_list[SM_API_MSG_TYPE_FIELD],
'SM_API_MSG_NODE_ACTION': sm_ack_list[SM_API_MSG_NODE_ACTION_FIELD],
'SM_API_MSG_VERSION': sm_ack_list[SM_API_MSG_VERSION_FIELD],
'SM_API_MSG_REVISION': sm_ack_list[SM_API_MSG_REVISION_FIELD],
'SM_API_MSG_SEQNO': sm_ack_list[SM_API_MSG_SEQNO_FIELD],
'SM_API_MSG_TYPE': sm_ack_list[SM_API_MSG_TYPE_FIELD],
'SM_API_MSG_NODE_ACTION': sm_ack_list[SM_API_MSG_NODE_ACTION_FIELD],
'SM_API_MSG_ORIGIN': sm_ack_list[SM_API_MSG_ORIGIN_FIELD],
'SM_API_MSG_NODE_NAME': sm_ack_list[SM_API_MSG_NODE_NAME_FIELD],
'SM_API_MSG_NODE_ADMIN': sm_ack_list[SM_API_MSG_NODE_ADMIN_FIELD],
'SM_API_MSG_NODE_OPER': sm_ack_list[SM_API_MSG_NODE_OPER_FIELD],
'SM_API_MSG_NODE_AVAIL': sm_ack_list[SM_API_MSG_NODE_AVAIL_FIELD]
}
except:
'SM_API_MSG_ORIGIN': sm_ack_list[SM_API_MSG_ORIGIN_FIELD],
'SM_API_MSG_NODE_NAME': sm_ack_list[SM_API_MSG_NODE_NAME_FIELD],
'SM_API_MSG_NODE_ADMIN': sm_ack_list[SM_API_MSG_NODE_ADMIN_FIELD],
'SM_API_MSG_NODE_OPER': sm_ack_list[SM_API_MSG_NODE_OPER_FIELD],
'SM_API_MSG_NODE_AVAIL': sm_ack_list[SM_API_MSG_NODE_AVAIL_FIELD]
}
except Exception as e:
LOG.exception(_("sm-api ack message error: %s" % sm_ack))
sm_ack_dict = {
'SM_API_MSG_TYPE': "unknown_set_node",
'SM_API_MSG_TYPE': "unknown_set_node",
'SM_API_MSG_ORIGIN': "sm",
'SM_API_MSG_NODE_NAME': sm_dict['SM_API_MSG_NODE_NAME'],
'SM_API_MSG_NODE_ADMIN': "unknown",
'SM_API_MSG_NODE_OPEsR': "unknown",
'SM_API_MSG_NODE_AVAIL': "unknown"
}
'SM_API_MSG_ORIGIN': "sm",
'SM_API_MSG_NODE_NAME': sm_dict['SM_API_MSG_NODE_NAME'],
'SM_API_MSG_NODE_ADMIN': "unknown",
'SM_API_MSG_NODE_OPEsR': "unknown",
'SM_API_MSG_NODE_AVAIL': "unknown"
}
return sm_ack_dict
def sm_api_set_node_state(origin, hostname, action, admin, avail, oper, seqno):
sm_ack_dict = {}
sm_dict = {'SM_API_MSG_TYPE': SM_API_MSG_TYPE_SET_NODE,
sm_dict = {'SM_API_MSG_TYPE': SM_API_MSG_TYPE_SET_NODE,
'SM_API_MSG_ORIGIN': origin,
'SM_API_MSG_NODE_NAME': hostname,
'SM_API_MSG_NODE_ACTION': action,
'SM_API_MSG_NODE_ADMIN': admin,
'SM_API_MSG_NODE_OPER': oper,
'SM_API_MSG_NODE_AVAIL': avail,
'SM_API_MSG_NODE_ADMIN': admin,
'SM_API_MSG_NODE_OPER': oper,
'SM_API_MSG_NODE_AVAIL': avail,
'SM_API_MSG_SEQNO': seqno,
}
}
sm_ack_dict = sm_api_notify(sm_dict)

View File

@ -61,14 +61,14 @@ def validate_patch(patch):
if not isinstance(p, dict) or \
any(key for key in ["path", "op"] if key not in p):
raise wsme.exc.ClientSideError(_("Invalid patch format: %s")
% str(p))
% str(p))
path = p["path"]
op = p["op"]
if op not in ["add", "replace", "remove"]:
raise wsme.exc.ClientSideError(_("Operation not supported: %s")
% op)
% op)
if not path_pattern.match(path):
raise wsme.exc.ClientSideError(_("Invalid path: %s") % path)
@ -78,7 +78,7 @@ def validate_patch(patch):
raise wsme.exc.ClientSideError(_("Adding an additional "
"attribute (%s) to the "
"resource is not allowed")
% path)
% path)
class ValidTypes(wsme.types.UserType):

View File

@ -8,7 +8,6 @@
Hooks
"""
import sqlite3
from pecan import hooks
from sm_api.common import context
@ -60,6 +59,7 @@ class ContextHook(hooks.PecanHook):
or admin substring. Otherwise it is set to False.
"""
def __init__(self, public_api_routes):
self.public_api_routes = public_api_routes
super(ContextHook, self).__init__()

View File

@ -20,6 +20,7 @@ class AuthTokenMiddleware(auth_token.AuthProtocol):
for public routes in the API.
"""
def __init__(self, app, conf, public_api_routes=[]):
self.smapi_app = app
self.public_api_routes = set(public_api_routes)

View File

@ -38,6 +38,7 @@ LOG = log.getLogger(__name__)
class ParsableErrorMiddleware(object):
"""Replace error body with something the client can parse.
"""
def __init__(self, app):
self.app = app
@ -75,7 +76,7 @@ class ParsableErrorMiddleware(object):
req = webob.Request(environ)
if (req.accept.best_match(
['application/json', 'application/xml']) ==
'application/xml'):
'application/xml'):
try:
# simple check xml is valid
body = [et.ElementTree.tostring(

View File

@ -73,7 +73,7 @@ class ProcessExecutionError(IOError):
def _cleanse_dict(original):
"""Strip all admin_password, new_pass, rescue_pass keys from a dict."""
return dict((k, v) for k, v in original.items() if not "_pass" in k)
return dict((k, v) for k, v in original.items() if "_pass" not in k)
def wrap_exception(notifier=None, publisher_id=None, event_type=None,
@ -319,12 +319,12 @@ class NodeInUse(SmApiException):
class NodeInWrongPowerState(SmApiException):
message = _("Can not change instance association while node "
"%(node)s is in power state %(pstate)s.")
"%(node)s is in power state %(pstate)s.")
class NodeNotConfigured(SmApiException):
message = _("Can not change power state because node %(node)s "
"is not fully configured.")
"is not fully configured.")
class ChassisNotEmpty(SmApiException):

View File

@ -37,7 +37,7 @@ policy_opts = [
cfg.StrOpt('policy_default_rule',
default='default',
help=_('Rule checked when requested rule is not found')),
]
]
CONF = cfg.CONF
CONF.register_opts(policy_opts)

View File

@ -36,7 +36,7 @@ def getcallargs(function, *args, **kwargs):
keyed_args.update(kwargs)
#NOTE(alaski) the implicit 'self' or 'cls' argument shows up in
# NOTE(alaski) the implicit 'self' or 'cls' argument shows up in
# argnames but not in args or kwargs. Uses 'in' rather than '==' because
# some tests use 'self2'.
if 'self' in argnames[0] or 'cls' == argnames[0]:

View File

@ -29,7 +29,7 @@ import errno
import hashlib
import json
import os
#import paramiko
# import paramiko
import random
import re
import shutil
@ -120,7 +120,7 @@ def execute(*cmd, **kwargs):
if len(kwargs):
raise exception.SmApiException(_('Got unknown keyword args '
'to utils.execute: %r') % kwargs)
'to utils.execute: %r') % kwargs)
if run_as_root and os.geteuid() != 0:
cmd = ['sudo', 'sm_api-rootwrap', CONF.rootwrap_config] + list(cmd)
@ -158,10 +158,10 @@ def execute(*cmd, **kwargs):
if not ignore_exit_code and _returncode not in check_exit_code:
(stdout, stderr) = result
raise exception.ProcessExecutionError(
exit_code=_returncode,
stdout=stdout,
stderr=stderr,
cmd=' '.join(cmd))
exit_code=_returncode,
stdout=stdout,
stderr=stderr,
cmd=' '.join(cmd))
return result
except exception.ProcessExecutionError:
if not attempts:
@ -376,7 +376,7 @@ def is_valid_cidr(address):
ip_segment = address.split('/')
if (len(ip_segment) <= 1 or
ip_segment[1] == ''):
ip_segment[1] == ''):
return False
return True
@ -530,7 +530,7 @@ def mkfs(fs, path, label=None):
args = ['mkswap']
else:
args = ['mkfs', '-t', fs]
#add -F to force no interactive execute on non-block device.
# add -F to force no interactive execute on non-block device.
if fs in ('ext3', 'ext4'):
args.extend(['-F'])
if label:
@ -548,13 +548,13 @@ def mkfs(fs, path, label=None):
# or reimplement as a common lib,
# or make a driver that doesn't need to do this.
#
#def cache_image(context, target, image_id, user_id, project_id):
# def cache_image(context, target, image_id, user_id, project_id):
# if not os.path.exists(target):
# libvirt_utils.fetch_image(context, target, image_id,
# user_id, project_id)
#
#
#def inject_into_image(image, key, net, metadata, admin_password,
# def inject_into_image(image, key, net, metadata, admin_password,
# files, partition, use_cow=False):
# try:
# disk_api.inject_data(image, key, net, metadata, admin_password,
@ -572,7 +572,7 @@ def unlink_without_raise(path):
return
else:
LOG.warn(_("Failed to unlink %(path)s, error: %(e)s") %
{'path': path, 'e': e})
{'path': path, 'e': e})
def rmtree_without_raise(path):
@ -581,7 +581,7 @@ def rmtree_without_raise(path):
shutil.rmtree(path)
except OSError as e:
LOG.warn(_("Failed to remove dir %(path)s, error: %(e)s") %
{'path': path, 'e': e})
{'path': path, 'e': e})
def write_to_file(path, contents):
@ -598,7 +598,7 @@ def create_link_without_raise(source, link):
else:
LOG.warn(_("Failed to create symlink from %(source)s to %(link)s"
", error: %(e)s") %
{'source': source, 'link': link, 'e': e})
{'source': source, 'link': link, 'e': e})
def safe_rstrip(value, chars=None):
@ -659,13 +659,13 @@ def notify_mtc_and_recv(mtc_address, mtc_port, idict):
try:
mtc_response_dict = json.loads(mtc_response)
LOG.warning("Mtc Response: %s" % mtc_response_dict)
except:
except Exception as e:
LOG.exception("Mtc Response Error: %s" % mtc_response)
pass
except socket.error as e:
LOG.exception(_("Socket Error: %s on %s:%s for %s") % (e,
mtc_address, mtc_port, serialized_idict))
mtc_address, mtc_port, serialized_idict))
# if e not in [errno.EWOULDBLOCK, errno.EINTR]:
# raise exception.CommunicationError(_(
# "Socket error: address=%s port=%s error=%s ") % (

View File

@ -157,7 +157,7 @@ class Connection(object):
@abc.abstractmethod
def sm_service_get_list(self, limit=None, marker=None,
sort_key=None, sort_dir=None):
sort_key=None, sort_dir=None):
"""Return a list of services.
:param limit: Maximum number of services to return.
@ -174,7 +174,6 @@ class Connection(object):
:param name: The name of the services.
"""
@abc.abstractmethod
def sm_service_group_members_get_list(self, service_group_name):
"""Return service group members in a service group

View File

@ -32,9 +32,9 @@ CONF.import_opt('backend',
group='database')
IMPL = utils.LazyPluggable(
pivot='backend',
config_group='database',
sqlalchemy='sm_api.db.sqlalchemy.migration')
pivot='backend',
config_group='database',
sqlalchemy='sm_api.db.sqlalchemy.migration')
INIT_VERSION = 0

View File

@ -31,7 +31,6 @@ from sm_api.common import utils
from sm_api.db import api
from sm_api.db.sqlalchemy import models
from sm_api import objects
from sm_api.openstack.common.db import exception as db_exc
from sm_api.openstack.common.db.sqlalchemy import session as db_session
from sm_api.openstack.common.db.sqlalchemy import utils as db_utils
from sm_api.openstack.common import log
@ -169,8 +168,7 @@ class Connection(api.Connection):
@objects.objectify(objects.service)
def iservice_get_by_name(self, name):
result = model_query(models.service, read_deleted="no").\
filter_by(name=name)
# first() since want a list
filter_by(name=name)
if not result:
raise exception.NodeNotFound(node=name)
@ -211,7 +209,7 @@ class Connection(api.Connection):
@objects.objectify(objects.sm_node)
def sm_node_get_list(self, limit=None, marker=None,
sort_key=None, sort_dir=None):
sort_key=None, sort_dir=None):
return _paginate_query(models.sm_node, limit, marker,
sort_key, sort_dir)
@ -231,8 +229,7 @@ class Connection(api.Connection):
@objects.objectify(objects.sm_node)
def sm_node_get_by_name(self, name):
result = model_query(models.sm_node, read_deleted="no").\
filter_by(name=name)
# first() since want a list
filter_by(name=name)
if not result:
raise exception.NodeNotFound(node=name)
@ -256,21 +253,20 @@ class Connection(api.Connection):
@objects.objectify(objects.service)
def sm_service_get_list(self, limit=None, marker=None,
sort_key=None, sort_dir=None):
sort_key=None, sort_dir=None):
return _paginate_query(models.service, limit, marker,
sort_key, sort_dir)
@objects.objectify(objects.service)
def sm_service_get_by_name(self, name):
result = model_query(models.service, read_deleted="no").\
filter_by(name=name)
filter_by(name=name)
if not result:
raise exception.ServiceNotFound(service=name)
return result
@objects.objectify(objects.service_group_member)
def sm_service_group_members_get_list(self, service_group_name):
result = model_query(models.sm_service_group_member,

View File

@ -9,9 +9,7 @@
#
from sqlalchemy import Column, MetaData, String, Table, UniqueConstraint
from sqlalchemy import Boolean, Integer, Enum, Text, ForeignKey, DateTime
from sqlalchemy import Index
from sqlalchemy.dialects import postgresql
from sqlalchemy import Integer, Text, ForeignKey, DateTime
ENGINE = 'InnoDB'
CHARSET = 'utf8'
@ -70,5 +68,5 @@ def upgrade(migrate_engine):
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from Initial is unsupported.')
#t = Table('i_disk', meta, autoload=True)
#t.drop()
# t = Table('i_disk', meta, autoload=True)
# t.drop()

View File

@ -50,7 +50,7 @@ def patched_with_engine(f, *a, **kw):
# on that version or higher, this can be removed
MIN_PKG_VERSION = dist_version.StrictVersion('0.7.3')
if (not hasattr(migrate, '__version__') or
dist_version.StrictVersion(migrate.__version__) < MIN_PKG_VERSION):
dist_version.StrictVersion(migrate.__version__) < MIN_PKG_VERSION):
migrate_util.with_engine = patched_with_engine

View File

@ -29,9 +29,8 @@ import json
from six.moves.urllib.parse import urlparse
from oslo_config import cfg
from sqlalchemy import Column, ForeignKey, Integer, Boolean
from sqlalchemy import Enum, UniqueConstraint, String
from sqlalchemy import Index
from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy import String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.types import TypeDecorator, VARCHAR

View File

@ -50,10 +50,10 @@ sm_node = smo_node.sm_node
service_group_member = smo_sgm.service_group_member
__all__ = (
service_groups,
service_group_member,
service,
sm_sdm,
sm_sda,
sm_node,
objectify)
service_groups,
service_group_member,
service,
sm_sdm,
sm_sda,
sm_node,
objectify)

View File

@ -106,6 +106,7 @@ def remotable_classmethod(fn):
# "orphaned" and remotable methods cannot be called.
def remotable(fn):
"""Decorator for remotable object methods."""
def wrapper(self, *args, **kwargs):
ctxt = self._context
try:
@ -337,7 +338,8 @@ class Sm_apiObject(object):
name in self.obj_extra_fields):
yield name, getattr(self, name)
items = lambda self: list(self.items())
def items(self):
return list(self.items())
def __getitem__(self, name):
"""For backwards-compatibility with dict-based objects.
@ -377,8 +379,8 @@ class Sm_apiObject(object):
def as_dict(self):
return dict((k, getattr(self, k))
for k in self.fields
if hasattr(self, k))
for k in self.fields
if hasattr(self, k))
@classmethod
def get_defaults(cls):
@ -398,7 +400,7 @@ class ObjectListBase(object):
"""
fields = {
'objects': list,
}
}
def __iter__(self):
"""List iterator interface."""

View File

@ -12,18 +12,19 @@ from sm_api.db import api as db_api
from sm_api.objects import base
from sm_api.objects import utils
class sm_node(base.Sm_apiObject):
dbapi = db_api.get_instance()
fields = {
'id': int,
'name': utils.str_or_none,
'administrative_state': utils.str_or_none,
'operational_state': utils.str_or_none,
'availability_status': utils.str_or_none,
'ready_state': utils.str_or_none,
}
'id': int,
'name': utils.str_or_none,
'administrative_state': utils.str_or_none,
'operational_state': utils.str_or_none,
'availability_status': utils.str_or_none,
'ready_state': utils.str_or_none,
}
@staticmethod
def _from_db_object(server, db_server):

View File

@ -18,20 +18,20 @@ class sm_sda(base.Sm_apiObject):
dbapi = db_api.get_instance()
fields = {
'id': int,
'uuid': utils.str_or_none,
# 'deleted': utils.str_or_none,
'id': int,
'uuid': utils.str_or_none,
# 'deleted': utils.str_or_none,
# 'created_at': utils.datetime_str_or_none,
# 'updated_at': utils.datetime_str_or_none,
'name': utils.str_or_none,
'node_name': utils.str_or_none,
'service_group_name': utils.str_or_none,
'state': utils.str_or_none,
'desired_state': utils.str_or_none,
'status': utils.str_or_none,
'condition': utils.str_or_none,
}
# 'created_at': utils.datetime_str_or_none,
# 'updated_at': utils.datetime_str_or_none,
'name': utils.str_or_none,
'node_name': utils.str_or_none,
'service_group_name': utils.str_or_none,
'state': utils.str_or_none,
'desired_state': utils.str_or_none,
'status': utils.str_or_none,
'condition': utils.str_or_none,
}
@staticmethod
def _from_db_object(server, db_server):

View File

@ -18,13 +18,13 @@ class sm_sdm(base.Sm_apiObject):
dbapi = db_api.get_instance()
fields = {
'id': int,
'name': utils.str_or_none,
'service_group_name': utils.str_or_none,
'redundancy_model': utils.str_or_none,
'n_active': int,
'm_standby': int,
}
'id': int,
'name': utils.str_or_none,
'service_group_name': utils.str_or_none,
'redundancy_model': utils.str_or_none,
'n_active': int,
'm_standby': int,
}
@staticmethod
def _from_db_object(server, db_server):

View File

@ -18,12 +18,12 @@ class service(base.Sm_apiObject):
dbapi = db_api.get_instance()
fields = {
'id': int,
'name': utils.str_or_none,
'desired_state': utils.str_or_none,
'state': utils.str_or_none,
'status': utils.str_or_none,
}
'id': int,
'name': utils.str_or_none,
'desired_state': utils.str_or_none,
'state': utils.str_or_none,
'status': utils.str_or_none,
}
@staticmethod
def _from_db_object(server, db_server):

View File

@ -18,16 +18,16 @@ class service_groups(base.Sm_apiObject):
dbapi = db_api.get_instance()
fields = {
'id': utils.int_or_none,
# 'uuid': utils.str_or_none,
# 'deleted': utils.str_or_none,
'id': utils.int_or_none,
# 'uuid': utils.str_or_none,
# 'deleted': utils.str_or_none,
# 'created_at': utils.datetime_str_or_none,
# 'updated_at': utils.datetime_str_or_none,
'name': utils.str_or_none,
'state': utils.str_or_none,
'status': utils.str_or_none,
}
# 'created_at': utils.datetime_str_or_none,
# 'updated_at': utils.datetime_str_or_none,
'name': utils.str_or_none,
'state': utils.str_or_none,
'status': utils.str_or_none,
}
@staticmethod
def _from_db_object(server, db_server):

View File

@ -18,11 +18,11 @@ class service_group_member(base.Sm_apiObject):
dbapi = db_api.get_instance()
fields = {
'id': utils.int_or_none,
'name': utils.str_or_none,
'service_name': utils.str_or_none,
'service_failure_impact': utils.str_or_none
}
'id': utils.int_or_none,
'name': utils.str_or_none,
'service_name': utils.str_or_none,
'service_failure_impact': utils.str_or_none
}
@staticmethod
def _from_db_object(server, db_server):

View File

@ -105,6 +105,7 @@ def nested_object_or_none(objclass):
def dt_serializer(name):
"""Return a datetime serializer for a named attribute."""
def serializer(self, name=name):
if getattr(self, name) is not None:
return timeutils.isotime(getattr(self, name))

View File

@ -120,7 +120,7 @@ def _import_module(mod_str):
def _is_in_group(opt, group):
"Check if opt is in group."
for key, value in group._opts.items():
for _key, value in group._opts.items():
if value['opt'] == opt:
return True
return False
@ -132,7 +132,7 @@ def _guess_groups(opt, mod_obj):
return 'DEFAULT'
# what other groups is it in?
for key, value in cfg.CONF.items():
for _key, value in cfg.CONF.items():
if isinstance(value, cfg.CONF.GroupAttr):
if _is_in_group(opt, value._group):
return value._group.name
@ -254,5 +254,6 @@ def main():
sys.exit(0)
generate(sys.argv[1:])
if __name__ == '__main__':
main()

View File

@ -100,7 +100,7 @@ class DBAPI(object):
def __getattr__(self, key):
backend = self.__backend or self.__get_backend()
attr = getattr(backend, key)
if not self.__use_tpool or not hasattr(attr, '__call__'):
if not self.__use_tpool or not callable(attr):
return attr
def tpool_wrapper(*args, **kwargs):

View File

@ -25,6 +25,7 @@ from sm_api.openstack.common.gettextutils import _ # noqa
class DBError(Exception):
"""Wraps an implementation specific exception."""
def __init__(self, inner_exception=None):
self.inner_exception = inner_exception
super(DBError, self).__init__(str(inner_exception))
@ -32,6 +33,7 @@ class DBError(Exception):
class DBDuplicateEntry(DBError):
"""Wraps an implementation specific exception."""
def __init__(self, columns=[], inner_exception=None):
self.columns = columns
super(DBDuplicateEntry, self).__init__(inner_exception)
@ -49,6 +51,7 @@ class DBInvalidUnicodeParameter(Exception):
class DbMigrationError(DBError):
"""Wraps migration specific exception."""
def __init__(self, message=None):
super(DbMigrationError, self).__init__(str(message))

View File

@ -91,7 +91,7 @@ class ModelBase(object):
Includes attributes from joins."""
local = dict(self)
joined = dict([(k, v) for k, v in self.__dict__.items()
if not k[0] == '_'])
if not k[0] == '_'])
local.update(joined)
return iter(local.items())

View File

@ -246,7 +246,6 @@ Efficient use of soft deletes:
"""
import os.path
import re
import time
@ -277,13 +276,13 @@ sqlite_db_opts = [
]
database_opts = [
cfg.StrOpt('connection',
default='sqlite:////var/run/sm/sm.db',
help='The SQLAlchemy connection string used to connect to the '
'database',
deprecated_name='sql_connection',
deprecated_group=DEFAULT,
secret=True),
cfg.StrOpt('connection',
default='sqlite:////var/run/sm/sm.db',
help='The SQLAlchemy connection string used to connect to the '
'database',
deprecated_name='sql_connection',
deprecated_group=DEFAULT,
secret=True),
cfg.IntOpt('idle_timeout',
default=3600,
deprecated_name='sql_idle_timeout',
@ -366,6 +365,7 @@ class SqliteForeignKeysListener(PoolListener):
so the foreign key constraints will be enabled here for every
database connection
"""
def connect(self, dbapi_con, con_record):
dbapi_con.execute('pragma foreign_keys=ON')
@ -627,6 +627,7 @@ def create_engine(sql_connection, sqlite_fk=False):
class Query(sqlalchemy.orm.query.Query):
"""Subclass of sqlalchemy.query with soft_delete() method."""
def soft_delete(self, synchronize_session='evaluate'):
return self.update({'deleted': literal_column('id'),
'updated_at': literal_column('updated_at'),

View File

@ -52,4 +52,4 @@ def save_and_reraise_exception():
logging.error(_('Original exception being dropped: %s'),
traceback.format_exception(type_, value, tb))
raise
raise_(type_,value,tb)
raise_(type_, value, tb)

View File

@ -19,7 +19,6 @@
#
import contextlib
import errno
import os

View File

@ -19,7 +19,6 @@
#
import errno
import functools
import os

View File

@ -367,8 +367,7 @@ def setup(product_name):
def set_defaults(logging_context_format_string):
cfg.set_defaults(log_opts,
logging_context_format_string=
logging_context_format_string)
logging_context_format_string=logging_context_format_string)
def _find_facility_from_conf():
@ -452,6 +451,7 @@ def _setup_logging_from_conf():
logger = logging.getLogger(mod)
logger.setLevel(level)
_loggers = {}

View File

@ -17,7 +17,6 @@
#
def notify(_context, message):
"""Notifies the recipient of the desired event given the model"""
pass

View File

@ -17,7 +17,6 @@
#
NOTIFICATIONS = []

View File

@ -470,7 +470,7 @@ def _parse_tokenize(rule):
# Handle leading parens on the token
clean = tok.lstrip('(')
for i in range(len(tok) - len(clean)):
for _i in range(len(tok) - len(clean)):
yield '(', '('
# If it was only parentheses, continue
@ -498,7 +498,7 @@ def _parse_tokenize(rule):
yield 'check', _parse_check(clean)
# Yield the trailing parens
for i in range(trail):
for _i in range(trail):
yield ')', ')'

View File

@ -90,7 +90,7 @@ def main():
rawconfig.read(configfile)
config = wrapper.RootwrapConfig(rawconfig)
except ValueError as exc:
msg = "Incorrect value in %s: %s" % (configfile, exc.message)
msg = "Incorrect value in %s: %s" % (configfile, str(exc))
_exit_error(execname, msg, RC_BADCONFIG, log=False)
except configparser.Error:
_exit_error(execname, "Incorrect configuration file: %s" % configfile,

View File

@ -105,7 +105,7 @@ class PathFilter(CommandFilter):
"""
def match(self, userargs):
command, arguments = userargs[0], userargs[1:]
arguments = userargs[1:]
equal_args_num = len(self.args) == len(arguments)
exec_is_valid = super(PathFilter, self).match(userargs)

View File

@ -19,7 +19,6 @@
#
from six.moves import configparser
import logging
import logging.handlers
@ -39,6 +38,7 @@ class FilterMatchNotExecutable(Exception):
This exception is raised when a filter matched but no executable was
found.
"""
def __init__(self, match=None, **kwargs):
self.match = match

View File

@ -66,6 +66,7 @@ LOG = logging.getLogger(__name__)
class Pool(pools.Pool):
"""Class that implements a Pool of Connections."""
def __init__(self, conf, connection_cls, *args, **kwargs):
self.connection_cls = connection_cls
self.conf = conf
@ -188,6 +189,7 @@ class ConnectionContext(rpc_common.Connection):
class ReplyProxy(ConnectionContext):
""" Connection class for RPC replies / callbacks """
def __init__(self, conf, connection_pool):
self._call_waiters = {}
self._num_call_waiters = 0
@ -240,7 +242,7 @@ def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None,
msg = {'result': reply, 'failure': failure}
except TypeError:
msg = {'result': dict((k, repr(v))
for k, v in reply.__dict__.items()),
for k, v in reply.__dict__.items()),
'failure': failure}
if ending:
msg['ending'] = True
@ -257,6 +259,7 @@ def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None,
class RpcContext(rpc_common.CommonRpcContext):
"""Context that supports replying to a rpc.call"""
def __init__(self, **kwargs):
self.msg_id = kwargs.pop('msg_id', None)
self.reply_q = kwargs.pop('reply_q', None)
@ -517,7 +520,7 @@ class MulticallProxyWaiter(object):
yield result
#TODO(pekowski): Remove MulticallWaiter() in Havana.
# TODO(pekowski): Remove MulticallWaiter() in Havana.
class MulticallWaiter(object):
def __init__(self, conf, connection, timeout):
self._connection = connection

View File

@ -173,6 +173,7 @@ class Connection(object):
An instance of this class should never be created by users of the rpc API.
Use rpc.create_connection() instead.
"""
def close(self):
"""Close the connection.
@ -354,7 +355,9 @@ def deserialize_remote_exception(conf, data):
return RemoteError(name, failure.get('message'), trace)
ex_type = type(failure)
str_override = lambda self: message
def str_override(self):
return message
new_ex_type = type(ex_type.__name__ + "_Remote", (ex_type,),
{'__str__': str_override, '__unicode__': str_override})
try:
@ -422,6 +425,7 @@ class ClientException(Exception):
hit by an RPC proxy object. Merely instantiating it records the
current exception information, which will be passed back to the
RPC client without exceptional logging."""
def __init__(self):
self._exc_info = sys.exc_info()

View File

@ -74,7 +74,7 @@ class Consumer(object):
# Caller might have called ctxt.reply() manually
for (reply, failure) in ctxt._response:
if failure:
raise_(failure[0],failure[1],failure[2])
raise_(failure[0], failure[1], failure[2])
res.append(reply)
# if ending not 'sent'...we might have more data to
# return from the function itself

View File

@ -322,6 +322,7 @@ class Publisher(object):
class DirectPublisher(Publisher):
"""Publisher class for 'direct'"""
def __init__(self, conf, channel, msg_id, **kwargs):
"""init a 'direct' publisher.
@ -338,6 +339,7 @@ class DirectPublisher(Publisher):
class TopicPublisher(Publisher):
"""Publisher class for 'topic'"""
def __init__(self, conf, channel, topic, **kwargs):
"""init a 'topic' publisher.
@ -357,6 +359,7 @@ class TopicPublisher(Publisher):
class FanoutPublisher(Publisher):
"""Publisher class for 'fanout'"""
def __init__(self, conf, channel, topic, **kwargs):
"""init a 'fanout' publisher.
@ -482,7 +485,7 @@ class Connection(object):
"""
if self.connection:
LOG.info(_("Reconnecting to AMQP server on "
"%(hostname)s:%(port)d") % params)
"%(hostname)s:%(port)d") % params)
try:
self.connection.release()
except self.connection_errors:
@ -612,7 +615,7 @@ class Connection(object):
def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
"%(err_str)s") % log_info)
"%(err_str)s") % log_info)
def _declare_consumer():
consumer = consumer_cls(self.conf, self.channel, topic, callback,
@ -673,7 +676,7 @@ class Connection(object):
def _error_callback(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.exception(_("Failed to publish message to topic "
"'%(topic)s': %(err_str)s") % log_info)
"'%(topic)s': %(err_str)s") % log_info)
def _publish():
publisher = cls(self.conf, self.channel, topic, **kwargs)

View File

@ -239,6 +239,7 @@ class Publisher(object):
class DirectPublisher(Publisher):
"""Publisher class for 'direct'"""
def __init__(self, conf, session, msg_id):
"""Init a 'direct' publisher."""
super(DirectPublisher, self).__init__(session, msg_id,
@ -247,6 +248,7 @@ class DirectPublisher(Publisher):
class TopicPublisher(Publisher):
"""Publisher class for 'topic'"""
def __init__(self, conf, session, topic):
"""init a 'topic' publisher.
"""
@ -257,6 +259,7 @@ class TopicPublisher(Publisher):
class FanoutPublisher(Publisher):
"""Publisher class for 'fanout'"""
def __init__(self, conf, session, topic):
"""init a 'fanout' publisher.
"""
@ -267,6 +270,7 @@ class FanoutPublisher(Publisher):
class NotifyPublisher(Publisher):
"""Publisher class for notifications"""
def __init__(self, conf, session, topic):
"""init a 'topic' publisher.
"""
@ -406,7 +410,7 @@ class Connection(object):
def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
"%(err_str)s") % log_info)
"%(err_str)s") % log_info)
def _declare_consumer():
consumer = consumer_cls(self.conf, self.session, topic, callback)
@ -460,7 +464,7 @@ class Connection(object):
def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.exception(_("Failed to publish message to topic "
"'%(topic)s': %(err_str)s") % log_info)
"'%(topic)s': %(err_str)s") % log_info)
def _publisher_send():
publisher = cls(self.conf, self.session, topic)

View File

@ -228,13 +228,13 @@ class ZmqClient(object):
if not envelope:
self.outq.send(map(bytes,
(msg_id, topic, 'cast', _serialize(data))))
(msg_id, topic, 'cast', _serialize(data))))
return
rpc_envelope = rpc_common.serialize_msg(data[1], envelope)
zmq_msg = reduce(lambda x, y: x + y, rpc_envelope.items())
self.outq.send(map(bytes,
(msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg))
(msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg))
def close(self):
self.outq.close()
@ -242,6 +242,7 @@ class ZmqClient(object):
class RpcContext(rpc_common.CommonRpcContext):
"""Context that supports replying to a rpc.call."""
def __init__(self, **kwargs):
self.replies = []
super(RpcContext, self).__init__(**kwargs)
@ -331,7 +332,7 @@ class ConsumerBase(object):
@classmethod
def normalize_reply(self, result, replies):
#TODO(ewindisch): re-evaluate and document this method.
# TODO(ewindisch): re-evaluate and document this method.
if isinstance(result, types.GeneratorType):
return list(result)
elif replies:
@ -451,7 +452,7 @@ class ZmqProxy(ZmqBaseReactor):
def consume(self, sock):
ipc_dir = CONF.rpc_zmq_ipc_dir
#TODO(ewindisch): use zero-copy (i.e. references, not copying)
# TODO(ewindisch): use zero-copy (i.e. references, not copying)
data = sock.recv()
topic = data[1]
@ -576,7 +577,7 @@ class ZmqReactor(ZmqBaseReactor):
super(ZmqReactor, self).__init__(conf)
def consume(self, sock):
#TODO(ewindisch): use zero-copy (i.e. references, not copying)
# TODO(ewindisch): use zero-copy (i.e. references, not copying)
data = sock.recv()
LOG.debug(_("CONSUMER RECEIVED DATA: %s"), data)
if sock in self.mapping:

View File

@ -56,6 +56,7 @@ class Exchange(object):
Implements lookups.
Subclass this to support hashtables, dns, etc.
"""
def __init__(self):
pass
@ -67,6 +68,7 @@ class Binding(object):
"""
A binding on which to perform a lookup.
"""
def __init__(self):
pass
@ -80,6 +82,7 @@ class MatchMakerBase(object):
Build off HeartbeatMatchMakerBase if building a
heartbeat-capable MatchMaker.
"""
def __init__(self):
# Array of tuples. Index [2] toggles negation, [3] is last-if-true
self.bindings = []
@ -145,9 +148,9 @@ class MatchMakerBase(object):
def add_binding(self, binding, rule, last=True):
self.bindings.append((binding, rule, False, last))
#NOTE(ewindisch): kept the following method in case we implement the
# NOTE(ewindisch): kept the following method in case we implement the
# underlying support.
#def add_negate_binding(self, binding, rule, last=True):
# def add_negate_binding(self, binding, rule, last=True):
# self.bindings.append((binding, rule, True, last))
def queues(self, key):
@ -155,7 +158,7 @@ class MatchMakerBase(object):
# bit is for negate bindings - if we choose to implement it.
# last stops processing rules if this matches.
for (binding, exchange, bit, last) in self.bindings:
for (binding, exchange, _bit, last) in self.bindings:
if binding.test(key):
workers.extend(exchange.run(key))
@ -171,6 +174,7 @@ class HeartbeatMatchMakerBase(MatchMakerBase):
Provides common methods for registering,
unregistering, and maintaining heartbeats.
"""
def __init__(self):
self.hosts = set()
self._heart = None
@ -269,6 +273,7 @@ class DirectBinding(Binding):
Although dots are used in the key, the behavior here is
that it maps directly to a host, thus direct.
"""
def test(self, key):
if '.' in key:
return True
@ -283,6 +288,7 @@ class TopicBinding(Binding):
that of a topic exchange (whereas where there are dots, behavior
matches that of a direct exchange.
"""
def test(self, key):
if '.' not in key:
return True
@ -291,6 +297,7 @@ class TopicBinding(Binding):
class FanoutBinding(Binding):
"""Match on fanout keys, where key starts with 'fanout.' string."""
def test(self, key):
if key.startswith('fanout~'):
return True
@ -299,12 +306,14 @@ class FanoutBinding(Binding):
class StubExchange(Exchange):
"""Exchange that does nothing."""
def run(self, key):
return [(key, None)]
class LocalhostExchange(Exchange):
"""Exchange where all direct topics are local."""
def __init__(self, host='localhost'):
self.host = host
super(Exchange, self).__init__()
@ -318,6 +327,7 @@ class DirectExchange(Exchange):
Exchange where all topic keys are split, sending to second half.
i.e. "compute.host" sends a message to "compute.host" running on "host"
"""
def __init__(self):
super(Exchange, self).__init__()
@ -331,6 +341,7 @@ class MatchMakerLocalhost(MatchMakerBase):
Match Maker where all bare topics resolve to localhost.
Useful for testing.
"""
def __init__(self, host='localhost'):
super(MatchMakerLocalhost, self).__init__()
self.add_binding(FanoutBinding(), LocalhostExchange(host))
@ -344,6 +355,7 @@ class MatchMakerStub(MatchMakerBase):
Useful for testing, or for AMQP/brokered queues.
Will not work where knowledge of hosts is known (i.e. zeromq)
"""
def __init__(self):
super(MatchMakerLocalhost, self).__init__()

View File

@ -63,6 +63,7 @@ class RedisTopicExchange(RedisExchange):
Exchange where all topic keys are split, sending to second half.
i.e. "compute.host" sends a message to "compute" running on "host"
"""
def run(self, topic):
while True:
member_name = self.redis.srandmember(topic)
@ -84,11 +85,12 @@ class RedisFanoutExchange(RedisExchange):
"""
Return a list of all hosts.
"""
def run(self, topic):
topic = topic.split('~', 1)[1]
hosts = self.redis.smembers(topic)
good_hosts = [host for host in hosts if
self.matchmaker.is_alive(topic, host)]
self.matchmaker.is_alive(topic, host)]
return [(x, x.split('.', 1)[1]) for x in good_hosts]
@ -97,6 +99,7 @@ class MatchMakerRedis(mm_common.HeartbeatMatchMakerBase):
"""
MatchMaker registering and looking-up hosts with a Redis server.
"""
def __init__(self):
super(MatchMakerRedis, self).__init__()

View File

@ -54,6 +54,7 @@ class RingExchange(mm.Exchange):
__init__ takes optional ring dictionary argument, otherwise
loads the ringfile from CONF.mathcmaker_ringfile.
"""
def __init__(self, ring=None):
super(RingExchange, self).__init__()
@ -76,6 +77,7 @@ class RingExchange(mm.Exchange):
class RoundRobinRingExchange(RingExchange):
"""A Topic Exchange based on a hashmap."""
def __init__(self, ring=None):
super(RoundRobinRingExchange, self).__init__(ring)
@ -92,6 +94,7 @@ class RoundRobinRingExchange(RingExchange):
class FanoutRingExchange(RingExchange):
"""Fanout Exchange based on a hashmap."""
def __init__(self, ring=None):
super(FanoutRingExchange, self).__init__(ring)
@ -111,6 +114,7 @@ class MatchMakerRing(mm.MatchMakerBase):
"""
Match Maker where hosts are loaded from a static hashmap.
"""
def __init__(self, ring=None):
super(MatchMakerRing, self).__init__()
self.add_binding(mm.FanoutBinding(), FanoutRingExchange(ring))

View File

@ -35,6 +35,7 @@ class Service(service.Service):
"""Service object for binaries running on hosts.
A service enables rpc by listening to queues based on topic and host."""
def __init__(self, host, topic, manager=None, serializer=None):
super(Service, self).__init__()
self.host = host

View File

@ -82,12 +82,12 @@ def parse_requirements(requirements_files=['requirements.txt',
# -e git://github.com/openstack/nova/master#egg=nova
if re.match(r'\s*-e\s+', line):
requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1',
line))
line))
# such as:
# http://github.com/openstack/nova/zipball/master#egg=nova
elif re.match(r'\s*https?:', line):
requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1',
line))
line))
# -f lines are for index locations, and don't get used here
elif re.match(r'\s*-f\s+', line):
pass

View File

@ -42,6 +42,7 @@ class Thread(object):
:class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when
it has done so it can be removed from the threads list.
"""
def __init__(self, thread, group):
self.thread = thread
self.thread.link(_thread_done, group=group, thread=self)
@ -60,6 +61,7 @@ class ThreadGroup(object):
when need be).
* provide an easy API to add timers.
"""
def __init__(self, thread_pool_size=10):
self.pool = greenpool.GreenPool(thread_pool_size)
self.threads = []

View File

@ -52,9 +52,9 @@ def parse_isotime(timestr):
try:
return iso8601.parse_date(timestr)
except iso8601.ParseError as e:
raise ValueError(e.message)
raise ValueError(str(e))
except TypeError as e:
raise ValueError(e.message)
raise ValueError(str(e))
def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
@ -124,7 +124,7 @@ def set_time_override(override_time=datetime.datetime.utcnow()):
def advance_time_delta(timedelta):
"""Advance overridden time using a datetime.timedelta."""
assert(not utcnow.override_time is None)
assert(utcnow.override_time is not None)
try:
for dt in utcnow.override_time:
dt += timedelta

View File

@ -8,6 +8,7 @@
from sm_client.common import utils
from keystoneclient.v3 import client as ksclient
def _get_ksclient(**kwargs):
"""Get an endpoint and auth token from Keystone.

View File

@ -92,6 +92,7 @@ class Resource(object):
:param info: dictionary representing resource attributes
:param loaded: prevent lazy-loading if set to True
"""
def __init__(self, manager, info, loaded=False):
self.manager = manager
self._info = info

View File

@ -28,7 +28,7 @@ from six.moves.urllib.parse import urlparse
try:
import ssl
except ImportError:
#TODO(bcwaldon): Handle this failure more gracefully
# TODO(bcwaldon): Handle this failure more gracefully
pass
try:

View File

@ -17,6 +17,7 @@
#
from __future__ import print_function
import argparse
import os
import sys
@ -98,7 +99,7 @@ def print_list(objs, fields, field_labels, formatters={}, sortby=0):
data = getattr(o, field, '')
row.append(data)
pt.add_row(row)
print pt.get_string(sortby=field_labels[sortby])
print(pt.get_string(sortby=field_labels[sortby]))
def print_tuple_list(tuples, tuple_labels=[]):
@ -111,11 +112,11 @@ def print_tuple_list(tuples, tuple_labels=[]):
if len(t) == 2:
pt.add_row([t[0], t[1]])
else:
for t,l in zip(tuples,tuple_labels):
for t, l in zip(tuples, tuple_labels):
if len(t) == 2:
pt.add_row([l, t[1]])
print pt.get_string()
print(pt.get_string())
def print_mapping(data, fields, dict_property="Property", wrap=0):
@ -142,7 +143,8 @@ def print_mapping(data, fields, dict_property="Property", wrap=0):
col1 = ''
else:
pt.add_row([k, v])
print pt.get_string()
print(pt.get_string())
def print_dict(d, fields, dict_property="Property", wrap=0):
pt = prettytable.PrettyTable([dict_property, 'Value'],
@ -169,7 +171,7 @@ def print_dict(d, fields, dict_property="Property", wrap=0):
col1 = ''
else:
pt.add_row([k, v])
print pt.get_string()
print(pt.get_string())
def find_resource(manager, name_or_id):
@ -258,5 +260,5 @@ def args_array_to_patch(op, attributes):
def exit(msg=''):
if msg:
print >> sys.stderr, msg
print(msg, file=sys.stderr)
sys.exit(1)

View File

@ -10,6 +10,7 @@ import sys
class BaseException(Exception):
"""An error occurred."""
def __init__(self, message=None):
self.message = message
@ -164,6 +165,7 @@ class AmbiguousAuthSystem(ClientException):
"""Could not obtain token and endpoint using provided credentials."""
pass
# Alias for backwards compatibility
AmbigiousAuthSystem = AmbiguousAuthSystem

View File

@ -120,7 +120,7 @@ def _import_module(mod_str):
def _is_in_group(opt, group):
"Check if opt is in group."
for key, value in group._opts.items():
for _key, value in group._opts.items():
if value['opt'] == opt:
return True
return False
@ -132,7 +132,7 @@ def _guess_groups(opt, mod_obj):
return 'DEFAULT'
# what other groups is it in?
for key, value in cfg.CONF.items():
for _key, value in cfg.CONF.items():
if isinstance(value, cfg.CONF.GroupAttr):
if _is_in_group(opt, value._group):
return value._group.name
@ -254,5 +254,6 @@ def main():
sys.exit(0)
generate(sys.argv[1:])
if __name__ == '__main__':
main()

View File

@ -79,7 +79,7 @@ def main():
rawconfig.read(configfile)
config = wrapper.RootwrapConfig(rawconfig)
except ValueError as exc:
msg = "Incorrect value in %s: %s" % (configfile, exc.message)
msg = "Incorrect value in %s: %s" % (configfile, str(exc))
_exit_error(execname, msg, RC_BADCONFIG, log=False)
except configparser.Error:
_exit_error(execname, "Incorrect configuration file: %s" % configfile,

View File

@ -105,7 +105,7 @@ class PathFilter(CommandFilter):
"""
def match(self, userargs):
command, arguments = userargs[0], userargs[1:]
arguments = userargs[1:]
equal_args_num = len(self.args) == len(arguments)
exec_is_valid = super(PathFilter, self).match(userargs)

View File

@ -19,7 +19,6 @@
#
from six.moves import configparser
import logging
import logging.handlers
@ -40,6 +39,7 @@ class FilterMatchNotExecutable(Exception):
This exception is raised when a filter matched but no executable was
found.
"""
def __init__(self, match=None, **kwargs):
self.match = match

View File

@ -8,7 +8,7 @@
"""
Command-line interface for Service Manager (SM)
"""
from __future__ import print_function
import argparse
import httplib2
import logging
@ -131,7 +131,7 @@ class SmcShell(object):
parser.add_argument('--smc-api-version',
default=utils.env(
'SMC_API_VERSION', default='1'),
'SMC_API_VERSION', default='1'),
help='Defaults to env[SMC_API_VERSION] '
'or 1')
@ -203,8 +203,8 @@ class SmcShell(object):
httplib2.debuglevel = 1
else:
logging.basicConfig(
format="%(levelname)s %(message)s",
level=logging.CRITICAL)
format="%(levelname)s %(message)s",
level=logging.CRITICAL)
def main(self, argv):
# Parse args once to find version
@ -249,7 +249,7 @@ class SmcShell(object):
client = cgclient.get_client(api_version, **(args.__dict__))
nargs = args.__dict__
# nargs = args.__dict__
# nargs['neutron_endpoint'] = client.neutron_endpoint
# client.neutronClient = get_neutron_client(**nargs)
@ -284,8 +284,9 @@ def main():
SmcShell().main(sys.argv[1:])
except Exception as e:
print >> sys.stderr, e
print(e, file=sys.stderr)
sys.exit(1)
if __name__ == "__main__":
main()

View File

@ -16,7 +16,7 @@ COMMAND_MODULES = [
smc_service_shell,
smc_service_node_shell,
smc_servicegroup_shell,
]
]
def enhance_parser(parser, subparsers, cmd_mapper):

View File

@ -6,7 +6,6 @@
# SPDX-License-Identifier: Apache-2.0
#
#
from sm_client import exc
from sm_client.common import base

View File

@ -7,9 +7,9 @@
# SPDX-License-Identifier: Apache-2.0
#
#
from sm_client import exc
from sm_client.common import base
class SmcNode(base.Resource):
def __repr__(self):
return "<SmcNode %s>" % self._info

View File

@ -16,6 +16,7 @@ def _print_sm_service_node_show(node):
'availability_status', 'ready_state']
utils.print_mapping(node, fields, wrap=72)
def do_servicenode_list(cc, args):
"""List Service Nodes."""
try:
@ -44,6 +45,6 @@ def do_servicenode_show(cc, args):
"requires 'admin' level")
else:
if node is None:
print "Service node %s could not be found" % args.node
print("Service node %s could not be found" % args.node)
return
_print_sm_service_node_show(node)

View File

@ -53,7 +53,7 @@ def do_service_show(cc, args):
"requires 'admin' level")
else:
if service is None:
print "Service %s could not be found" % args.service
print("Service %s could not be found" % args.service)
return
if service.status:
setattr(service, 'state', service.state + '-' + service.status)

View File

@ -7,7 +7,6 @@
# SPDX-License-Identifier: Apache-2.0
#
#
from sm_client import exc
from sm_client.common import base

View File

@ -45,7 +45,7 @@ def do_servicegroup_show(cc, args):
"requires 'admin' level")
else:
if servicegroup is None:
print "Service group %s could not be found" % args.servicegroup
print("Service group %s could not be found" % args.servicegroup)
return
if servicegroup.status:
setattr(servicegroup, 'state', servicegroup.state + '-' +

View File

@ -101,7 +101,7 @@ class InstallVenv(object):
print('Creating venv...', end=' ')
if no_site_packages:
self.run_command(['virtualenv', '-q', '--no-site-packages',
self.venv])
self.venv])
else:
self.run_command(['virtualenv', '-q', self.venv])
print('done.')
@ -111,7 +111,7 @@ class InstallVenv(object):
def pip_install(self, *args):
self.run_command(['tools/with_venv.sh',
'pip', 'install', '--upgrade'] + list(args),
'pip', 'install', '--upgrade'] + list(args),
redirect_output=False)
def install_dependencies(self):
@ -142,7 +142,7 @@ class Distro(InstallVenv):
def check_cmd(self, cmd):
return bool(self.run_command(['which', cmd],
check_exit_code=False).strip())
check_exit_code=False).strip())
def install_virtualenv(self):
if self.check_cmd('virtualenv'):

View File

@ -49,7 +49,7 @@ def main():
si_parser.add_argument('parameters', help='instance parameters')
sys_parser = subparsers.add_parser('system',
help='system Configuration')
help='system Configuration')
sys_parser.set_defaults(which='system')
sys_parser.add_argument(
"--cpe_mode", choices=[cpe_duplex, cpe_duplex_direct],

View File

@ -20,7 +20,7 @@ def get_pid(pid_file):
pid = f.readline().strip('\n ')
try:
pid = int(pid)
except:
except Exception as e:
pid = -1
return pid
return -1
@ -39,7 +39,7 @@ def get_process_name(pid):
name = ntpath.basename(cmd_line[1])
return name
except:
except Exception as e:
# most likely it is a leftover pid
return ''
@ -112,7 +112,7 @@ def main():
pid_file = row[5]
pid = get_pid(pid_file)
pn = get_process_name(pid)
msg = "%-32s %-20s %-20s " % (row[0], row[1],row[2])
msg = "%-32s %-20s %-20s " % (row[0], row[1], row[2])
if args.impact:
msg += "%-10s" % (row[6])
if args.pid:
@ -122,7 +122,7 @@ def main():
if args.pid_file:
msg += "%-25s" % (pid_file)
msg += "%-10s %20s" % (row[3], row[4])
print msg
print(msg)
print("%s" % ('-' * len))
@ -168,7 +168,7 @@ def main():
pid_file = row[4]
pid = get_pid(pid_file)
pn = get_process_name(pid)
msg = "%-32s %-20s %-20s " % (row[0], row[1],row[2])
msg = "%-32s %-20s %-20s " % (row[0], row[1], row[2])
if args.impact:
msg += "%-10s" % (row[5])
if args.pid:
@ -193,9 +193,9 @@ def main():
try:
sys.stdout.close()
except:
except Exception as e:
pass
try:
sys.stderr.close()
except:
except Exception as e:
pass

View File

@ -64,7 +64,7 @@ def main():
cursor.execute("SELECT NAME, DESIRED_STATE, STATE FROM "
"SERVICE_GROUPS WHERE NAME IN (%s) AND PROVISIONED='yes';"
% ','.join("'%s'"%i for i in args.service_group_name))
% ','.join("'%s'" % i for i in args.service_group_name))
rows = cursor.fetchall()
@ -90,7 +90,7 @@ def main():
not_found_list.append(g)
if len(not_found_list) > 1:
print("%s are not provisioned"%','.join( (g for g in not_found_list)))
print("%s are not provisioned" % ','.join((g for g in not_found_list)))
elif len(not_found_list) == 1:
print("%s is not provisioned" % ','.join((g for g in not_found_list)))

View File

@ -11,7 +11,7 @@
# SERVICE_GROUP_AGGREGATE_DESIRED_STATE=<service-group-state>
# SERVICE_GROUP_AGGREGATE_STATE=<service-group-state>
#
# SERVICE_GROUP_NAME=<service-group-name>
# SERVICE_GROUP_NAME=<service-group-name>
# SERVICE_GROUP_DESIRED_STATE=<service-group-state>
# SERVICE_GROUP_STATE=<service-group-state>
# SERVICE_GROUP_NOTIFICATION=<service-group-notification>

45
tox.ini
View File

@ -51,40 +51,11 @@ commands =
[flake8]
# The following are being suppressed for now
# E114 indentation is not a multiple of four (comment)
# E116 unexpected indentation (comment)
# E121 continuation line under-indented for hanging indent
# E122 continuation line missing indentation or outdented
# E123 closing bracket does not match indentation of opening bracket's line
# E124 closing bracket does not match visual indentation
# E125 continuation line with same indent as next logical line
# E126 continuation line over-indented for hanging indent
# E127 continuation line over-indented for visual indent
# E128 continuation line under-indented for visual indent
# E129 visually indented line with same indent as next logical line
# E131 continuation line unaligned for hanging indent
# E201 whitespace after '('
# E228 missing whitespace around modulo operator
# E231 missing whitespace after ','
# E241 multiple spaces after ':'
# E251 unexpected spaces around keyword / parameter equals
# E265 block comment should start with '#'
# E271 multiple spaces after keyword
# E302 expected 2 blank lines, found 1
# E303 too many blank lines
# E305 expected 2 blank lines after class or function definition, found 1
# E402 module level import not at top of file
# E713 test for membership should be 'not in'
# E714 test for object identity should be 'is not'
# E722 do not use bare except
# E731 do not assign a lambda expression, use a def
# E999 SyntaxError: invalid syntax (this is likely python3)
# - hacking codes -
# H102: license header not found
# H104: File contains nothing but comments
# H105: Don't use author tags
# H201: no 'except:'
# H233: Python 3.x incompatible use of print operator
# H306: imports not in alphabetical order
# H401: docstring should not start with a space
# H403: multi line docstrings should end on a new line
@ -92,23 +63,15 @@ commands =
# H405: multi line docstring summary not separated with an empty line
# H501: Do not use locals() for string formatting
# - errors -
# F401 <foo> imported but unused
# F811 redefinition of unused '<foo>' from line <x>
# F821 undefined name 'e'
# F841 local variable 'foo' is assigned to but never used
# - bugbear -
# B001 Do not use bare `except:`. Prefer `except Exception:`. If you're sure what you're doing, be explicit and write `except BaseException:`.
# B004 Using `hasattr(x, '__call__')` to test if `x` is callable is unreliable.
# B006 Do not use mutable data structures for argument defaults. (python3)
# B007 Loop control variable 'key' not used within the loop body. If this is intended, start the name with an underscore.
# B008 Do not perform calls in argument defaults. The call is performed only once at function definition time.
# B305 `.next()` is not a thing on Python 3. Use the `next()` builtin. For Python 2 compatibility, use `six.next()`.
# B306 `BaseException.message` has been deprecated as of Python 2.6 and is removed in Python 3.
ignore= E114,E116,E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E201,E228,E231,E241,E251,E265,E271,
E302,E303,E305,E402,E713,E714,E722,E731,E999,
H102,H104,H105,H106,H201,H233,H306,H401,H403,H404,H405,H501,
F401,F811,F821,F841,
B001,B004,B006,B007,B008,B305,B306
ignore = E402,
H102,H104,H105,H106,H306,H401,H403,H404,H405,H501,
F811,F821,
B006,B008
# Enable checks which are off by default
# H106 Dont put vim configuration in source files (off by default). SHOULD BE ENABLED.
# H203 Use assertIs(Not)None to check for None (off by default).