py3: Convert repo to use python3
Update code using modernize tool for python 3 compatibility. Unit test run successful. Story: 2008454 Task: 42840 Signed-off-by: Anderson F. de Rossi <Anderson.FrancicadeRossi@windriver.com> Change-Id: I1ce242e84200f64929219a8585b6740fe9cdc672 Signed-off-by: Anderson F. de Rossi <Anderson.FrancicadeRossi@windriver.com>
This commit is contained in:
parent
49890b097d
commit
9cac55862b
|
@ -400,7 +400,7 @@ class SubcloudInstall(object):
|
|||
LOG.info("Prepare for %s remote install" % (self.name))
|
||||
iso_values = {}
|
||||
for k in install_consts.MANDATORY_INSTALL_VALUES:
|
||||
if k in GEN_ISO_OPTIONS.keys():
|
||||
if k in list(GEN_ISO_OPTIONS.keys()):
|
||||
iso_values[k] = payload.get(k)
|
||||
if k not in BMC_OPTIONS:
|
||||
iso_values[k] = payload.get(k)
|
||||
|
|
|
@ -230,8 +230,11 @@ def user_get_all(context):
|
|||
user_passwords = {'password': [password for password in passwords
|
||||
if password['local_user_id'] ==
|
||||
local_user['id']]}
|
||||
user_consolidated = dict({'local_user': local_user}.items() +
|
||||
user.items() + user_passwords.items())
|
||||
|
||||
user_consolidated = local_user
|
||||
user_consolidated.update(user)
|
||||
user_consolidated.update(user_passwords)
|
||||
|
||||
result.append(user_consolidated)
|
||||
|
||||
return result
|
||||
|
|
|
@ -74,8 +74,7 @@ class ResourceManager(object):
|
|||
json_objects = [json_response_key[item] for item in json_response_key]
|
||||
resource = []
|
||||
for json_object in json_objects:
|
||||
data = json_object.get('usage').keys()
|
||||
for values in data:
|
||||
for values in json_object.get('usage').keys():
|
||||
resource.append(self.resource_class( # pylint: disable=E1102
|
||||
self, values,
|
||||
json_object['limits'][values],
|
||||
|
|
|
@ -55,6 +55,7 @@ from dcmanager.db import api as db_api
|
|||
|
||||
from dcmanager.rpc import client as rpc_client
|
||||
from dcorch.common import consts as dcorch_consts
|
||||
from six.moves import range
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
@ -518,7 +519,7 @@ class SubcloudsController(object):
|
|||
pecan.abort(400, _('Mandatory install value %s not present') % k)
|
||||
|
||||
if (install_values['install_type'] not in
|
||||
range(install_consts.SUPPORTED_INSTALL_TYPES)):
|
||||
list(range(install_consts.SUPPORTED_INSTALL_TYPES))):
|
||||
pecan.abort(400, _("install_type invalid: %s") %
|
||||
install_values['install_type'])
|
||||
|
||||
|
|
|
@ -136,7 +136,7 @@ class GenericSyncManager(object):
|
|||
random.shuffle(subclouds)
|
||||
sc_names = []
|
||||
for sc in subclouds:
|
||||
if sc.region_name in self.sync_objs.keys():
|
||||
if sc.region_name in self.sync_objs:
|
||||
sc_names.append(sc.region_name)
|
||||
for ept in self.sync_objs[sc.region_name].keys():
|
||||
try:
|
||||
|
@ -556,7 +556,7 @@ class GenericSyncManager(object):
|
|||
# randomize to reduce likelihood of sync_lock contention
|
||||
random.shuffle(subclouds)
|
||||
for sc in subclouds:
|
||||
if sc.region_name in self.sync_objs.keys():
|
||||
if sc.region_name in list(self.sync_objs.keys()):
|
||||
for e in self.sync_objs[sc.region_name].keys():
|
||||
LOG.debug("Attempt audit_subcloud: %s/%s/%s",
|
||||
engine_id, sc.region_name, e)
|
||||
|
@ -574,7 +574,7 @@ class GenericSyncManager(object):
|
|||
capabilities.update({'endpoint_types': endpoint_type_list})
|
||||
self.create_sync_objects(sc.region_name, capabilities)
|
||||
# self.sync_objs stores the sync object per endpoint
|
||||
if sc.region_name in self.sync_objs.keys():
|
||||
if sc.region_name in list(self.sync_objs.keys()):
|
||||
for e in self.sync_objs[sc.region_name].keys():
|
||||
LOG.debug("Attempt audit_subcloud: %s/%s/%s",
|
||||
engine_id, sc.region_name, e)
|
||||
|
|
|
@ -297,7 +297,7 @@ class ComputeSyncThread(SyncThread):
|
|||
consts.ACTION_EXTRASPECS_DELETE: self.unset_extra_specs,
|
||||
}
|
||||
action = list(action_dict.keys())[0]
|
||||
if action not in switcher.keys():
|
||||
if action not in list(switcher.keys()):
|
||||
LOG.error("Unsupported flavor action {}".format(action),
|
||||
extra=self.log_extra)
|
||||
return
|
||||
|
|
|
@ -13,7 +13,6 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import base64
|
||||
from collections import namedtuple
|
||||
|
||||
|
@ -453,7 +452,7 @@ class IdentitySyncThread(SyncThread):
|
|||
raise exceptions.SyncRequestFailed
|
||||
|
||||
user_dict = jsonutils.loads(request.orch_job.resource_info)
|
||||
if 'user' in user_dict.keys():
|
||||
if 'user' in user_dict:
|
||||
user_dict = user_dict['user']
|
||||
|
||||
sc_user_id = user_dict.pop('id', None)
|
||||
|
@ -496,7 +495,7 @@ class IdentitySyncThread(SyncThread):
|
|||
def patch_users(self, request, rsrc):
|
||||
# Update user reference on this subcloud
|
||||
user_update_dict = jsonutils.loads(request.orch_job.resource_info)
|
||||
if not user_update_dict.keys():
|
||||
if not list(user_update_dict.keys()):
|
||||
LOG.error("Received user update request "
|
||||
"without any update fields", extra=self.log_extra)
|
||||
raise exceptions.SyncRequestFailed
|
||||
|
@ -629,7 +628,7 @@ class IdentitySyncThread(SyncThread):
|
|||
raise exceptions.SyncRequestFailed
|
||||
|
||||
project_dict = jsonutils.loads(request.orch_job.resource_info)
|
||||
if 'project' in project_dict.keys():
|
||||
if 'project' in list(project_dict.keys()):
|
||||
project_dict = project_dict['project']
|
||||
|
||||
sc_project_id = project_dict.pop('id', None)
|
||||
|
@ -672,7 +671,7 @@ class IdentitySyncThread(SyncThread):
|
|||
def patch_projects(self, request, rsrc):
|
||||
# Update project on this subcloud
|
||||
project_update_dict = jsonutils.loads(request.orch_job.resource_info)
|
||||
if not project_update_dict.keys():
|
||||
if not list(project_update_dict.keys()):
|
||||
LOG.error("Received project update request "
|
||||
"without any update fields", extra=self.log_extra)
|
||||
raise exceptions.SyncRequestFailed
|
||||
|
@ -800,7 +799,7 @@ class IdentitySyncThread(SyncThread):
|
|||
raise exceptions.SyncRequestFailed
|
||||
|
||||
role_dict = jsonutils.loads(request.orch_job.resource_info)
|
||||
if 'role' in role_dict.keys():
|
||||
if 'role' in list(role_dict.keys()):
|
||||
role_dict = role_dict['role']
|
||||
|
||||
sc_role_id = role_dict.pop('id', None)
|
||||
|
@ -843,7 +842,7 @@ class IdentitySyncThread(SyncThread):
|
|||
def patch_roles(self, request, rsrc):
|
||||
# Update this role on this subcloud
|
||||
role_update_dict = jsonutils.loads(request.orch_job.resource_info)
|
||||
if not role_update_dict.keys():
|
||||
if not list(role_update_dict.keys()):
|
||||
LOG.error("Received role update request "
|
||||
"without any update fields", extra=self.log_extra)
|
||||
raise exceptions.SyncRequestFailed
|
||||
|
@ -1062,7 +1061,7 @@ class IdentitySyncThread(SyncThread):
|
|||
# JSON format by REST call to the DB synchronization service on this
|
||||
# subcloud, which then inserts the resource records into DB tables.
|
||||
revoke_event_dict = jsonutils.loads(request.orch_job.resource_info)
|
||||
if 'token_revoke_event' in revoke_event_dict.keys():
|
||||
if 'token_revoke_event' in list(revoke_event_dict.keys()):
|
||||
revoke_event_dict = revoke_event_dict['token_revoke_event']
|
||||
|
||||
audit_id = revoke_event_dict.pop('audit_id', None)
|
||||
|
@ -1381,7 +1380,7 @@ class IdentitySyncThread(SyncThread):
|
|||
# Build an opaque object wrapper for this RoleAssignment
|
||||
refactored_assignment = namedtuple(
|
||||
'RoleAssignmentWrapper',
|
||||
assignment_dict.keys())(*assignment_dict.values())
|
||||
list(assignment_dict.keys()))(*list(assignment_dict.values()))
|
||||
refactored_assignments.append(refactored_assignment)
|
||||
|
||||
return refactored_assignments
|
||||
|
|
Loading…
Reference in New Issue