# Copyright (c) 2014 TrilioData, Inc.
# All Rights Reserved.
"""
Base utilities to build API operation managers and objects on top of.
"""
import abc
import contextlib
import hashlib
import os
import yaml
import six
from workloadmgrclient import exceptions
from workloadmgrclient import utils
# Python 2.4 compat
try:
all
except NameError:
def all(iterable):
return True not in (not x for x in iterable)
def getid(obj):
"""
Abstracts the common pattern of allowing both an object or an object's ID
as a parameter when dealing with relationships.
"""
try:
return obj.id
except AttributeError:
return obj
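# Illustrative usage sketch (names are assumptions, not part of this module):
# getid() accepts either a Resource-like object or a bare ID, e.g.
#
#     getid(workload)    # -> workload.id for an object with an `id` attribute
#     getid("some-id")   # -> the string itself, returned unchanged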
class Manager(utils.HookableMixin):
"""
Managers interact with a particular type of API (servers, flavors, images,
etc.) and provide CRUD operations for them.
"""
resource_class = None
def __init__(self, api):
self.api = api
def _list(self, url, response_key, obj_class=None, body=None):
resp = None
if body:
resp, body = self.api.client.post(url, body=body)
else:
resp, body = self.api.client.get(url)
if obj_class is None:
obj_class = self.resource_class
data = body[response_key]
# NOTE(ja): keystone wraps list responses as {'values': [ ... ]},
# unlike other services which just return the list...
if isinstance(data, dict):
try:
data = data["values"]
except KeyError:
pass
with self.completion_cache("human_id", obj_class, mode="w"):
with self.completion_cache("uuid", obj_class, mode="w"):
return [obj_class(self, res, loaded=True) for res in data if res]
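# Illustrative sketch (assumed class and endpoint names, not part of this
# module): a concrete manager usually sets `resource_class` and wraps the
# generic helpers, roughly like the hypothetical subclass below.
#
#     class WorkloadManager(ManagerWithFind):
#         resource_class = Workload  # a Resource subclass, assumed here
#
#         def list(self):
#             return self._list("/workloads", "workloads")
#
#         def get(self, workload_id):
#             return self._get("/workloads/%s" % workload_id, "workload")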
@contextlib.contextmanager
def completion_cache(self, cache_type, obj_class, mode):
"""
The completion cache stores items that can be used for bash
autocompletion, like UUIDs or human-friendly IDs.
A resource listing will clear and repopulate the cache.
A resource create will append to the cache.
Delete is not handled because listings are assumed to be performed
often enough to keep the cache reasonably up-to-date.
"""
base_dir = utils.env(
"WORKLOADMGRCLIENT_UUID_CACHE_DIR", default="~/.workloadmgrclient"
)
# NOTE(sirp): Keep separate UUID caches for each username + endpoint
# pair
username = utils.env("OS_USERNAME", "WORKLOADMGR_USERNAME").encode("utf-8")
url = utils.env("OS_URL", "WORKLOADMGR_URL").encode("utf-8")
uniqifier = hashlib.md5(username + url).hexdigest()
cache_dir = os.path.expanduser(os.path.join(base_dir, uniqifier))
try:
os.makedirs(cache_dir, 0o755)
except OSError:
# NOTE(kiall): This is typically either permission denied while
# attempting to create the directory, or the directory
# already exists. Either way, don't fail.
pass
resource = obj_class.__name__.lower()
filename = "%s-%s-cache" % (resource, cache_type.replace("_", "-"))
path = os.path.join(cache_dir, filename)
cache_attr = "_%s_cache" % cache_type
try:
setattr(self, cache_attr, open(path, mode))
except IOError:
# NOTE(kiall): This is typically permission denied while
# attempting to write the cache file.
pass
try:
yield
finally:
cache = getattr(self, cache_attr, None)
if cache:
cache.close()
delattr(self, cache_attr)
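# Illustrative note: with the defaults above, cache files land in a
# per-user/per-endpoint directory, e.g. (hypothetical resource name)
#
#     ~/.workloadmgrclient/<md5(username + url)>/workload-uuid-cache
#     ~/.workloadmgrclient/<md5(username + url)>/workload-human-id-cache
#
# where "workload" is the lower-cased name of the resource class.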
def write_to_completion_cache(self, cache_type, val):
cache = getattr(self, "_%s_cache" % cache_type, None)
if cache:
cache.write("%s\n" % val)
def _get(self, url, response_key=None):
resp, body = self.api.client.get(url)
if response_key:
return self.resource_class(self, body[response_key], loaded=True)
else:
return self.resource_class(self, body, loaded=True)
def _create(self, url, body, response_key, return_raw=False, **kwargs):
self.run_hooks("modify_body_for_create", body, **kwargs)
resp, body = self.api.client.post(url, body=body)
if return_raw:
return body[response_key]
with self.completion_cache("human_id", self.resource_class, mode="a"):
with self.completion_cache("uuid", self.resource_class, mode="a"):
return self.resource_class(self, body[response_key])
def _delete(self, url):
resp, body = self.api.client.delete(url)
def _cancel(self, url):
resp, body = self.api.client.get(url)
def _post(self, url, body):
resp, body = self.api.client.post(url, body=body)
return body
def _mount(self, url, body):
resp, body = self.api.client.post(url, body=body)
return resp.content
def _update(self, url, body):
resp, body = self.api.client.put(url, body=body)
return body
def _discover_instances(self, url, body=None):
if body is None:
resp, body = self.api.client.post(url)
else:
resp, body = self.api.client.post(url, body=body)
return body
def _get_without_id(self, url, response_key=None):
resp, body = self.api.client.get(url)
if response_key:
return body[response_key]
else:
return body
def _pause(self, url):
resp, body = self.api.client.post(url)
def _resume(self, url):
resp, body = self.api.client.post(url)
def _unlock(self, url):
resp, body = self.api.client.post(url)
def _reset(self, url):
resp, body = self.api.client.post(url)
def _discovervms(self, url):
resp, body = self.api.client.post(url)
def _import(self, url, response_key, method="import", workloadids=[], upgrade=True):
if method == "list":
resp, body = self.api.client.get(url)
else:
body = {}
if len(workloadids) > 0:
body = {"workload_ids": workloadids, "upgrade": upgrade}
else:
body = {"upgrade": upgrade}
resp, body = self.api.client.post(url, body=body)
data = body[response_key]
if isinstance(data, dict):
try:
data = data["values"]
except KeyError:
pass
return data
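# Illustrative note: for any method other than "list", the body posted above
# looks like (hypothetical IDs)
#
#     {"workload_ids": ["<id-1>", "<id-2>"], "upgrade": True}
#
# or simply {"upgrade": True} when no workload IDs are supplied.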
def _import_migration_plans(self, url, response_key, method="import", migration_plan_ids=[], upgrade=True):
if method == "list":
resp, body = self.api.client.get(url)
else:
body = {}
if len(migration_plan_ids) > 0:
body = {"migration_plan_ids": migration_plan_ids, "upgrade": upgrade}
else:
body = {"upgrade": upgrade}
resp, body = self.api.client.post(url, body=body)
data = body[response_key]
if isinstance(data, dict):
try:
data = data["values"]
except KeyError:
pass
return data
def _get_nodes(self, url):
resp, body = self.api.client.get(url)
return body
def _get_contego_status(self, url):
resp, body = self.api.client.get(url)
return body
def _remove_node(self, url):
resp, body = self.api.client.delete(url)
def _add_node(self, url, ip):
body = {"ip": ip}
resp, body = self.api.client.post(url, body=body)
def _get_storage_usage(self, url):
resp, body = self.api.client.get(url)
return body
def _get_recentactivities(self, url):
resp, body = self.api.client.get(url)
return body
def _get_auditlog(self, url):
resp, body = self.api.client.get(url)
return body
def _get_tasks(self, url):
resp, body = self.api.client.get(url)
return body
def _settings(self, url, body):
resp, body = self.api.client.post(url, body=body)
return body["settings"]
def _license_list(self, url):
resp, body = self.api.client.get(url)
return body["license"]
def _license_check(self, url):
resp, body = self.api.client.get(url)
return body["message"]
def _snapshot_mounted_list(self, url):
resp, body = self.api.client.get(url)
return body
def _accept(self, url, body):
resp, body = self.api.client.post(url, body=body)
return body
def _complete(self, url):
resp, body = self.api.client.post(url)
def _reassign(self, url, response_key, args=None, method="reassign"):
if method == "list":
resp, body = self.api.client.get(url)
else:
workload_ids = args.workload_ids
old_tenant_ids = args.old_tenant_ids
new_tenant_id = args.new_tenant_id
user_id = args.user_id
migrate_cloud = args.migrate_cloud
map_file = args.map_file
if not (
workload_ids or old_tenant_ids or new_tenant_id or user_id or map_file
):
raise exceptions.CommandError("Please provide required parameters")
if map_file and (
workload_ids or old_tenant_ids or new_tenant_id or user_id
):
raise exceptions.CommandError(
"Please provide only file or other required parameters."
)
if workload_ids and old_tenant_ids:
raise exceptions.CommandError(
"Please provide onle one parameter"
" either workload_ids or old_tenant_ids"
)
if (workload_ids or old_tenant_ids) and (new_tenant_id is None):
raise exceptions.CommandError("Please provide new_tenant_id.")
if (workload_ids or old_tenant_ids) and (user_id is None):
raise exceptions.CommandError("Please provide user_id.")
reassign_map_list = []
if map_file:
maps = map_file["reassign_mappings"]
for workload_map in maps:
params_map = {
"workload_ids": workload_map.get("workload_ids", None),
"old_tenant_ids": workload_map.get("old_tenant_ids", None),
"new_tenant_id": workload_map.get("new_tenant_id", None),
"user_id": workload_map.get("user_id", None),
"migrate_cloud": workload_map.get("migrate_cloud", False),
}
if (
params_map["workload_ids"]
and not isinstance(params_map["workload_ids"], list)
) or (
params_map["old_tenant_ids"]
and not isinstance(params_map["old_tenant_ids"], list)
):
raise exceptions.CommandError(
"Workload id's/Old tenant id's should be in list format."
)
reassign_map_list.append(params_map)
else:
params_map = {
"workload_ids": workload_ids,
"old_tenant_ids": old_tenant_ids,
"new_tenant_id": new_tenant_id,
"user_id": user_id,
"migrate_cloud": migrate_cloud,
}
reassign_map_list.append(params_map)
resp, body = self.api.client.post(url, body=reassign_map_list)
data = body[response_key]
if isinstance(data, dict):
try:
data = data["values"]
except KeyError:
pass
if method == "list" and args.generate_yaml:
workload_ids = [str(workload["project_id"]) for workload in data]
reassign_map = {
"reassign_mappings": [
{
"migrate_cloud": False,
"user_id": None,
"new_tenant_id": None,
"old_tenant_ids": [],
"workload_ids": workload_ids,
}
]
}
with open("reassign_map.yaml", "w") as yaml_file:
yaml.dump(reassign_map, yaml_file, default_flow_style=False)
return data
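# Illustrative sketch (hypothetical project IDs): when called with
# method="list" and args.generate_yaml set, the block above writes a
# reassign_map.yaml shaped like the map_file this method consumes:
#
#     reassign_mappings:
#     - migrate_cloud: false
#       new_tenant_id: null
#       old_tenant_ids: []
#       user_id: null
#       workload_ids:
#       - <project-id-1>
#       - <project-id-2>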
def _get_tenants_usage(self, url):
resp, body = self.api.client.get(url)
return body
def _get_protected_vms(self, url):
resp, body = self.api.client.get(url)
return body
def _list_assigned_policies(self, url):
resp, body = self.api.client.get(url)
return body
def _get_tenants_chargeback(self, url):
resp, body = self.api.client.get(url)
return body
def _get_quota_data(self, url):
resp, body = self.api.client.get(url)
return body
def _get_vms(self, url):
resp, body = self.api.client.get(url)
return body
def _get_vcenter_vms(self, url):
resp, body = self.api.client.get(url)
return body
def _barbican_support(self, url):
resp, body = self.api.client.get(url)
return body
def _update_workloads_service(self, url, body):
resp, body = self.api.client.post(url, body=body)
return body
class ManagerWithFind(six.with_metaclass(abc.ABCMeta, Manager)):
"""
Like a `Manager`, but with additional `find()`/`findall()` methods.
"""
@abc.abstractmethod
def list(self):
pass
def find(self, **kwargs):
"""
Find a single item with attributes matching ``**kwargs``.
This isn't very efficient: it loads the entire list then filters on
the Python side.
"""
matches = self.findall(**kwargs)
num_matches = len(matches)
if num_matches == 0:
msg = "No %s matching %s." % (self.resource_class.__name__, kwargs)
raise exceptions.NotFound(404, msg)
elif num_matches > 1:
raise exceptions.NoUniqueMatch
else:
return matches[0]
def findall(self, **kwargs):
"""
Find all items with attributes matching ``**kwargs``.
This isn't very efficient: it loads the entire list then filters on
the Python side.
"""
found = []
searches = list(kwargs.items())
for obj in self.list():
try:
if all(getattr(obj, attr) == value for (attr, value) in searches):
found.append(obj)
except AttributeError:
continue
return found
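# Illustrative sketch (assumed attribute and manager names): with a concrete
# manager, find()/findall() filter the full listing client-side, e.g.
#
#     wl = client.workloads.find(name="daily-backup")      # exactly one match
#     errored = client.workloads.findall(status="error")   # zero or more
#
# find() raises NotFound for zero matches and NoUniqueMatch for several.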
class Resource(object):
"""
A resource represents a particular instance of an object (server, flavor,
etc). This is pretty much just a bag for attributes.
:param manager: Manager object
:param info: dictionary representing resource attributes
:param loaded: prevent lazy-loading if set to True
"""
HUMAN_ID = False
def __init__(self, manager, info, loaded=False):
self.manager = manager
self._info = info
self._add_details(info)
self._loaded = loaded
# NOTE(sirp): ensure `id` is already present because if it isn't we'll
# enter an infinite loop of __getattr__ -> get -> __init__ ->
# __getattr__ -> ...
if "id" in self.__dict__ and len(str(self.id)) == 36:
self.manager.write_to_completion_cache("uuid", self.id)
human_id = self.human_id
if human_id:
self.manager.write_to_completion_cache("human_id", human_id)
@property
def human_id(self):
"""Subclasses may override this provide a pretty ID which can be used
for bash completion.
"""
if "name" in self.__dict__ and self.HUMAN_ID:
return utils.slugify(self.name)
return None
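# Illustrative sketch: a Resource subclass that opts into human-friendly IDs
# (class name and field values are assumptions):
#
#     class Workload(Resource):
#         HUMAN_ID = True
#
#     # Workload(mgr, {"id": "...", "name": "Daily Backup"}).human_id would
#     # then return utils.slugify("Daily Backup"), e.g. "daily-backup".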
def _add_details(self, info):
for (k, v) in six.iteritems(info):
try:
setattr(self, k, v)
except AttributeError:
# In this case we already defined the attribute on the class
pass
def __getattr__(self, k):
if k not in self.__dict__:
# NOTE(bcwaldon): disallow lazy-loading if already loaded once
if not self.is_loaded():
self.get()
return self.__getattr__(k)
raise AttributeError(k)
else:
return self.__dict__[k]
def __repr__(self):
reprkeys = sorted(
k for k in list(self.__dict__.keys()) if k[0] != "_" and k != "manager"
)
info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys)
return "<%s %s>" % (self.__class__.__name__, info)
def get(self):
# set_loaded() first ... so if we have to bail, we know we tried.
self.set_loaded(True)
if not hasattr(self.manager, "get"):
return
new = self.manager.get(self.id)
if new:
self._add_details(new._info)
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
if hasattr(self, "id") and hasattr(other, "id"):
return self.id == other.id
return self._info == other._info
def is_loaded(self):
return self._loaded
def set_loaded(self, val):
self._loaded = val
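# Illustrative note on lazy loading (names are assumptions): reading an
# attribute that is not yet populated triggers a single manager.get() call,
# after which the resource is marked as loaded:
#
#     wl = Workload(mgr, {"id": "..."}, loaded=False)
#     wl.status        # not in __dict__ -> wl.get() -> mgr.get(wl.id)
#     wl.is_loaded()   # True (set_loaded(True) runs before the fetch)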