Why Gemfury? Push, build, and install  RubyGems npm packages Python packages Maven artifacts PHP packages Go Modules Debian packages RPM packages NuGet packages

Repository URL to install this package:

Details    
Size: Mime:
# Copyright (c) 2014 TrilioData, Inc
# All Rights Reserved.


from __future__ import print_function
import os
import re
import sys
import uuid

import six
import prettytable
import textwrap

from workloadmgrclient import exceptions
from workloadmgrclient.openstack.common import strutils
from workloadmgrclient import timezone
from datetime import datetime, date
import calendar

import json
import ast
import pytz
import time

# Name of the EULA file expected to sit next to this module on disk.
EULA_AGREEMENT_FILE_NAME = 'EULA.txt'

def validate_int_value(value, lower_bound, upper_bound=None, **kwargs):
    """Return True when value is an int (or int-like string) within bounds.

    :param value: int or decimal string to validate.
    :param lower_bound: inclusive minimum.
    :param upper_bound: inclusive maximum, or None for no upper limit.
    :param kwargs: ignored; accepted for call-site flexibility.
    :returns: True when value parses as an int and lies within the bounds.
    """
    if isinstance(value, str):
        try:
            value = int(value)
        except ValueError:
            return False
    if value < lower_bound:
        return False
    # "is not None" (not truthiness): an upper bound of 0 must still be
    # enforced — the original "if upper_bound and ..." silently skipped it.
    if upper_bound is not None and value > upper_bound:
        return False
    return True


def arg(*args, **kwargs):
    """Decorator that attaches a CLI argument spec to a shell command."""

    def wrapper(func):
        add_arg(func, *args, **kwargs)
        return func

    return wrapper


def env(*vars, **kwargs):
    """Return the first non-empty environment variable among *vars.

    Falls back to kwargs['default'] (or '') when none are set.
    """
    for name in vars:
        val = os.environ.get(name)
        if val:
            return val
    return kwargs.get("default", "")


def add_arg(f, *args, **kwargs):
    """Bind CLI arguments to a shell.py `do_foo` function."""
    arguments = getattr(f, "arguments", None)
    if arguments is None:
        arguments = f.arguments = []
    # Skip duplicates that can occur when the module is shared across tests;
    # prepend because decorator composition applies options bottom-up, so
    # appending would show positional options reversed.
    if (args, kwargs) not in arguments:
        arguments.insert(0, (args, kwargs))


def add_resource_manager_extra_kwargs_hook(f, hook):
    """Register a hook that injects extra kwargs into ResourceManager calls.

    The `do_foo` calls in shell.py receive CLI args and pass them through to
    the ResourceManager.  Before the pass-through, every hook registered here
    is invoked, letting it contribute additional kwargs derived from the
    command line.  Registration is idempotent per hook name.
    """
    hooks = getattr(f, "resource_manager_kwargs_hooks", None)
    if hooks is None:
        hooks = f.resource_manager_kwargs_hooks = []

    already_registered = {h.__name__ for h in hooks}
    if hook.__name__ not in already_registered:
        hooks.append(hook)


def get_resource_manager_extra_kwargs(f, args, allow_conflicts=False):
    """Return extra_kwargs by calling resource manager kwargs hooks.

    :param f: function carrying `resource_manager_kwargs_hooks`
        (see add_resource_manager_extra_kwargs_hook).
    :param args: parsed CLI args, forwarded to each hook.
    :param allow_conflicts: when False, raise if two hooks set the same key.
    :returns: merged dict of all hook-contributed kwargs.
    :raises Exception: when hooks redefine each other's keys and conflicts
        are not allowed.
    """
    hooks = getattr(f, "resource_manager_kwargs_hooks", [])
    extra_kwargs = {}
    for hook in hooks:
        hook_kwargs = hook(args)

        conflicting_keys = set(hook_kwargs.keys()) & set(extra_kwargs.keys())
        if conflicting_keys and not allow_conflicts:
            # Explicit f-string instead of the fragile "% locals()" trick,
            # which silently depends on local variable names.
            raise Exception(
                f"Hook '{hook.__name__}' is attempting to redefine"
                f" attributes '{conflicting_keys}'"
            )

        extra_kwargs.update(hook_kwargs)

    return extra_kwargs


def unauthenticated(f):
    """Mark the decorated function as not requiring authentication.

    Usage:
        @unauthenticated
        def mymethod(f):
            ...
    """
    setattr(f, "unauthenticated", True)
    return f


def isunauthenticated(f):
    """Check whether f is marked as skipping authentication.

    Returns True only when the @unauthenticated decorator set the flag;
    undecorated functions yield False.
    """
    return getattr(f, "unauthenticated", False)


def service_type(stype):
    """Attach a 'service_type' attribute to the decorated function.

    Usage:
        @service_type('volume')
        def mymethod(f):
            ...
    """

    def decorator(func):
        func.service_type = stype
        return func

    return decorator


def get_service_type(f):
    """Return the service type previously attached to f, or None."""
    return getattr(f, "service_type", None)


def pretty_choice_list(l):
    """Render an iterable as a comma-separated list of quoted values."""
    return ", ".join("'{}'".format(item) for item in l)


def print_list(objs, fields, formatters=None, headers=None, colwidth=None):
    """Print a table with one row per object in objs.

    :param objs: iterable of dicts or objects exposing the field attributes.
    :param fields: column names; each is resolved via formatters, dict key,
        or attribute lookup (spaces become underscores; lowercased unless the
        field is a known mixed-case name).
    :param formatters: optional dict mapping field name -> callable(obj).
    :param headers: optional alternate column headers (defaults to fields).
    :param colwidth: optional column widths forwarded to PrettyTable.
    """
    # None defaults instead of shared mutable {} / [] defaults.
    formatters = {} if formatters is None else formatters
    headers = [] if headers is None else headers
    colwidth = [] if colwidth is None else colwidth
    mixed_case_fields = [
        "serverId",
        "UserName",
        "ObjectName",
        "Timestamp",
        "UserId",
        "Details",
    ]
    pt = prettytable.PrettyTable(
        [f for f in (headers or fields)], caching=False, colWidths=colwidth
    )
    pt.aligns = ["l" for f in fields]

    # Sort on the last created_at/project_id/timestamp-like column seen,
    # otherwise on the first column.
    sortby_index = 0

    for o in objs:
        row = []
        i = 0
        for field in fields:
            if field in formatters:
                row.append(formatters[field](o))
            else:
                if field in mixed_case_fields:
                    field_name = field.replace(" ", "_")
                else:
                    field_name = field.lower().replace(" ", "_")

                if field_name.lower() in ("created_at", "project_id", "timestamp"):
                    sortby_index = i
                if type(o) is dict:
                    data = o[field_name]
                else:
                    data = getattr(o, field_name, "")
                row.append(data)
            i = i + 1
        pt.add_row(row)

    # Nothing is printed for an empty object list.
    if len(pt._rows) > 0:
        print(strutils.safe_encode(pt.get_string(sortby=fields[sortby_index])).decode())


def print_object(objs, fields, formatters=None, headers=None, colwidth=None):
    """Print a single object as a one-row table.

    :param objs: object whose attributes named by fields are displayed.
    :param fields: column names (attribute lookup after space->underscore,
        lowercased except for known mixed-case fields).
    :param formatters: unused; kept for signature parity with print_list.
    :param headers: optional alternate column headers.
    :param colwidth: optional column widths forwarded to PrettyTable.
    """
    # None defaults instead of shared mutable {} / [] defaults.
    formatters = {} if formatters is None else formatters
    headers = [] if headers is None else headers
    colwidth = [] if colwidth is None else colwidth
    mixed_case_fields = ["serverId"]
    if headers:
        pt = prettytable.PrettyTable(
            [f for f in headers], caching=False, colWidths=colwidth
        )
    else:
        pt = prettytable.PrettyTable(
            [f for f in fields], caching=False, colWidths=colwidth
        )
    pt.aligns = ["l" for f in fields]

    sortby_index = 0

    row = []
    for field in fields:
        if field in mixed_case_fields:
            field_name = field.replace(" ", "_")
        else:
            field_name = field.lower().replace(" ", "_")
        row.append(getattr(objs, field_name, ""))
    pt.add_row(row)

    if len(pt._rows) > 0:
        print(strutils.safe_encode(pt.get_string(sortby=fields[sortby_index])).decode())


def print_file_search_dict(d, dict_property="Property", dict_value="Value", wrap=0):
    """Print file-search results as one table per top-level key.

    :param d: mapping of instance/group name -> list of drive dicts, where
        each drive dict maps a disk name to a list of matched file paths.
    :param dict_property: unused; kept for parity with print_dict's signature.
    :param dict_value: unused; kept for parity with print_dict's signature.
    :param wrap: unused; kept for parity with print_dict's signature.
    """
    for k, v in sorted(d.items()):
        pt = prettytable.PrettyTable(
            ["Disks of " + k, "File paths", "Metadata"], caching=False
        )
        pt.align = "l"
        if v and isinstance(v, list):
            for drive in v:
                val1 = ""
                for k1, v1 in sorted(drive.items()):
                    # Only list-valued entries hold file paths; skip the rest.
                    if type(v1) is not list:
                        continue
                    if len(v1) == 0:
                        val1 = "File path not existing"
                        pt.add_row([k1, val1, ""])
                    i = 0
                    for path in v1:
                        val1 = path
                        # Metadata column: drive.get(path, '') is empty unless
                        # the drive dict also keys metadata by path itself —
                        # TODO confirm against the server response shape.
                        pt.add_row([k1, val1, drive.get(path, '')])
                        i = i + 1
        print(strutils.safe_encode(pt.get_string()).decode())


def print_dict(d, dict_property="Property", dict_value="Value", wrap=0):
    """Print a dict as a two-column (property, value) table.

    dict/list values are JSON-encoded; values containing a literal '\\n'
    sequence (e.g. a fault stacktrace) are split across multiple rows.
    """
    table = prettytable.PrettyTable([dict_property, dict_value], caching=False)
    table.align = "l"
    for key, value in sorted(d.items()):
        # Serialize containers so their rendered length is predictable.
        if isinstance(value, (dict, list)):
            value = str(json.dumps(value))
        if wrap > 0:
            value = textwrap.fill(str(value), wrap)
        # A value holding literal backslash-n sequences becomes several rows,
        # with the key shown only on the first one.
        if value and isinstance(value, six.string_types) and r"\n" in value:
            label = key
            for segment in value.strip().split(r"\n"):
                table.add_row([label, segment])
                label = ""
        else:
            table.add_row([key, value])
    print(strutils.safe_encode(table.get_string()).decode())


def print_data_vertically(data, cols):
    """Print dict entries (as key/value rows) or list items, one per row."""
    table = prettytable.PrettyTable(cols)
    for entry in data:
        if isinstance(entry, dict):
            for key, value in entry.items():
                table.add_row([key, value])
            # Blank separator row between consecutive dicts.
            table.add_row(["", ""])
        elif isinstance(entry, list):
            for value in entry:
                table.add_row([value])
    print(strutils.safe_encode(table.get_string()).decode())


def find_resource(manager, name_or_id, query_string=None):
    """Resolve name_or_id to a resource through the given manager.

    Lookup order: integer id, then UUID, then human_id, then name, then
    display_name.  :query_string is passed to the get method; if it is not
    present, None is passed.

    :raises exceptions.CommandError: when nothing matches, or when several
        resources match the name.
    """
    # first try to get entity as integer id
    try:
        if isinstance(name_or_id, int) or name_or_id.isdigit():
            return manager.get(int(name_or_id), query_string)
    except exceptions.NotFound:
        pass

    # now try to get entity as uuid
    try:
        uuid.UUID(strutils.safe_decode(name_or_id))
        return manager.get(name_or_id, query_string)
    except (ValueError, exceptions.NotFound):
        pass

    try:
        try:
            obj = manager.find(human_id=name_or_id)
            return manager.get(obj.id, query_string)
        except exceptions.NotFound:
            pass

        # finally try to find entity by name
        try:
            obj = manager.find(name=name_or_id)
            return manager.get(obj.id, query_string)
        except exceptions.NotFound:
            try:
                obj = manager.find(display_name=name_or_id)
                return manager.get(obj.id, query_string)
            except (UnicodeDecodeError, exceptions.NotFound):
                try:
                    # Volumes does not have name, but display_name
                    # NOTE(review): this repeats the identical display_name
                    # lookup from the branch above — likely redundant, or a
                    # different kwarg was intended; confirm before changing.
                    obj = manager.find(display_name=name_or_id)
                    return manager.get(obj.id, query_string)
                except exceptions.NotFound:
                    msg = "No %s with a name or ID of '%s' exists." % (
                        manager.resource_class.__name__.lower(),
                        name_or_id,
                    )
                    raise exceptions.CommandError(msg)
    except exceptions.NoUniqueMatch:
        msg = (
            "Multiple %s matches found for '%s', use an ID to be more"
            " specific." % (manager.resource_class.__name__.lower(), name_or_id)
        )
        raise exceptions.CommandError(msg)


def _format_servers_list_networks(server):
    output = []
    for (network, addresses) in list(server.networks.items()):
        if len(addresses) == 0:
            continue
        addresses_csv = ", ".join(addresses)
        group = "%s=%s" % (network, addresses_csv)
        output.append(group)

    return "; ".join(output)


class HookableMixin(object):
    """Mixin so classes can register and run hooks."""

    # NOTE: class attribute — the registry is shared by every subclass,
    # since all of them mutate this one dict.
    _hooks_map = {}

    @classmethod
    def add_hook(cls, hook_type, hook_func):
        """Register hook_func under the given hook_type."""
        cls._hooks_map.setdefault(hook_type, []).append(hook_func)

    @classmethod
    def run_hooks(cls, hook_type, *args, **kwargs):
        """Invoke every hook registered under hook_type with *args/**kwargs."""
        for hook in cls._hooks_map.get(hook_type) or []:
            hook(*args, **kwargs)


def safe_issubclass(*args):
    """Like issubclass, but will just return False if not a class."""
    try:
        return bool(issubclass(*args))
    except TypeError:
        # First argument was not a class at all.
        return False


def import_class(import_str):
    """Import and return the object named by a dotted 'module.attr' path."""
    module_name, _, attr_name = import_str.rpartition(".")
    __import__(module_name)
    return getattr(sys.modules[module_name], attr_name)


_slugify_strip_re = re.compile(r"[^\w\s-]")
_slugify_hyphenate_re = re.compile(r"[-\s]+")


# http://code.activestate.com/recipes/
#   577257-slugify-make-a-string-usable-in-a-url-or-filename/
def slugify(value):
    """
    Normalizes string, converts to lowercase, removes non-alpha characters,
    and converts spaces to hyphens.

    From Django's "django/template/defaultfilters.py".
    """
    import unicodedata

    if not isinstance(value, six.text_type):
        value = six.text_type(value)
    # decode() back to text: on Python 3 the encode() yields bytes, and
    # applying the str-pattern regexes below to bytes raises TypeError.
    value = (
        unicodedata.normalize("NFKD", value)
        .encode("ascii", "ignore")
        .decode("ascii")
    )
    value = six.text_type(_slugify_strip_re.sub("", value).strip().lower())
    return _slugify_hyphenate_re.sub("-", value)


def bytes_fmt(num, suffix="B"):
    """Render a byte count as a human-readable string (e.g. '1.5KB')."""
    value = int(num)
    for prefix in ("", "K", "M", "G", "T", "P", "E", "Z"):
        if abs(value) < 1024.0:
            return "%3.1f%s%s" % (value, prefix, suffix)
        value /= 1024.0
    # Anything past zettabytes collapses into yottabytes.
    return "%.1f%s%s" % (value, "Y", suffix)


def humanize_time(seconds, granularity=5):
    """Convert a duration in seconds to 'N weeks, N days, ...' text.

    At most `granularity` units are included.  Returns the input unchanged
    when it cannot be processed (e.g. non-numeric).
    """
    units = (
        ("weeks", 604800),   # 60 * 60 * 24 * 7
        ("days", 86400),     # 60 * 60 * 24
        ("hours", 3600),     # 60 * 60
        ("minutes", 60),
        ("seconds", 1),
    )
    try:
        parts = []
        remaining = seconds
        for label, unit_seconds in units:
            amount = remaining // unit_seconds
            if amount:
                remaining -= amount * unit_seconds
                # Singularize the unit name for an amount of exactly 1.
                unit_name = label.rstrip("s") if amount == 1 else label
                parts.append("{} {}".format(amount, unit_name))
        return ", ".join(parts[:granularity])
    except Exception:
        return seconds


def read_file(file_obj, eval=False):
    """Read the contents of an open file object, always closing it.

    :param file_obj: open file-like object exposing .read() and .name.
    :param eval: when True, parse the contents as a Python literal.
    :returns: raw string data, or the parsed object when eval=True.
    :raises exceptions.CommandError: when the file is empty.
    """
    try:
        data = file_obj.read()
        if len(data) == 0:
            message = "File %s is empty." % file_obj.name
            raise exceptions.CommandError(message)
        if eval is True:
            try:
                data = ast.literal_eval(data)
            except Exception as ex:
                # str(ex), not ex.msg: ValueError from literal_eval has no
                # .msg attribute, which made the original raise AttributeError.
                print("Error: " + str(ex) + "\nIn parsing " + file_obj.name)
                exit()
        return data
    finally:
        file_obj.close()

# Allowed snapshot types for every schedule level.
valid_snap_type = ['full', 'incremental']

# Lowercase three-letter weekday abbreviations ('mon'..'sun');
# 1900-01-01 was a Monday, so days 1..7 walk Monday through Sunday.
weekdays_abbrev = [(date(1900, 1, day).strftime('%a')).lower() for day in range(1, 8)]

# Lowercase three-letter month abbreviations ('jan'..'dec').
months_abbrev = [calendar.month_abbr[month].lower() for month in range(1, 13)]

# Valid month_backup_day values: 1-31 plus the special "last" token.
# (range(1, 32): the previous range(1, 33) wrongly accepted day 32,
# contradicting the "must be in range 1-31" validation message.)
days_month = [_day for _day in range(1, 32)]

days_month.append("last")

# Validation rules per scheduler level.  Each entry lists:
#   - its mandatory sub-field with its allowed values or bounds
#     (e.g. "interval" bounds, weekday/month abbreviations),
#   - "retention" bounds plus the default applied when omitted,
#   - the allowed "snapshot_type" values,
#   - "child": the next level(s) in the hourly -> daily -> weekly/monthly
#     -> yearly hierarchy walked by validate_scheduler_fields().
schedule_validation_map = {
        "hourly" : {
            "interval": {
                "lower_bound": 1,
                "upper_bound": 24
                },
            "retention": {
                "lower_bound": 1,
                "upper_bound": 3072,
                "default": 3
                },
            "snapshot_type": valid_snap_type,
            "child": ["daily"]
            },
        "daily" : {
            # backup_time values are validated as '%H:%M' strings elsewhere.
            "backup_time":[],
            "retention":{
                "lower_bound": 1,
                "upper_bound": 365,
                "default": 7
                },
            "snapshot_type": valid_snap_type,
            "child": ["weekly", "monthly"]
            },
        "weekly" : {
            "backup_day": weekdays_abbrev,
            "retention":{
                "lower_bound": 1,
                "upper_bound": 156,
                "default": 7
                },
            "snapshot_type": valid_snap_type
            },
        "monthly" : {
            "month_backup_day": days_month,
            "retention":{
                "lower_bound": 1,
                "upper_bound": 60,
                "default": 12
                },
            "snapshot_type": valid_snap_type,
            "child": ["yearly"]
            },
        "yearly" : {
            "backup_month":months_abbrev,
            "retention":{
                "lower_bound": 1,
                "upper_bound": 30,
                "default": 1
                },
            "snapshot_type": valid_snap_type
            },
   }

# Validation bounds for manual (non-scheduled) snapshot retention: how many
# snapshots to keep and for how many days to keep them.
manual_validation_map = {
        "manual": {
            "retention":{
                "lower_bound": 1,
                "upper_bound": 1000
            },
            "retention_days_to_keep":{
                "lower_bound": 1,
                "upper_bound": 365
                }
        }
}

def validate_scheduler_fields(policy_params):
    """Validate the scheduler field hierarchy and fill in defaults.

    Starting from "hourly", walks each level's "child" links
    (hourly -> daily -> weekly/monthly -> yearly), verifying that every
    level named in the hierarchy is present, that each present level has
    its mandatory sub-field, and defaulting "retention" and
    "snapshot_type" where omitted.  Once a level uses 'full' snapshots,
    deeper levels may not switch back to 'incremental'.

    :param policy_params: dict of scheduler options keyed by level name,
        plus optional "enabled"/"start_date"/"start_time"/"end_date"/
        "timezone" entries.
    :returns: policy_params, mutated in place with defaults filled in.
    :raises exceptions.CommandError: on missing or inconsistent fields.
    """
    start = ["hourly"]
    policy_fields = list(policy_params.keys())
    last_snapshot_type = None
    # Hierarchy validation only applies when the scheduler is enabled.
    if policy_params.get("enabled", False) in [True, "true", "True"]:
        policy_fields.remove("enabled")
        if "start_date" in policy_params:
            policy_fields.remove("start_date")
        else:
            raise exceptions.CommandError("Scheduler 'start_date' missing")

        if "start_time" in policy_params:
            policy_fields.remove("start_time")
        else:
            raise exceptions.CommandError("Scheduler 'start_time' missing")

        opt_args = ['end_date', 'timezone']
        for opt_arg in opt_args:
            if opt_arg in policy_params:
                policy_fields.remove(opt_arg)

        while len(start) > 0:
            child_flag = False
            child = []
            cur_snapshot_type = None
            weekly_snapshot_type = None
            for policy_field in start:
                if policy_field in policy_fields:
                    # Check the field is known *before* indexing the map (the
                    # original indexed schedule_validation_map first, which
                    # made this check unreachable).
                    if policy_field not in schedule_validation_map:
                        raise exceptions.CommandError(f"Unknown scheduler field '{policy_field}'")

                    policy_fields.remove(policy_field)
                    child.extend(schedule_validation_map[policy_field].get("child", []))
                    child_flag = True

                    # The mandatory sub-field is whatever key remains once the
                    # common keys are removed (e.g. "interval", "backup_time").
                    must_field = list(schedule_validation_map[policy_field].keys() - (["retention", "snapshot_type", "child"]))
                    if len(must_field) > 0:
                        mandate_field = must_field[0]
                    else:
                        raise exceptions.CommandError(f"Mandatory sub field missing for scheduler '{policy_field}'")

                    if mandate_field not in policy_params[policy_field]:
                        raise exceptions.CommandError(f"Missing argument '{mandate_field}' for scheduler '{policy_field}' field.")

                    if "retention" not in policy_params[policy_field]:
                        policy_params[policy_field]["retention"] = schedule_validation_map[policy_field]["retention"]["default"]

                    # Default snapshot type: 'full' once a parent level went
                    # full (or at the yearly level), otherwise 'incremental'.
                    if "snapshot_type" not in policy_params[policy_field]:
                        policy_params[policy_field]["snapshot_type"] = "full" if last_snapshot_type == "full" or policy_field == "yearly" else "incremental"

                    if "weekly" == policy_field:
                        weekly_snapshot_type = policy_params[policy_field]["snapshot_type"]
                    else:
                        cur_snapshot_type = policy_params[policy_field]["snapshot_type"]
                else:
                    raise exceptions.CommandError(f"Scheduler field '{policy_field}' missing from scheduler cli.")

            # 'weekly' may only diverge from its parent by going 'full'.
            if weekly_snapshot_type:
                if last_snapshot_type != weekly_snapshot_type and weekly_snapshot_type != "full":
                    raise exceptions.CommandError("Invalid snapshot type for 'weekly'")

            if last_snapshot_type == "full" and cur_snapshot_type == "incremental":
                raise exceptions.CommandError(f"Snapshot_type must be 'full' for scheduler field {start}")

            last_snapshot_type = cur_snapshot_type
            if child_flag == True:
                start = child
            else:
                raise exceptions.CommandError(f"Missing argument '{mandate_field}' for scheduler field '{policy_field}'")

        if len(policy_fields) > 0:
            raise exceptions.CommandError(f"Scheduler dependencies field missing for '{policy_fields}'")
    return policy_params

def check_snapshot_type(snap_type):
    """Return True when snap_type is one of the valid snapshot types."""
    return snap_type in valid_snap_type


def validate_retention(sch_val, schedule_type):
    """Validate a scheduler 'retention' value against its configured bounds.

    :raises exceptions.CommandError: when out of range or non-numeric.
    """
    sch_key = "retention"
    bounds = schedule_validation_map[schedule_type][sch_key]
    if not validate_int_value(sch_val, bounds["lower_bound"], bounds["upper_bound"]):
        raise exceptions.CommandError(
            ("Invalid value '{}' provided for '{}' '{}'"
             " check help for more information").format(sch_val, schedule_type, sch_key)
        )


def validate_manual_retention(sch_val, schedule_type):
    """Validate a manual-schedule 'retention' value against its bounds.

    :raises exceptions.CommandError: when out of range or non-numeric.
    """
    sch_key = "retention"
    bounds = manual_validation_map[schedule_type][sch_key]
    if not validate_int_value(sch_val, bounds["lower_bound"], bounds["upper_bound"]):
        raise exceptions.CommandError(
            ("Invalid value '{}' provided for '{}' '{}'"
             " check help for more information").format(sch_val, schedule_type, sch_key)
        )


def validate_snapshot_type(sch_val, schedule_type):
    """Raise CommandError unless sch_val is a valid snapshot type."""
    if check_snapshot_type(sch_val):
        return
    err_msg = (f"Invalid {schedule_type} argument 'snapshot_type'. valid examples: 'full' or 'incremental'")
    raise exceptions.CommandError(err_msg)

def validate_generic_schedule(schedule_options, schedule_type):
    """Parse and validate one schedule type's key=value CLI options.

    :param schedule_options: list of lists of "key=value" strings, as
        produced by argparse (one inner list per option occurrence).
    :param schedule_type: a schedule_validation_map key, or 'manual'.
    :returns: dict of validated schedule settings (first occurrence of a
        key wins via setdefault).
    :raises exceptions.CommandError: on malformed or out-of-range values.
    """
    schedule = {}

    # merging internal option list [[k1=v1, k2=v2], [k3=v3]] to single list [k1=v1, k2=v2, k3=v3]
    schedule_str = [option for schedule_option in schedule_options for option in schedule_option]

    for gen_schedule in schedule_str:
        err_msg = (
            f"Invalid {schedule_type} argument '{schedule_str}'. jobschedule arguments must be of the "
            f"form --{schedule_type} [<key=value> [<key=value> ...]]"
        )
        for kv_str in gen_schedule.split(","):
            try:
                sch_key, sch_val = kv_str.split("=", 1)
            except ValueError:
                raise exceptions.CommandError(err_msg)
            if sch_key == "retention" or sch_key == "retention_days_to_keep":
                if schedule_type == 'manual':
                    validate_manual_retention(sch_val, schedule_type)
                else:
                    validate_retention(sch_val, schedule_type)
            elif sch_key == "snapshot_type":
                validate_snapshot_type(sch_val, schedule_type)
            elif sch_key == "backup_time":
                # Accept whitespace- or comma-separated HH:MM values.
                sch_val = re.sub(r"\s+", ',', sch_val.strip("'")).split(",")
                for sval in sch_val:
                    try:
                        datetime.strptime(sval, '%H:%M')
                    except ValueError:
                        # (fix: continuation is now an f-string so {sval}
                        # actually renders in the message)
                        raise exceptions.CommandError(f"Invalid jobschedule argument 'backup_time'."
                        f"backup_time {sval} must be in format '%H:%M' eg. '15:00', 10:30")
            elif sch_key == "backup_day":
                sch_val = re.sub(r"\s+", ',', sch_val).split(",")
                for sval in sch_val:
                    if sval not in schedule_validation_map[schedule_type][sch_key]:
                        raise exceptions.CommandError(f"Invalid jobschedule argument 'backup_day={sch_val}'."
                        "backup_day must be in format 'mon' eg. 'mon', sun")
            elif sch_key == "month_backup_day":
                sch_val = re.sub(r"\s+", ',', sch_val).split(",")
                # NOTE(review): int() here rejects the documented "last"
                # token with an uncaught ValueError — confirm intended.
                given_month_backup_day = [int(sval) for sval in sch_val]
                if not set(given_month_backup_day).issubset(set(schedule_validation_map[schedule_type][sch_key])):
                    value_diff = set(given_month_backup_day) - set(schedule_validation_map[schedule_type][sch_key])
                    raise exceptions.CommandError(f"Invalid 'month_backup_day' value {value_diff}."
                            "month_backup_day must be in range 1-31, 'last'")

            elif sch_key == "backup_month":
                sch_val = re.sub(r"\s+", ',', sch_val).split(",")
                for sval in sch_val:
                    if sval not in schedule_validation_map[schedule_type][sch_key]:
                        raise exceptions.CommandError(f"Invalid jobschedule argument 'backup_month'."
                        "backup_month value within the range jan-dec.")
            else:
                sch_val = re.sub(r"\s+", ',', sch_val)

            schedule.setdefault(sch_key, sch_val)

    if schedule_type != 'manual':
        # Reject keys that are not defined for this schedule type.
        required_field = schedule_validation_map[schedule_type].keys()
        if not set(schedule.keys()).issubset(set(required_field)):
            # (fix: second line is now an f-string so the placeholders render)
            err_msg = (f"Invalid {schedule_type} argument given."
                f"only following field{schedule.keys()} need to provided {required_field} ")
            raise exceptions.CommandError(err_msg)

    return schedule

def validate_policy_param(parsed_args):
    """Build and validate the policy schedule parameters from parsed args.

    Collects every schedule type's options off parsed_args, validates each,
    then validates the resulting hierarchy as a whole.
    """
    policy_params = {}
    for sched_type in schedule_validation_map:
        options = getattr(parsed_args, sched_type)
        policy_params.setdefault(sched_type, validate_generic_schedule(options, sched_type))
    return validate_scheduler_fields(policy_params)

def validate_backup_time(schedule_info):
    """Check that every daily 'backup_time' aligns with the hourly schedule.

    Expands start_time + the hourly interval into the set of hours snapshots
    will actually run, then verifies each daily backup_time is one of them.

    :param schedule_info: job schedule dict with optional "start_time",
        "hourly" (with "interval") and "daily" (with "backup_time") keys.
    :raises exceptions.CommandError: when a daily backup_time falls outside
        the hourly run times.
    """
    start_time = schedule_info.get("start_time")
    interval = schedule_info["hourly"].get("interval") if schedule_info.get("hourly") else None
    invalid_time = []

    if start_time and interval:
        # start_time is validated elsewhere against '%I:%M %p' (12-hour
        # clock); parse it the same way here.  The original '%H:%M %p'
        # ignored the AM/PM marker, so "3:00 PM" was treated as 03:00.
        time_obj = datetime.strptime(start_time, "%I:%M %p")
        hour = int(time_obj.strftime("%H"))
        hourly_interval = int(interval)
        # Walk backwards to the earliest hour in the same interval series.
        pre_hr = 0
        while hour > 0:
            pre_hr = hour
            hour -= hourly_interval
        hour = pre_hr
        # Enumerate every HH:MM the hourly schedule will fire at.
        backup_time_list = []
        while hour < 24:
            btime = str('{:02d}'.format(hour)) + ":" + str(time_obj.strftime("%M"))
            backup_time_list.append(btime)
            hour += hourly_interval

        if schedule_info.get("daily"):
            backup_time = schedule_info["daily"].get("backup_time", [])
            for bt in backup_time:
                if bt not in backup_time_list:
                    invalid_time.append(bt)
    if invalid_time:
        err_msg = (f"Invalid backup time provided under daily retention {invalid_time}."\
                " Please ensure the backup time falls within the specified start time" \
                "and interval range for daily retention.")
        raise exceptions.CommandError(err_msg)

def validate_job_scheduler_param(parsed_args):
    """Build and validate the --jobschedule options into a job_schedule dict.

    Parses the free-form key=value --jobschedule options, validates
    start_time/timezone, merges in the per-schedule-type options
    (hourly/daily/...), checks backup times against the hourly interval,
    and finally validates manual retention settings.

    :param parsed_args: argparse namespace with 'jobschedule' plus one
        attribute per schedule type.
    :returns: validated job_schedule dict; always contains 'enabled' and
        'timezone'.
    :raises exceptions.CommandError: on any malformed or missing option.
    """
    job_schedule = {}

    for jobschedule_str in parsed_args.jobschedule:
        err_msg = (
            "Invalid jobschedule argument '%s'. jobschedule arguments must be of the "
            "form --jobschedule <key=value>" % jobschedule_str
        )

        for kv_str in jobschedule_str.split(","):
            try:
                sch_key, sch_val = kv_str.split("=", 1)
            except ValueError:
                raise exceptions.CommandError(err_msg)

            if sch_key == 'start_time':
                try:
                    datetime.strptime(sch_val, '%I:%M %p')
                except ValueError:
                    raise exceptions.CommandError("Invalid jobschedule argument 'start_time'."\
                    "start_time must be in format '%I:%M %p' eg. '3:00 PM', 10:30 AM")

            if sch_key == 'timezone':
                try:
                    # pytz is already imported at module level; the original
                    # re-imported it here redundantly.
                    pytz.timezone(sch_val)
                except Exception:
                    # (typo fix in the message: "timeone" -> "timezone")
                    raise exceptions.CommandError("Invalid timezone: {}. Please specify valid timezone.".format(sch_val))
            # setdefault: the first occurrence of a key wins.
            job_schedule.setdefault(sch_key, sch_val)

    for schedule_type in schedule_validation_map.keys():
        sch_val = validate_generic_schedule(getattr(parsed_args, schedule_type), schedule_type)
        if sch_val:
            job_schedule.setdefault(schedule_type, sch_val)
    validate_backup_time(job_schedule)

    if len(job_schedule) >= 1 and "enabled" not in job_schedule:
        raise exceptions.CommandError(
            "Please specify --jobschedule enabled option in order to set scheduler for this workload"
        )

    if len(job_schedule) == 0:
        job_schedule["enabled"] = False

    job_schedule = validate_scheduler_fields(job_schedule)

    # validate manual retention parameters
    for schedule_type in manual_validation_map.keys():
        sch_val = validate_generic_schedule([getattr(parsed_args, schedule_type)], schedule_type)
        if sch_val:
            job_schedule.setdefault(schedule_type, sch_val)

    if "timezone" not in job_schedule:
        job_schedule.setdefault("timezone", timezone.get_localzone().zone)

    return job_schedule


def get_eula_text():
    """Return the text of the bundled EULA agreement file.

    Reads EULA_AGREEMENT_FILE_NAME from the directory containing this module.
    :raises OSError: when the EULA file is missing or unreadable.
    """
    # The original declared `global EULA_AGREEMENT_FILE_NAME`, but the name
    # is only read — no global statement is needed.
    eula_file_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), EULA_AGREEMENT_FILE_NAME
    )
    with open(eula_file_path, 'r', encoding='utf-8') as eula_fd:
        return eula_fd.read()