Why Gemfury? Push, build, and install  RubyGems npm packages Python packages Maven artifacts PHP packages Go Modules Debian packages RPM packages NuGet packages

Repository URL to install this package:

Details    
Size: Mime:
#!/usr/bin/python
# -*- coding: utf-8 -*-

import os
import traceback
from ansible.module_utils.basic import AnsibleModule

import boto3
import botocore

DOCUMENTATION = '''
---

module: validate_s3_credentials
short_description: Validates S3 credentials on this particular node
options:
  s3_access_key_id:
    description:
        - s3 access key
    required: yes
  s3_secret_access_key:
    description:
        - s3 secret access key
    required: yes
  s3_endpoint:
    description:
        - s3 endpoint, not needed for AWS s3
    required: no
    default: None
  use_ssl:
    description:
        - Use ssl connection to s3 store
    required: no
    choices: [ "yes", "no" ]
    default: yes
  s3_region:
    description:
        - region of s3 repository
    required: no
    default: 'us-east-2'
  s3_bucket:
    description:
        - s3 bucket where all backups are stored
    required: yes
  s3_signature_version:
    description:
        - Signature version, mostly used for minio
    required: no
    default: 'default'
  vault_s3_ssl_cert:
    description:
        - CA certificate bundle used to verify the SSL connection to s3
    required: no
    default: ''

requirements: [ boto3, botocore ]

author: Murali Balcha
'''

EXAMPLES = '''
examples:
- name: validate s3 bucket accessibility from triliovault node
  validate_s3_credentials:
    s3_bucket: 'cjktrilio2'
    s3_access_key_id: 'AKIAJ45JOULXXXXXXX'
    s3_secret_access_key: 'XzInFojGkykWWv1KXXXXXXXVphVsPlN+kUD027O8'

'''


def validate_s3_credentials(s3_access_key_id, s3_secret_access_key,
                            s3_endpoint, s3_region, s3_bucket,
                            use_ssl, s3_signature_version, vault_s3_ssl_cert):
    """Validate S3 credentials by exercising the target bucket.

    Performs a HEAD on the bucket, then writes and deletes a test object
    with a deliberately long key to confirm the object store supports the
    key-path lengths this product generates.

    Args:
        s3_access_key_id: S3 access key.
        s3_secret_access_key: S3 secret access key.
        s3_endpoint: Endpoint URL, or None for AWS S3.
        s3_region: Region of the s3 repository.
        s3_bucket: Bucket where backups are stored.
        use_ssl: Whether to connect over SSL.
        s3_signature_version: Signature version; 'default' or '' means
            use boto3's default (mostly needed for minio).
        vault_s3_ssl_cert: Optional CA certificate bundle path used to
            verify the SSL connection; falsy means boto3's default
            verification behaviour.

    Returns:
        dict: {'status': 'Success'} when every probe succeeds.

    Raises:
        botocore.exceptions.ClientError: with error 403, 404, or 500 and
            any relevant information when the store rejects a request.
    """
    s3_config_object = None
    # Only build an explicit Config when a non-default signature version
    # was requested (minio deployments typically need this).
    if s3_signature_version not in ('default', ''):
        s3_config_object = botocore.client.Config(
            signature_version=s3_signature_version)

    # Build the client arguments once; pass 'verify' only when a CA bundle
    # was supplied so boto3 keeps its default verification otherwise.
    client_kwargs = dict(
        region_name=s3_region,
        use_ssl=use_ssl,
        aws_access_key_id=s3_access_key_id,
        aws_secret_access_key=s3_secret_access_key,
        endpoint_url=s3_endpoint,
        config=s3_config_object,
    )
    if vault_s3_ssl_cert:
        client_kwargs['verify'] = vault_s3_ssl_cert
    s3_client = boto3.client('s3', **client_kwargs)

    # Fails (e.g. 403/404) when the credentials or the bucket are invalid.
    s3_client.head_bucket(Bucket=s3_bucket)

    # Add a check to see if the current object store will support
    # our path length: round-trip a test object under a representative
    # long key.
    long_key = os.path.join(
        'tvault_config/',
        'workload_f5190be6-7f80-4856-8c24-149cb40500c5/',
        'snapshot_f2e5c6a7-3c21-4b7f-969c-915bb408c64f/',
        'vm_id_e81d1ac8-b49a-4ccf-9d92-5f1ef358f1be/',
        'vm_res_id_72477d99-c475-4a5d-90ae-2560f5f3b319_vda/',
        'deac2b8a-dca9-4415-adc1-f3c6598204ed-segments/',
        '0000000000000000.00000000')
    s3_client.put_object(
        Bucket=s3_bucket, Key=long_key, Body='Test Data')

    s3_client.delete_object(Bucket=s3_bucket, Key=long_key)

    return {'status': 'Success'}


def main():
    """Ansible entry point: parse module params, run the S3 validation,
    and report success or the full traceback on failure."""
    module = AnsibleModule(
        argument_spec=dict(
            s3_access_key_id=dict(required=True),
            s3_secret_access_key=dict(required=True),
            s3_endpoint=dict(required=False, default=None),
            use_ssl=dict(required=False, default=True, type='bool'),
            s3_region=dict(required=False, default='us-east-2'),
            s3_bucket=dict(required=True),
            s3_signature_version=dict(required=False, default='default'),
            vault_s3_ssl_cert=dict(required=False, default='')
        ),
    )

    params = module.params
    # Normalise an empty endpoint string to None (same as "not provided").
    endpoint = params['s3_endpoint'] or None

    try:
        validate_s3_credentials(params['s3_access_key_id'],
                                params['s3_secret_access_key'],
                                endpoint,
                                params['s3_region'],
                                params['s3_bucket'],
                                params['use_ssl'],
                                params['s3_signature_version'],
                                params['vault_s3_ssl_cert'])
    except Exception:
        # Surface the full traceback to the Ansible caller.
        module.fail_json(msg=traceback.format_exc())
    else:
        module.exit_json(changed=True)


# this is magic, see lib/ansible/module_common.py
# <<INCLUDE_ANSIBLE_MODULE_COMMON>>
# Standard entry guard: run the module only when executed directly.
if __name__ == '__main__':
    main()