
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# cython: language_level = 3

from cython cimport binding

from pyarrow.lib cimport (check_status, pyarrow_wrap_metadata,
                          pyarrow_unwrap_metadata)
from pyarrow.lib import frombytes, tobytes, KeyValueMetadata
from pyarrow.includes.common cimport *
from pyarrow.includes.libarrow cimport *
from pyarrow.includes.libarrow_fs cimport *
from pyarrow._fs cimport FileSystem


cpdef enum S3LogLevel:
    Off = <int8_t> CS3LogLevel_Off
    Fatal = <int8_t> CS3LogLevel_Fatal
    Error = <int8_t> CS3LogLevel_Error
    Warn = <int8_t> CS3LogLevel_Warn
    Info = <int8_t> CS3LogLevel_Info
    Debug = <int8_t> CS3LogLevel_Debug
    Trace = <int8_t> CS3LogLevel_Trace


def initialize_s3(S3LogLevel log_level=S3LogLevel.Fatal, int num_event_loop_threads=1):
    """
    Initialize S3 support.

    Parameters
    ----------
    log_level : S3LogLevel
        Level of logging.
    num_event_loop_threads : int, default 1
        How many threads to use for the AWS SDK's I/O event loop.

    Examples
    --------
    >>> fs.initialize_s3(fs.S3LogLevel.Error) # doctest: +SKIP
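
    Increase the number of threads used for the AWS SDK's I/O event loop
    (values are illustrative):

    >>> fs.initialize_s3(fs.S3LogLevel.Error, num_event_loop_threads=4) # doctest: +SKIP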
    """
    cdef CS3GlobalOptions options
    options.log_level = <CS3LogLevel> log_level
    options.num_event_loop_threads = num_event_loop_threads
    check_status(CInitializeS3(options))


def ensure_s3_initialized():
    """
    Initialize S3 (with default options) if not already initialized.
    """
    check_status(CEnsureS3Initialized())


def finalize_s3():
    """
    Finalize S3 support.
    """
    check_status(CFinalizeS3())


def ensure_s3_finalized():
    """
    Finalize S3 if already initialized.
    """
    check_status(CEnsureS3Finalized())
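
# Illustrative lifecycle sketch (assumes pyarrow was built with S3 support):
# explicit initialization is optional, since S3FileSystem calls
# ensure_s3_initialized() itself, but an application may set the log level up
# front and finalize S3 support on shutdown:
#
#   from pyarrow import fs
#   fs.initialize_s3(fs.S3LogLevel.Error)
#   ...  # use fs.S3FileSystem(...)
#   fs.finalize_s3()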


def resolve_s3_region(bucket):
    """
    Resolve the S3 region of a bucket.

    Parameters
    ----------
    bucket : str
        An S3 bucket name.

    Returns
    -------
    region : str
        An S3 region name.

    Examples
    --------
    >>> fs.resolve_s3_region('voltrondata-labs-datasets')
    'us-east-2'
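
    The resolved region can then be used to construct an S3FileSystem
    (network access required):

    >>> s3 = fs.S3FileSystem(
    ...     region=fs.resolve_s3_region('voltrondata-labs-datasets')) # doctest: +SKIP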
    """
    cdef:
        c_string c_bucket
        c_string c_region

    ensure_s3_initialized()

    c_bucket = tobytes(bucket)
    with nogil:
        c_region = GetResultValue(ResolveS3BucketRegion(c_bucket))

    return frombytes(c_region)


class S3RetryStrategy:
    """
    Base class for AWS retry strategies for use with S3.

    Parameters
    ----------
    max_attempts : int, default 3
        The maximum number of retry attempts before failing.
    """

    def __init__(self, max_attempts=3):
        self.max_attempts = max_attempts


class AwsStandardS3RetryStrategy(S3RetryStrategy):
    """
    Represents an AWS Standard retry strategy for use with S3.

    Parameters
    ----------
    max_attempts : int, default 3
        The maximum number of retry attempts before failing.
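
    Examples
    --------
    Pass a strategy instance to S3FileSystem (values are illustrative):

    >>> from pyarrow import fs
    >>> s3 = fs.S3FileSystem(
    ...     region='us-east-1',
    ...     retry_strategy=fs.AwsStandardS3RetryStrategy(max_attempts=5)) # doctest: +SKIP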
    """
    pass


class AwsDefaultS3RetryStrategy(S3RetryStrategy):
    """
    Represents an AWS Default retry strategy for use with S3.

    Parameters
    ----------
    max_attempts : int, default 3
        The maximum number of retry attempts before failing.
    """
    pass


cdef class S3FileSystem(FileSystem):
    """
    S3-backed FileSystem implementation

    AWS access_key and secret_key can be provided explicitly.

    If role_arn is provided instead of access_key and secret_key, temporary
    credentials will be fetched by issuing a request to STS to assume the
    specified role.

    If neither access_key nor secret_key are provided, and role_arn is also
    not provided, then S3FileSystem attempts to establish credentials
    automatically, trying the following methods in order:

    * ``AWS_ACCESS_KEY_ID``, ``AWS_SECRET_ACCESS_KEY``, and ``AWS_SESSION_TOKEN`` environment variables
    * configuration files such as ``~/.aws/credentials`` and ``~/.aws/config``
    * for nodes on Amazon EC2, the EC2 Instance Metadata Service

    Note: S3 buckets are special and the operations available on them may be
    limited or more expensive than desired.

    When S3FileSystem creates new buckets (assuming allow_bucket_creation is
    True), it does not pass any non-default settings. In AWS S3, the bucket and
    all objects will not be publicly visible, and will have no bucket policies
    and no resource tags. To have more control over how buckets are created,
    use a different API to create them.

    Parameters
    ----------
    access_key : str, default None
        AWS Access Key ID. Pass None to use the standard AWS environment
        variables and/or configuration file.
    secret_key : str, default None
        AWS Secret Access key. Pass None to use the standard AWS environment
        variables and/or configuration file.
    session_token : str, default None
        AWS Session Token.  An optional session token, required if access_key
        and secret_key are temporary credentials from STS.
    anonymous : boolean, default False
        Whether to connect anonymously if access_key and secret_key are None.
        If true, will not attempt to look up credentials using standard AWS
        configuration methods.
    role_arn : str, default None
        AWS Role ARN.  If provided instead of access_key and secret_key,
        temporary credentials will be fetched by assuming this role.
    session_name : str, default None
        An optional identifier for the assumed role session.
    external_id : str, default None
        An optional unique identifier that might be required when you assume
        a role in another account.
    load_frequency : int, default 900
        The frequency (in seconds) with which temporary credentials from an
        assumed role session will be refreshed.
    region : str, default None
        AWS region to connect to. If not set, the AWS SDK will attempt to
        determine the region using heuristics such as environment variables,
        configuration profile, EC2 metadata, or default to 'us-east-1' when SDK
        version <1.8. One can also use :func:`pyarrow.fs.resolve_s3_region` to
        automatically resolve the region from a bucket name.
    request_timeout : double, default None
        Socket read timeouts on Windows and macOS, in seconds.
        If omitted, the AWS SDK default value is used (typically 3 seconds).
        This option is ignored on non-Windows, non-macOS systems.
    connect_timeout : double, default None
        Socket connection timeout, in seconds.
        If omitted, the AWS SDK default value is used (typically 1 second).
    scheme : str, default 'https'
        S3 connection transport scheme.
    endpoint_override : str, default None
        Override the default S3 endpoint with a connect string such as
        "localhost:9000".
    background_writes : boolean, default True
        Whether file writes will be issued in the background, without
        blocking.
    default_metadata : mapping or pyarrow.KeyValueMetadata, default None
        Default metadata for open_output_stream.  This will be ignored if
        non-empty metadata is passed to open_output_stream.
    proxy_options : dict or str, default None
        If a proxy is used, provide the options here. Supported options are:
        'scheme' (str: 'http' or 'https'; required), 'host' (str; required),
        'port' (int; required), 'username' (str; optional),
        'password' (str; optional).
        A proxy URI (str) can also be provided, in which case these options
        will be derived from the provided URI.
        The following are equivalent::

            S3FileSystem(proxy_options='http://username:password@localhost:8020')
            S3FileSystem(proxy_options={'scheme': 'http', 'host': 'localhost',
                                        'port': 8020, 'username': 'username',
                                        'password': 'password'})
    allow_bucket_creation : bool, default False
        Whether to allow CreateDir at the bucket level. This option may also be
        passed in a URI query parameter.
    allow_bucket_deletion : bool, default False
        Whether to allow DeleteDir at the bucket level. This option may also be
        passed in a URI query parameter.
    retry_strategy : S3RetryStrategy, default AwsStandardS3RetryStrategy(max_attempts=3)
        The retry strategy to use with S3; fail after max_attempts. Available
        strategies are AwsStandardS3RetryStrategy, AwsDefaultS3RetryStrategy.
    force_virtual_addressing : bool, default False
        Whether to use virtual addressing of buckets.
        If true, then virtual addressing is always enabled.
        If false, then virtual addressing is only enabled if `endpoint_override` is empty.
        This can be used for non-AWS backends that only support virtual hosted-style access.

    Examples
    --------
    >>> from pyarrow import fs
    >>> s3 = fs.S3FileSystem(region='us-west-2')
    >>> s3.get_file_info(fs.FileSelector(
    ...    'power-analysis-ready-datastore/power_901_constants.zarr/FROCEAN', recursive=True
    ... ))
    [<FileInfo for 'power-analysis-ready-datastore/power_901_constants.zarr/FROCEAN/.zarray...
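
    Anonymous (unauthenticated) access can be requested explicitly; the
    region value below is illustrative:

    >>> s3_anon = fs.S3FileSystem(anonymous=True, region='us-east-2') # doctest: +SKIP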

    For usage of the methods see examples for :class:`~pyarrow.fs.LocalFileSystem`.
    """

    cdef:
        CS3FileSystem* s3fs

    def __init__(self, *, access_key=None, secret_key=None, session_token=None,
                 bint anonymous=False, region=None, request_timeout=None,
                 connect_timeout=None, scheme=None, endpoint_override=None,
                 bint background_writes=True, default_metadata=None,
                 role_arn=None, session_name=None, external_id=None,
                 load_frequency=900, proxy_options=None,
                 allow_bucket_creation=False, allow_bucket_deletion=False,
                 retry_strategy: S3RetryStrategy = AwsStandardS3RetryStrategy(
                     max_attempts=3),
                 force_virtual_addressing=False):
        cdef:
            optional[CS3Options] options
            shared_ptr[CS3FileSystem] wrapped

        # Need to do this before initializing `options` as the S3Options
        # constructor has a debug check against use after S3 finalization.
        ensure_s3_initialized()

        if access_key is not None and secret_key is None:
            raise ValueError(
                'In order to initialize with explicit credentials both '
                'access_key and secret_key must be provided, '
                '`secret_key` is not set.'
            )
        elif access_key is None and secret_key is not None:
            raise ValueError(
                'In order to initialize with explicit credentials both '
                'access_key and secret_key must be provided, '
                '`access_key` is not set.'
            )

        elif session_token is not None and (access_key is None or
                                            secret_key is None):
            raise ValueError(
                'In order to initialize a session with temporary credentials, '
                'both secret_key and access_key must be provided in addition '
                'to session_token.'
            )

        elif (access_key is not None or secret_key is not None):
            if anonymous:
                raise ValueError(
                    'Cannot pass anonymous=True together with access_key '
                    'and secret_key.')

            if role_arn:
                raise ValueError(
                    'Cannot provide role_arn with access_key and secret_key')

            if session_token is None:
                session_token = ""

            options = CS3Options.FromAccessKey(
                tobytes(access_key),
                tobytes(secret_key),
                tobytes(session_token)
            )
        elif anonymous:
            if role_arn:
                raise ValueError(
                    'Cannot provide role_arn with anonymous=True')

            options = CS3Options.Anonymous()
        elif role_arn:
            if session_name is None:
                session_name = ''
            if external_id is None:
                external_id = ''

            options = CS3Options.FromAssumeRole(
                tobytes(role_arn),
                tobytes(session_name),
                tobytes(external_id),
                load_frequency
            )
        else:
            options = CS3Options.Defaults()