# orm/loading.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""private module containing functions used to convert database
rows into object instances and associated state.

the functions here are called primarily by Query and Mapper,
as well as by some of the attribute loading strategies.

"""
from __future__ import absolute_import

from .. import util
from . import attributes, exc as orm_exc
from ..sql import util as sql_util
from . import strategy_options
from . import path_registry
from .. import sql

from .util import _none_set, state_str
from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
from .. import exc as sa_exc
import collections

_new_runid = util.counter()


def instances(query, cursor, context):
    """Return an ORM result as an iterator."""

    context.runid = _new_runid()
    context.post_load_paths = {}

    filtered = query._has_mapper_entities

    single_entity = not query._only_return_tuples and \
        len(query._entities) == 1 and \
        query._entities[0].supports_single_entity

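    # filter_fn reduces each row to a hashable key so that
    # util.unique_list() further below can drop duplicate rows; a single
    # entity row keys on its id(), otherwise a per-entity tuple is built.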
    if filtered:
        if single_entity:
            filter_fn = id
        else:
            def filter_fn(row):
                return tuple(
                    id(item)
                    if ent.use_id_for_hash
                    else item
                    for ent, item in zip(query._entities, row)
                )

    try:
        (process, labels) = \
            list(zip(*[
                query_entity.row_processor(query,
                                           context, cursor)
                for query_entity in query._entities
            ]))

        if not single_entity:
            keyed_tuple = util.lightweight_named_tuple('result', labels)

        while True:
            context.partials = {}

            if query._yield_per:
                fetch = cursor.fetchmany(query._yield_per)
                if not fetch:
                    break
            else:
                fetch = cursor.fetchall()

            if single_entity:
                proc = process[0]
                rows = [proc(row) for row in fetch]
            else:
                rows = [keyed_tuple([proc(row) for proc in process])
                        for row in fetch]

            for path, post_load in \
                    context.post_load_paths.items():
                post_load.invoke(context, path)

            if filtered:
                rows = util.unique_list(rows, filter_fn)

            for row in rows:
                yield row

            if not query._yield_per:
                break
    except Exception as err:
        cursor.close()
        util.raise_from_cause(err)
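
# Illustrative sketch (not part of this module): this generator is what
# ultimately drives iteration of a Query.  The names below assume a
# configured Session and a mapped User class.
#
#     for user in session.query(User).yield_per(100):
#         ...  # rows arrive in batches of 100 via cursor.fetchmany()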


@util.dependencies("sqlalchemy.orm.query")
def merge_result(querylib, query, iterator, load=True):
    """Merge a result into this :class:`.Query` object's Session."""

    session = query.session
    if load:
        # flush current contents if we expect to load data
        session._autoflush()

    autoflush = session.autoflush
    try:
        session.autoflush = False
        single_entity = len(query._entities) == 1
        if single_entity:
            if isinstance(query._entities[0], querylib._MapperEntity):
                result = [session._merge(
                    attributes.instance_state(instance),
                    attributes.instance_dict(instance),
                    load=load, _recursive={}, _resolve_conflict_map={})
                    for instance in iterator]
            else:
                result = list(iterator)
        else:
            mapped_entities = [i for i, e in enumerate(query._entities)
                               if isinstance(e, querylib._MapperEntity)]
            result = []
            keys = [ent._label_name for ent in query._entities]
            keyed_tuple = util.lightweight_named_tuple('result', keys)
            for row in iterator:
                newrow = list(row)
                for i in mapped_entities:
                    if newrow[i] is not None:
                        newrow[i] = session._merge(
                            attributes.instance_state(newrow[i]),
                            attributes.instance_dict(newrow[i]),
                            load=load, _recursive={}, _resolve_conflict_map={})
                result.append(keyed_tuple(newrow))

        return iter(result)
    finally:
        session.autoflush = autoflush
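
# Illustrative sketch (User and the incoming rows are assumptions, not
# part of this module): Query.merge_result() is the public entry point
# into this function, e.g.
#
#     q = session.query(User)
#     merged = q.merge_result(rows_from_other_session, load=False)
#
# mapped instances within the incoming rows are merged into q.session;
# load=False skips loading existing state, as when restoring from a cache.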


def get_from_identity(session, key, passive):
    """Look up the given key in the given session's identity map,
    check the object for expired state if found.

    """
    instance = session.identity_map.get(key)
    if instance is not None:

        state = attributes.instance_state(instance)

        # expired - ensure it still exists
        if state.expired:
            if not passive & attributes.SQL_OK:
                # TODO: no coverage here
                return attributes.PASSIVE_NO_RESULT
            elif not passive & attributes.RELATED_OBJECT_OK:
                # this mode is used within a flush and the instance's
                # expired state will be checked soon enough, if necessary
                return instance
            try:
                state._load_expired(state, passive)
            except orm_exc.ObjectDeletedError:
                session._remove_newly_deleted([state])
                return None
        return instance
    else:
        return None
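
# Illustrative sketch (the names here are assumptions, not part of this
# module): "key" is an identity-map key such as the one returned by
# Mapper.identity_key_from_primary_key(), and "passive" is a flag value
# such as attributes.PASSIVE_OFF or attributes.PASSIVE_NO_FETCH that
# controls whether SQL may be emitted to check expired state.
#
#     key = inspect(User).identity_key_from_primary_key((5,))
#     obj = get_from_identity(session, key, attributes.PASSIVE_OFF)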


def load_on_ident(query, key,
                  refresh_state=None, with_for_update=None,
                  only_load_props=None):
    """Load the given identity key from the database."""

    if key is not None:
        ident = key[1]
        identity_token = key[2]
    else:
        ident = identity_token = None

    return load_on_pk_identity(
        query, ident, refresh_state=refresh_state,
        with_for_update=with_for_update,
        only_load_props=only_load_props,
        identity_token=identity_token
    )
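
# note: the identity key passed to load_on_ident() is the three-element
# form (class, primary key tuple, identity_token) used in this release,
# hence the key[1] / key[2] indexing above.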


def load_on_pk_identity(query, primary_key_identity,
                        refresh_state=None, with_for_update=None,
                        only_load_props=None, identity_token=None):

    """Load the given primary key identity from the database."""

    if refresh_state is None:
        q = query._clone()
        q._get_condition()
    else:
        q = query._clone()

    if primary_key_identity is not None:
        mapper = query._mapper_zero()

        (_get_clause, _get_params) = mapper._get_clause

        # None present in ident - turn those comparisons
        # into "IS NULL"
        if None in primary_key_identity:
            nones = set([
                        _get_params[col].key for col, value in
                        zip(mapper.primary_key, primary_key_identity)
                        if value is None
                        ])
            _get_clause = sql_util.adapt_criterion_to_null(
                _get_clause, nones)

        _get_clause = q._adapt_clause(_get_clause, True, False)
        q._criterion = _get_clause

        params = dict([
            (_get_params[primary_key].key, id_val)
            for id_val, primary_key
            in zip(primary_key_identity, mapper.primary_key)
        ])

        q._params = params

    # with_for_update needs to be query.LockmodeArg()
    if with_for_update is not None:
        version_check = True
        q._for_update_arg = with_for_update
    elif query._for_update_arg is not None:
        version_check = True
        q._for_update_arg = query._for_update_arg
    else:
        version_check = False

    q._get_options(
        populate_existing=bool(refresh_state),
        version_check=version_check,
        only_load_props=only_load_props,
        refresh_state=refresh_state,
        identity_token=identity_token)
    q._order_by = None

    try:
        return q.one()
    except orm_exc.NoResultFound:
        return None
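
# Illustrative sketch: Query.get() falls through to this function when
# the identity map cannot satisfy the lookup, e.g.
#
#     user = session.query(User).get(5)
#
# User is an assumed mapped class; the exact call chain is internal to
# Query.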


def _setup_entity_query(
    context, mapper, query_entity,
        path, adapter, column_collection,
        with_polymorphic=None, only_load_props=None,
        polymorphic_discriminator=None, **kw):

    if with_polymorphic:
        poly_properties = mapper._iterate_polymorphic_properties(
            with_polymorphic)
    else:
        poly_properties = mapper._polymorphic_properties

    quick_populators = {}

    path.set(
        context.attributes,
        "memoized_setups",
        quick_populators)

    for value in poly_properties:
        if only_load_props and \
                value.key not in only_load_props:
            continue
        value.setup(
            context,
            query_entity,
            path,
            adapter,
            only_load_props=only_load_props,
            column_collection=column_collection,
            memoized_populators=quick_populators,
            **kw
        )

    if polymorphic_discriminator is not None and \
        polymorphic_discriminator \
            is not mapper.polymorphic_on:

        if adapter:
            pd = adapter.columns[polymorphic_discriminator]
        else:
            pd = polymorphic_discriminator
        column_collection.append(pd)
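
# note: each property's setup() typically adds its columns to
# column_collection and may record a "quick populator" for itself in the
# memoized_setups dictionary stored on the path above;
# _instance_processor() below retrieves that same dictionary via
# path.get(context.attributes, "memoized_setups").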


def _instance_processor(
        mapper, context, result, path, adapter,
        only_load_props=None, refresh_state=None,
        polymorphic_discriminator=None,
        _polymorphic_from=None):
    """Produce a mapper level row processor callable
       which processes rows into mapped instances."""

    # note that this method, most of which exists in a closure
    # called _instance(), resists being broken out, as
    # attempts to do so tend to add significant function
    # call overhead.  _instance() is the most
    # performance-critical section in the whole ORM.

    pk_cols = mapper.primary_key

    if adapter:
        pk_cols = [adapter.columns[c] for c in pk_cols]

    identity_class = mapper._identity_class

    populators = collections.defaultdict(list)

    props = mapper._prop_set
    if only_load_props is not None:
        props = props.intersection(
            mapper._props[k] for k in only_load_props)

    quick_populators = path.get(
        context.attributes, "memoized_setups", _none_set)

    for prop in props:
        if prop in quick_populators:
            # this is an inlined path just for column-based attributes.
            col = quick_populators[prop]
            if col is _DEFER_FOR_STATE:
                populators["new"].append(
                    (prop.key, prop._deferred_column_loader))
            elif col is _SET_DEFERRED_EXPIRED:
                # note that in this path, we are no longer
                # searching in the result to see if the column might
                # be present in some unexpected way.
                populators["expire"].append((prop.key, False))
            else:
                getter = None
                # the "adapter" can be here via different paths,