Update 2025-04-24_11:44:19
170
venv/lib/python3.11/site-packages/sqlalchemy/orm/__init__.py
Normal file
@@ -0,0 +1,170 @@
# orm/__init__.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

"""
Functional constructs for ORM configuration.

See the SQLAlchemy object relational tutorial and mapper configuration
documentation for an overview of how this module is used.

"""

from __future__ import annotations

from typing import Any

from . import exc as exc
from . import mapper as mapperlib
from . import strategy_options as strategy_options
from ._orm_constructors import _mapper_fn as mapper
from ._orm_constructors import aliased as aliased
from ._orm_constructors import backref as backref
from ._orm_constructors import clear_mappers as clear_mappers
from ._orm_constructors import column_property as column_property
from ._orm_constructors import composite as composite
from ._orm_constructors import contains_alias as contains_alias
from ._orm_constructors import create_session as create_session
from ._orm_constructors import deferred as deferred
from ._orm_constructors import dynamic_loader as dynamic_loader
from ._orm_constructors import join as join
from ._orm_constructors import mapped_column as mapped_column
from ._orm_constructors import orm_insert_sentinel as orm_insert_sentinel
from ._orm_constructors import outerjoin as outerjoin
from ._orm_constructors import query_expression as query_expression
from ._orm_constructors import relationship as relationship
from ._orm_constructors import synonym as synonym
from ._orm_constructors import with_loader_criteria as with_loader_criteria
from ._orm_constructors import with_polymorphic as with_polymorphic
from .attributes import AttributeEventToken as AttributeEventToken
from .attributes import InstrumentedAttribute as InstrumentedAttribute
from .attributes import QueryableAttribute as QueryableAttribute
from .base import class_mapper as class_mapper
from .base import DynamicMapped as DynamicMapped
from .base import InspectionAttrExtensionType as InspectionAttrExtensionType
from .base import LoaderCallableStatus as LoaderCallableStatus
from .base import Mapped as Mapped
from .base import NotExtension as NotExtension
from .base import ORMDescriptor as ORMDescriptor
from .base import PassiveFlag as PassiveFlag
from .base import SQLORMExpression as SQLORMExpression
from .base import WriteOnlyMapped as WriteOnlyMapped
from .context import FromStatement as FromStatement
from .context import QueryContext as QueryContext
from .decl_api import add_mapped_attribute as add_mapped_attribute
from .decl_api import as_declarative as as_declarative
from .decl_api import declarative_base as declarative_base
from .decl_api import declarative_mixin as declarative_mixin
from .decl_api import DeclarativeBase as DeclarativeBase
from .decl_api import DeclarativeBaseNoMeta as DeclarativeBaseNoMeta
from .decl_api import DeclarativeMeta as DeclarativeMeta
from .decl_api import declared_attr as declared_attr
from .decl_api import has_inherited_table as has_inherited_table
from .decl_api import MappedAsDataclass as MappedAsDataclass
from .decl_api import registry as registry
from .decl_api import synonym_for as synonym_for
from .decl_base import MappedClassProtocol as MappedClassProtocol
from .descriptor_props import Composite as Composite
from .descriptor_props import CompositeProperty as CompositeProperty
from .descriptor_props import Synonym as Synonym
from .descriptor_props import SynonymProperty as SynonymProperty
from .dynamic import AppenderQuery as AppenderQuery
from .events import AttributeEvents as AttributeEvents
from .events import InstanceEvents as InstanceEvents
from .events import InstrumentationEvents as InstrumentationEvents
from .events import MapperEvents as MapperEvents
from .events import QueryEvents as QueryEvents
from .events import SessionEvents as SessionEvents
from .identity import IdentityMap as IdentityMap
from .instrumentation import ClassManager as ClassManager
from .interfaces import EXT_CONTINUE as EXT_CONTINUE
from .interfaces import EXT_SKIP as EXT_SKIP
from .interfaces import EXT_STOP as EXT_STOP
from .interfaces import InspectionAttr as InspectionAttr
from .interfaces import InspectionAttrInfo as InspectionAttrInfo
from .interfaces import MANYTOMANY as MANYTOMANY
from .interfaces import MANYTOONE as MANYTOONE
from .interfaces import MapperProperty as MapperProperty
from .interfaces import NO_KEY as NO_KEY
from .interfaces import NO_VALUE as NO_VALUE
from .interfaces import ONETOMANY as ONETOMANY
from .interfaces import PropComparator as PropComparator
from .interfaces import RelationshipDirection as RelationshipDirection
from .interfaces import UserDefinedOption as UserDefinedOption
from .loading import merge_frozen_result as merge_frozen_result
from .loading import merge_result as merge_result
from .mapped_collection import attribute_keyed_dict as attribute_keyed_dict
from .mapped_collection import (
    attribute_mapped_collection as attribute_mapped_collection,
)
from .mapped_collection import column_keyed_dict as column_keyed_dict
from .mapped_collection import (
    column_mapped_collection as column_mapped_collection,
)
from .mapped_collection import keyfunc_mapping as keyfunc_mapping
from .mapped_collection import KeyFuncDict as KeyFuncDict
from .mapped_collection import mapped_collection as mapped_collection
from .mapped_collection import MappedCollection as MappedCollection
from .mapper import configure_mappers as configure_mappers
from .mapper import Mapper as Mapper
from .mapper import reconstructor as reconstructor
from .mapper import validates as validates
from .properties import ColumnProperty as ColumnProperty
from .properties import MappedColumn as MappedColumn
from .properties import MappedSQLExpression as MappedSQLExpression
from .query import AliasOption as AliasOption
from .query import Query as Query
from .relationships import foreign as foreign
from .relationships import Relationship as Relationship
from .relationships import RelationshipProperty as RelationshipProperty
from .relationships import remote as remote
from .scoping import QueryPropertyDescriptor as QueryPropertyDescriptor
from .scoping import scoped_session as scoped_session
from .session import close_all_sessions as close_all_sessions
from .session import make_transient as make_transient
from .session import make_transient_to_detached as make_transient_to_detached
from .session import object_session as object_session
from .session import ORMExecuteState as ORMExecuteState
from .session import Session as Session
from .session import sessionmaker as sessionmaker
from .session import SessionTransaction as SessionTransaction
from .session import SessionTransactionOrigin as SessionTransactionOrigin
from .state import AttributeState as AttributeState
from .state import InstanceState as InstanceState
from .strategy_options import contains_eager as contains_eager
from .strategy_options import defaultload as defaultload
from .strategy_options import defer as defer
from .strategy_options import immediateload as immediateload
from .strategy_options import joinedload as joinedload
from .strategy_options import lazyload as lazyload
from .strategy_options import Load as Load
from .strategy_options import load_only as load_only
from .strategy_options import noload as noload
from .strategy_options import raiseload as raiseload
from .strategy_options import selectin_polymorphic as selectin_polymorphic
from .strategy_options import selectinload as selectinload
from .strategy_options import subqueryload as subqueryload
from .strategy_options import undefer as undefer
from .strategy_options import undefer_group as undefer_group
from .strategy_options import with_expression as with_expression
from .unitofwork import UOWTransaction as UOWTransaction
from .util import Bundle as Bundle
from .util import CascadeOptions as CascadeOptions
from .util import LoaderCriteriaOption as LoaderCriteriaOption
from .util import object_mapper as object_mapper
from .util import polymorphic_union as polymorphic_union
from .util import was_deleted as was_deleted
from .util import with_parent as with_parent
from .writeonly import WriteOnlyCollection as WriteOnlyCollection
from .. import util as _sa_util


def __go(lcls: Any) -> None:
    _sa_util.preloaded.import_prefix("sqlalchemy.orm")
    _sa_util.preloaded.import_prefix("sqlalchemy.ext")


__go(locals())
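For orientation, a minimal sketch (not part of the commit; table and class names are invented) of the public names re-exported above in use, in the SQLAlchemy 2.0 declarative style:

# illustrative sketch only -- exercises DeclarativeBase, Mapped,
# mapped_column, relationship, Session and select from the names above
from sqlalchemy import ForeignKey, create_engine, select
from sqlalchemy.orm import (
    DeclarativeBase,
    Mapped,
    Session,
    mapped_column,
    relationship,
)


class Base(DeclarativeBase):
    pass


class User(Base):
    __tablename__ = "user_account"

    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str]
    # target class inferred from the Mapped[...] annotation
    addresses: Mapped[list["Address"]] = relationship(back_populates="user")


class Address(Base):
    __tablename__ = "address"

    id: Mapped[int] = mapped_column(primary_key=True)
    email: Mapped[str]
    user_id: Mapped[int] = mapped_column(ForeignKey("user_account.id"))
    user: Mapped[User] = relationship(back_populates="addresses")


engine = create_engine("sqlite://")  # in-memory database for the demo
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(User(name="spongebob", addresses=[Address(email="s@example.org")]))
    session.commit()
    print(session.scalars(select(User)).all())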
38 binary files not shown.
File diff suppressed because it is too large
179
venv/lib/python3.11/site-packages/sqlalchemy/orm/_typing.py
Normal file
@@ -0,0 +1,179 @@
# orm/_typing.py
# Copyright (C) 2022-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

from __future__ import annotations

import operator
from typing import Any
from typing import Dict
from typing import Mapping
from typing import Optional
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar
from typing import Union

from ..engine.interfaces import _CoreKnownExecutionOptions
from ..sql import roles
from ..sql._orm_types import DMLStrategyArgument as DMLStrategyArgument
from ..sql._orm_types import (
    SynchronizeSessionArgument as SynchronizeSessionArgument,
)
from ..sql._typing import _HasClauseElement
from ..sql.elements import ColumnElement
from ..util.typing import Protocol
from ..util.typing import TypeGuard

if TYPE_CHECKING:
    from .attributes import AttributeImpl
    from .attributes import CollectionAttributeImpl
    from .attributes import HasCollectionAdapter
    from .attributes import QueryableAttribute
    from .base import PassiveFlag
    from .decl_api import registry as _registry_type
    from .interfaces import InspectionAttr
    from .interfaces import MapperProperty
    from .interfaces import ORMOption
    from .interfaces import UserDefinedOption
    from .mapper import Mapper
    from .relationships import RelationshipProperty
    from .state import InstanceState
    from .util import AliasedClass
    from .util import AliasedInsp
    from ..sql._typing import _CE
    from ..sql.base import ExecutableOption

_T = TypeVar("_T", bound=Any)


_T_co = TypeVar("_T_co", bound=Any, covariant=True)


_O = TypeVar("_O", bound=object)
"""The 'ORM mapped object' type.

"""


if TYPE_CHECKING:
    _RegistryType = _registry_type

_InternalEntityType = Union["Mapper[_T]", "AliasedInsp[_T]"]

_ExternalEntityType = Union[Type[_T], "AliasedClass[_T]"]

_EntityType = Union[
    Type[_T], "AliasedClass[_T]", "Mapper[_T]", "AliasedInsp[_T]"
]


_ClassDict = Mapping[str, Any]
_InstanceDict = Dict[str, Any]

_IdentityKeyType = Tuple[Type[_T], Tuple[Any, ...], Optional[Any]]

_ORMColumnExprArgument = Union[
    ColumnElement[_T],
    _HasClauseElement[_T],
    roles.ExpressionElementRole[_T],
]


_ORMCOLEXPR = TypeVar("_ORMCOLEXPR", bound=ColumnElement[Any])


class _OrmKnownExecutionOptions(_CoreKnownExecutionOptions, total=False):
    populate_existing: bool
    autoflush: bool
    synchronize_session: SynchronizeSessionArgument
    dml_strategy: DMLStrategyArgument
    is_delete_using: bool
    is_update_from: bool
    render_nulls: bool


OrmExecuteOptionsParameter = Union[
    _OrmKnownExecutionOptions, Mapping[str, Any]
]


class _ORMAdapterProto(Protocol):
    """protocol for the :class:`.AliasedInsp._orm_adapt_element` method
    which is a synonym for :class:`.AliasedInsp._adapt_element`.


    """

    def __call__(self, obj: _CE, key: Optional[str] = None) -> _CE: ...


class _LoaderCallable(Protocol):
    def __call__(
        self, state: InstanceState[Any], passive: PassiveFlag
    ) -> Any: ...


def is_orm_option(
    opt: ExecutableOption,
) -> TypeGuard[ORMOption]:
    return not opt._is_core


def is_user_defined_option(
    opt: ExecutableOption,
) -> TypeGuard[UserDefinedOption]:
    return not opt._is_core and opt._is_user_defined  # type: ignore


def is_composite_class(obj: Any) -> bool:
    # inlining is_dataclass(obj)
    return hasattr(obj, "__composite_values__") or hasattr(
        obj, "__dataclass_fields__"
    )


if TYPE_CHECKING:

    def insp_is_mapper_property(
        obj: Any,
    ) -> TypeGuard[MapperProperty[Any]]: ...

    def insp_is_mapper(obj: Any) -> TypeGuard[Mapper[Any]]: ...

    def insp_is_aliased_class(obj: Any) -> TypeGuard[AliasedInsp[Any]]: ...

    def insp_is_attribute(
        obj: InspectionAttr,
    ) -> TypeGuard[QueryableAttribute[Any]]: ...

    def attr_is_internal_proxy(
        obj: InspectionAttr,
    ) -> TypeGuard[QueryableAttribute[Any]]: ...

    def prop_is_relationship(
        prop: MapperProperty[Any],
    ) -> TypeGuard[RelationshipProperty[Any]]: ...

    def is_collection_impl(
        impl: AttributeImpl,
    ) -> TypeGuard[CollectionAttributeImpl]: ...

    def is_has_collection_adapter(
        impl: AttributeImpl,
    ) -> TypeGuard[HasCollectionAdapter]: ...

else:
    insp_is_mapper_property = operator.attrgetter("is_property")
    insp_is_mapper = operator.attrgetter("is_mapper")
    insp_is_aliased_class = operator.attrgetter("is_aliased_class")
    insp_is_attribute = operator.attrgetter("is_attribute")
    attr_is_internal_proxy = operator.attrgetter("_is_internal_proxy")
    is_collection_impl = operator.attrgetter("collection")
    prop_is_relationship = operator.attrgetter("_is_relationship")
    is_has_collection_adapter = operator.attrgetter(
        "_is_has_collection_adapter"
    )
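The bottom of _typing.py uses a dual-definition trick: under TYPE_CHECKING a TypeGuard stub narrows types for the checker, while at runtime the same name is bound to a cheap operator.attrgetter reading a boolean flag. A standalone sketch of the pattern (class names invented, not from this commit):

# illustrative sketch of the TYPE_CHECKING / attrgetter dual definition
from __future__ import annotations

import operator
from typing import TYPE_CHECKING, Any, TypeGuard  # TypeGuard: Python 3.10+


class Fruit:
    is_apple = False


class Apple(Fruit):
    is_apple = True


if TYPE_CHECKING:
    # the checker sees only this TypeGuard signature
    def insp_is_apple(obj: Any) -> TypeGuard[Apple]: ...

else:
    # at runtime: no function-call overhead, just an attribute read
    insp_is_apple = operator.attrgetter("is_apple")


def bite(f: Fruit) -> None:
    if insp_is_apple(f):
        # mypy narrows f to Apple inside this branch
        print("crunch", f.is_apple)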
2835
venv/lib/python3.11/site-packages/sqlalchemy/orm/attributes.py
Normal file
File diff suppressed because it is too large
973
venv/lib/python3.11/site-packages/sqlalchemy/orm/base.py
Normal file
@@ -0,0 +1,973 @@
# orm/base.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

"""Constants and rudimental functions used throughout the ORM.

"""

from __future__ import annotations

from enum import Enum
import operator
import typing
from typing import Any
from typing import Callable
from typing import Dict
from typing import Generic
from typing import no_type_check
from typing import Optional
from typing import overload
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar
from typing import Union

from . import exc
from ._typing import insp_is_mapper
from .. import exc as sa_exc
from .. import inspection
from .. import util
from ..sql import roles
from ..sql.elements import SQLColumnExpression
from ..sql.elements import SQLCoreOperations
from ..util import FastIntFlag
from ..util.langhelpers import TypingOnly
from ..util.typing import Literal

if typing.TYPE_CHECKING:
    from ._typing import _EntityType
    from ._typing import _ExternalEntityType
    from ._typing import _InternalEntityType
    from .attributes import InstrumentedAttribute
    from .dynamic import AppenderQuery
    from .instrumentation import ClassManager
    from .interfaces import PropComparator
    from .mapper import Mapper
    from .state import InstanceState
    from .util import AliasedClass
    from .writeonly import WriteOnlyCollection
    from ..sql._typing import _ColumnExpressionArgument
    from ..sql._typing import _InfoType
    from ..sql.elements import ColumnElement
    from ..sql.operators import OperatorType

_T = TypeVar("_T", bound=Any)
_T_co = TypeVar("_T_co", bound=Any, covariant=True)

_O = TypeVar("_O", bound=object)


class LoaderCallableStatus(Enum):
    PASSIVE_NO_RESULT = 0
    """Symbol returned by a loader callable or other attribute/history
    retrieval operation when a value could not be determined, based
    on loader callable flags.
    """

    PASSIVE_CLASS_MISMATCH = 1
    """Symbol indicating that an object is locally present for a given
    primary key identity but it is not of the requested class. The
    return value is therefore None and no SQL should be emitted."""

    ATTR_WAS_SET = 2
    """Symbol returned by a loader callable to indicate the
    retrieved value, or values, were assigned to their attributes
    on the target object.
    """

    ATTR_EMPTY = 3
    """Symbol used internally to indicate an attribute had no callable."""

    NO_VALUE = 4
    """Symbol which may be placed as the 'previous' value of an attribute,
    indicating no value was loaded for an attribute when it was modified,
    and flags indicated we were not to load it.
    """

    NEVER_SET = NO_VALUE
    """
    Synonymous with NO_VALUE

    .. versionchanged:: 1.4 NEVER_SET was merged with NO_VALUE

    """


(
    PASSIVE_NO_RESULT,
    PASSIVE_CLASS_MISMATCH,
    ATTR_WAS_SET,
    ATTR_EMPTY,
    NO_VALUE,
) = tuple(LoaderCallableStatus)

NEVER_SET = NO_VALUE


class PassiveFlag(FastIntFlag):
    """Bitflag interface that passes options onto loader callables"""

    NO_CHANGE = 0
    """No callables or SQL should be emitted on attribute access
    and no state should change
    """

    CALLABLES_OK = 1
    """Loader callables can be fired off if a value
    is not present.
    """

    SQL_OK = 2
    """Loader callables can emit SQL at least on scalar value attributes."""

    RELATED_OBJECT_OK = 4
    """Callables can use SQL to load related objects as well
    as scalar value attributes.
    """

    INIT_OK = 8
    """Attributes should be initialized with a blank
    value (None or an empty collection) upon get, if no other
    value can be obtained.
    """

    NON_PERSISTENT_OK = 16
    """Callables can be emitted if the parent is not persistent."""

    LOAD_AGAINST_COMMITTED = 32
    """Callables should use committed values as primary/foreign keys during a
    load.
    """

    NO_AUTOFLUSH = 64
    """Loader callables should disable autoflush."""

    NO_RAISE = 128
    """Loader callables should not raise any assertions"""

    DEFERRED_HISTORY_LOAD = 256
    """indicates special load of the previous value of an attribute"""

    INCLUDE_PENDING_MUTATIONS = 512

    # pre-packaged sets of flags used as inputs
    PASSIVE_OFF = (
        RELATED_OBJECT_OK | NON_PERSISTENT_OK | INIT_OK | CALLABLES_OK | SQL_OK
    )
    "Callables can be emitted in all cases."

    PASSIVE_RETURN_NO_VALUE = PASSIVE_OFF ^ INIT_OK
    """PASSIVE_OFF ^ INIT_OK"""

    PASSIVE_NO_INITIALIZE = PASSIVE_RETURN_NO_VALUE ^ CALLABLES_OK
    "PASSIVE_RETURN_NO_VALUE ^ CALLABLES_OK"

    PASSIVE_NO_FETCH = PASSIVE_OFF ^ SQL_OK
    "PASSIVE_OFF ^ SQL_OK"

    PASSIVE_NO_FETCH_RELATED = PASSIVE_OFF ^ RELATED_OBJECT_OK
    "PASSIVE_OFF ^ RELATED_OBJECT_OK"

    PASSIVE_ONLY_PERSISTENT = PASSIVE_OFF ^ NON_PERSISTENT_OK
    "PASSIVE_OFF ^ NON_PERSISTENT_OK"

    PASSIVE_MERGE = PASSIVE_OFF | NO_RAISE
    """PASSIVE_OFF | NO_RAISE

    Symbol used specifically for session.merge() and similar cases

    """


(
    NO_CHANGE,
    CALLABLES_OK,
    SQL_OK,
    RELATED_OBJECT_OK,
    INIT_OK,
    NON_PERSISTENT_OK,
    LOAD_AGAINST_COMMITTED,
    NO_AUTOFLUSH,
    NO_RAISE,
    DEFERRED_HISTORY_LOAD,
    INCLUDE_PENDING_MUTATIONS,
    PASSIVE_OFF,
    PASSIVE_RETURN_NO_VALUE,
    PASSIVE_NO_INITIALIZE,
    PASSIVE_NO_FETCH,
    PASSIVE_NO_FETCH_RELATED,
    PASSIVE_ONLY_PERSISTENT,
    PASSIVE_MERGE,
) = PassiveFlag.__members__.values()

DEFAULT_MANAGER_ATTR = "_sa_class_manager"
DEFAULT_STATE_ATTR = "_sa_instance_state"


class EventConstants(Enum):
    EXT_CONTINUE = 1
    EXT_STOP = 2
    EXT_SKIP = 3
    NO_KEY = 4
    """indicates an :class:`.AttributeEvent` event that did not have any
    key argument.

    .. versionadded:: 2.0

    """


EXT_CONTINUE, EXT_STOP, EXT_SKIP, NO_KEY = tuple(EventConstants)


class RelationshipDirection(Enum):
    """enumeration which indicates the 'direction' of a
    :class:`_orm.RelationshipProperty`.

    :class:`.RelationshipDirection` is accessible from the
    :attr:`_orm.Relationship.direction` attribute of
    :class:`_orm.RelationshipProperty`.

    """

    ONETOMANY = 1
    """Indicates the one-to-many direction for a :func:`_orm.relationship`.

    This symbol is typically used by the internals but may be exposed within
    certain API features.

    """

    MANYTOONE = 2
    """Indicates the many-to-one direction for a :func:`_orm.relationship`.

    This symbol is typically used by the internals but may be exposed within
    certain API features.

    """

    MANYTOMANY = 3
    """Indicates the many-to-many direction for a :func:`_orm.relationship`.

    This symbol is typically used by the internals but may be exposed within
    certain API features.

    """


ONETOMANY, MANYTOONE, MANYTOMANY = tuple(RelationshipDirection)


class InspectionAttrExtensionType(Enum):
    """Symbols indicating the type of extension that a
    :class:`.InspectionAttr` is part of."""


class NotExtension(InspectionAttrExtensionType):
    NOT_EXTENSION = "not_extension"
    """Symbol indicating an :class:`InspectionAttr` that's
    not part of sqlalchemy.ext.

    Is assigned to the :attr:`.InspectionAttr.extension_type`
    attribute.

    """


_never_set = frozenset([NEVER_SET])

_none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT])

_none_only_set = frozenset([None])

_SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED")

_DEFER_FOR_STATE = util.symbol("DEFER_FOR_STATE")

_RAISE_FOR_STATE = util.symbol("RAISE_FOR_STATE")


_F = TypeVar("_F", bound=Callable[..., Any])
_Self = TypeVar("_Self")


def _assertions(
    *assertions: Any,
) -> Callable[[_F], _F]:
    @util.decorator
    def generate(fn: _F, self: _Self, *args: Any, **kw: Any) -> _Self:
        for assertion in assertions:
            assertion(self, fn.__name__)
        fn(self, *args, **kw)
        return self

    return generate


if TYPE_CHECKING:

    def manager_of_class(cls: Type[_O]) -> ClassManager[_O]: ...

    @overload
    def opt_manager_of_class(cls: AliasedClass[Any]) -> None: ...

    @overload
    def opt_manager_of_class(
        cls: _ExternalEntityType[_O],
    ) -> Optional[ClassManager[_O]]: ...

    def opt_manager_of_class(
        cls: _ExternalEntityType[_O],
    ) -> Optional[ClassManager[_O]]: ...

    def instance_state(instance: _O) -> InstanceState[_O]: ...

    def instance_dict(instance: object) -> Dict[str, Any]: ...

else:
    # these can be replaced by sqlalchemy.ext.instrumentation
    # if augmented class instrumentation is enabled.

    def manager_of_class(cls):
        try:
            return cls.__dict__[DEFAULT_MANAGER_ATTR]
        except KeyError as ke:
            raise exc.UnmappedClassError(
                cls, f"Can't locate an instrumentation manager for class {cls}"
            ) from ke

    def opt_manager_of_class(cls):
        return cls.__dict__.get(DEFAULT_MANAGER_ATTR)

    instance_state = operator.attrgetter(DEFAULT_STATE_ATTR)

    instance_dict = operator.attrgetter("__dict__")


def instance_str(instance: object) -> str:
    """Return a string describing an instance."""

    return state_str(instance_state(instance))


def state_str(state: InstanceState[Any]) -> str:
    """Return a string describing an instance via its InstanceState."""

    if state is None:
        return "None"
    else:
        return "<%s at 0x%x>" % (state.class_.__name__, id(state.obj()))


def state_class_str(state: InstanceState[Any]) -> str:
    """Return a string describing an instance's class via its
    InstanceState.
    """

    if state is None:
        return "None"
    else:
        return "<%s>" % (state.class_.__name__,)


def attribute_str(instance: object, attribute: str) -> str:
    return instance_str(instance) + "." + attribute


def state_attribute_str(state: InstanceState[Any], attribute: str) -> str:
    return state_str(state) + "." + attribute


def object_mapper(instance: _T) -> Mapper[_T]:
    """Given an object, return the primary Mapper associated with the object
    instance.

    Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
    if no mapping is configured.

    This function is available via the inspection system as::

        inspect(instance).mapper

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
    not part of a mapping.

    """
    return object_state(instance).mapper


def object_state(instance: _T) -> InstanceState[_T]:
    """Given an object, return the :class:`.InstanceState`
    associated with the object.

    Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
    if no mapping is configured.

    Equivalent functionality is available via the :func:`_sa.inspect`
    function as::

        inspect(instance)

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
    not part of a mapping.

    """
    state = _inspect_mapped_object(instance)
    if state is None:
        raise exc.UnmappedInstanceError(instance)
    else:
        return state


@inspection._inspects(object)
def _inspect_mapped_object(instance: _T) -> Optional[InstanceState[_T]]:
    try:
        return instance_state(instance)
    except (exc.UnmappedClassError,) + exc.NO_STATE:
        return None


def _class_to_mapper(
    class_or_mapper: Union[Mapper[_T], Type[_T]]
) -> Mapper[_T]:
    # can't get mypy to see an overload for this
    insp = inspection.inspect(class_or_mapper, False)
    if insp is not None:
        return insp.mapper  # type: ignore
    else:
        assert isinstance(class_or_mapper, type)
        raise exc.UnmappedClassError(class_or_mapper)


def _mapper_or_none(
    entity: Union[Type[_T], _InternalEntityType[_T]]
) -> Optional[Mapper[_T]]:
    """Return the :class:`_orm.Mapper` for the given class or None if the
    class is not mapped.
    """

    # can't get mypy to see an overload for this
    insp = inspection.inspect(entity, False)
    if insp is not None:
        return insp.mapper  # type: ignore
    else:
        return None


def _is_mapped_class(entity: Any) -> bool:
    """Return True if the given object is a mapped class,
    :class:`_orm.Mapper`, or :class:`.AliasedClass`.
    """

    insp = inspection.inspect(entity, False)
    return (
        insp is not None
        and not insp.is_clause_element
        and (insp.is_mapper or insp.is_aliased_class)
    )


def _is_aliased_class(entity: Any) -> bool:
    insp = inspection.inspect(entity, False)
    return insp is not None and getattr(insp, "is_aliased_class", False)


@no_type_check
def _entity_descriptor(entity: _EntityType[Any], key: str) -> Any:
    """Return a class attribute given an entity and string name.

    May return :class:`.InstrumentedAttribute` or user-defined
    attribute.

    """
    insp = inspection.inspect(entity)
    if insp.is_selectable:
        description = entity
        entity = insp.c
    elif insp.is_aliased_class:
        entity = insp.entity
        description = entity
    elif hasattr(insp, "mapper"):
        description = entity = insp.mapper.class_
    else:
        description = entity

    try:
        return getattr(entity, key)
    except AttributeError as err:
        raise sa_exc.InvalidRequestError(
            "Entity '%s' has no property '%s'" % (description, key)
        ) from err


if TYPE_CHECKING:

    def _state_mapper(state: InstanceState[_O]) -> Mapper[_O]: ...

else:
    _state_mapper = util.dottedgetter("manager.mapper")


def _inspect_mapped_class(
    class_: Type[_O], configure: bool = False
) -> Optional[Mapper[_O]]:
    try:
        class_manager = opt_manager_of_class(class_)
        if class_manager is None or not class_manager.is_mapped:
            return None
        mapper = class_manager.mapper
    except exc.NO_STATE:
        return None
    else:
        if configure:
            mapper._check_configure()
        return mapper


def _parse_mapper_argument(arg: Union[Mapper[_O], Type[_O]]) -> Mapper[_O]:
    insp = inspection.inspect(arg, raiseerr=False)
    if insp_is_mapper(insp):
        return insp

    raise sa_exc.ArgumentError(f"Mapper or mapped class expected, got {arg!r}")


def class_mapper(class_: Type[_O], configure: bool = True) -> Mapper[_O]:
    """Given a class, return the primary :class:`_orm.Mapper` associated
    with the key.

    Raises :exc:`.UnmappedClassError` if no mapping is configured
    on the given class, or :exc:`.ArgumentError` if a non-class
    object is passed.

    Equivalent functionality is available via the :func:`_sa.inspect`
    function as::

        inspect(some_mapped_class)

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.

    """
    mapper = _inspect_mapped_class(class_, configure=configure)
    if mapper is None:
        if not isinstance(class_, type):
            raise sa_exc.ArgumentError(
                "Class object expected, got '%r'." % (class_,)
            )
        raise exc.UnmappedClassError(class_)
    else:
        return mapper


class InspectionAttr:
    """A base class applied to all ORM objects and attributes that are
    related to things that can be returned by the :func:`_sa.inspect` function.

    The attributes defined here allow the usage of simple boolean
    checks to test basic facts about the object returned.

    While the boolean checks here are basically the same as using
    the Python isinstance() function, the flags here can be used without
    the need to import all of these classes, and also such that
    the SQLAlchemy class system can change while leaving the flags
    here intact for forwards-compatibility.

    """

    __slots__: Tuple[str, ...] = ()

    is_selectable = False
    """Return True if this object is an instance of
    :class:`_expression.Selectable`."""

    is_aliased_class = False
    """True if this object is an instance of :class:`.AliasedClass`."""

    is_instance = False
    """True if this object is an instance of :class:`.InstanceState`."""

    is_mapper = False
    """True if this object is an instance of :class:`_orm.Mapper`."""

    is_bundle = False
    """True if this object is an instance of :class:`.Bundle`."""

    is_property = False
    """True if this object is an instance of :class:`.MapperProperty`."""

    is_attribute = False
    """True if this object is a Python :term:`descriptor`.

    This can refer to one of many types. Usually a
    :class:`.QueryableAttribute` which handles attributes events on behalf
    of a :class:`.MapperProperty`. But can also be an extension type
    such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
    The :attr:`.InspectionAttr.extension_type` will refer to a constant
    identifying the specific subtype.

    .. seealso::

        :attr:`_orm.Mapper.all_orm_descriptors`

    """

    _is_internal_proxy = False
    """True if this object is an internal proxy object.

    .. versionadded:: 1.2.12

    """

    is_clause_element = False
    """True if this object is an instance of
    :class:`_expression.ClauseElement`."""

    extension_type: InspectionAttrExtensionType = NotExtension.NOT_EXTENSION
    """The extension type, if any.
    Defaults to :attr:`.interfaces.NotExtension.NOT_EXTENSION`

    .. seealso::

        :class:`.HybridExtensionType`

        :class:`.AssociationProxyExtensionType`

    """


class InspectionAttrInfo(InspectionAttr):
    """Adds the ``.info`` attribute to :class:`.InspectionAttr`.

    The rationale for :class:`.InspectionAttr` vs. :class:`.InspectionAttrInfo`
    is that the former is compatible as a mixin for classes that specify
    ``__slots__``; this is essentially an implementation artifact.

    """

    __slots__ = ()

    @util.ro_memoized_property
    def info(self) -> _InfoType:
        """Info dictionary associated with the object, allowing user-defined
        data to be associated with this :class:`.InspectionAttr`.

        The dictionary is generated when first accessed. Alternatively,
        it can be specified as a constructor argument to the
        :func:`.column_property`, :func:`_orm.relationship`, or
        :func:`.composite`
        functions.

        .. seealso::

            :attr:`.QueryableAttribute.info`

            :attr:`.SchemaItem.info`

        """
        return {}


class SQLORMOperations(SQLCoreOperations[_T_co], TypingOnly):
    __slots__ = ()

    if typing.TYPE_CHECKING:

        def of_type(
            self, class_: _EntityType[Any]
        ) -> PropComparator[_T_co]: ...

        def and_(
            self, *criteria: _ColumnExpressionArgument[bool]
        ) -> PropComparator[bool]: ...

        def any(  # noqa: A001
            self,
            criterion: Optional[_ColumnExpressionArgument[bool]] = None,
            **kwargs: Any,
        ) -> ColumnElement[bool]: ...

        def has(
            self,
            criterion: Optional[_ColumnExpressionArgument[bool]] = None,
            **kwargs: Any,
        ) -> ColumnElement[bool]: ...


class ORMDescriptor(Generic[_T_co], TypingOnly):
    """Represent any Python descriptor that provides a SQL expression
    construct at the class level."""

    __slots__ = ()

    if typing.TYPE_CHECKING:

        @overload
        def __get__(
            self, instance: Any, owner: Literal[None]
        ) -> ORMDescriptor[_T_co]: ...

        @overload
        def __get__(
            self, instance: Literal[None], owner: Any
        ) -> SQLCoreOperations[_T_co]: ...

        @overload
        def __get__(self, instance: object, owner: Any) -> _T_co: ...

        def __get__(
            self, instance: object, owner: Any
        ) -> Union[ORMDescriptor[_T_co], SQLCoreOperations[_T_co], _T_co]: ...


class _MappedAnnotationBase(Generic[_T_co], TypingOnly):
    """common class for Mapped and similar ORM container classes.

    these are classes that can appear on the left side of an ORM declarative
    mapping, containing a mapped class or in some cases a collection
    surrounding a mapped class.

    """

    __slots__ = ()


class SQLORMExpression(
    SQLORMOperations[_T_co], SQLColumnExpression[_T_co], TypingOnly
):
    """A type that may be used to indicate any ORM-level attribute or
    object that acts in place of one, in the context of SQL expression
    construction.

    :class:`.SQLORMExpression` extends from the Core
    :class:`.SQLColumnExpression` to add additional SQL methods that are ORM
    specific, such as :meth:`.PropComparator.of_type`, and is part of the bases
    for :class:`.InstrumentedAttribute`. It may be used in :pep:`484` typing to
    indicate arguments or return values that should behave as ORM-level
    attribute expressions.

    .. versionadded:: 2.0.0b4


    """

    __slots__ = ()


class Mapped(
    SQLORMExpression[_T_co],
    ORMDescriptor[_T_co],
    _MappedAnnotationBase[_T_co],
    roles.DDLConstraintColumnRole,
):
    """Represent an ORM mapped attribute on a mapped class.

    This class represents the complete descriptor interface for any class
    attribute that will have been :term:`instrumented` by the ORM
    :class:`_orm.Mapper` class. Provides appropriate information to type
    checkers such as pylance and mypy so that ORM-mapped attributes
    are correctly typed.

    The most prominent use of :class:`_orm.Mapped` is in
    the :ref:`Declarative Mapping <orm_explicit_declarative_base>` form
    of :class:`_orm.Mapper` configuration, where used explicitly it drives
    the configuration of ORM attributes such as :func:`_orm.mapped_class`
    and :func:`_orm.relationship`.

    .. seealso::

        :ref:`orm_explicit_declarative_base`

        :ref:`orm_declarative_table`

    .. tip::

        The :class:`_orm.Mapped` class represents attributes that are handled
        directly by the :class:`_orm.Mapper` class. It does not include other
        Python descriptor classes that are provided as extensions, including
        :ref:`hybrids_toplevel` and the :ref:`associationproxy_toplevel`.
        While these systems still make use of ORM-specific superclasses
        and structures, they are not :term:`instrumented` by the
        :class:`_orm.Mapper` and instead provide their own functionality
        when they are accessed on a class.

    .. versionadded:: 1.4


    """

    __slots__ = ()

    if typing.TYPE_CHECKING:

        @overload
        def __get__(
            self, instance: None, owner: Any
        ) -> InstrumentedAttribute[_T_co]: ...

        @overload
        def __get__(self, instance: object, owner: Any) -> _T_co: ...

        def __get__(
            self, instance: Optional[object], owner: Any
        ) -> Union[InstrumentedAttribute[_T_co], _T_co]: ...

        @classmethod
        def _empty_constructor(cls, arg1: Any) -> Mapped[_T_co]: ...

        def __set__(
            self, instance: Any, value: Union[SQLCoreOperations[_T_co], _T_co]
        ) -> None: ...

        def __delete__(self, instance: Any) -> None: ...


class _MappedAttribute(Generic[_T_co], TypingOnly):
    """Mixin for attributes which should be replaced by mapper-assigned
    attributes.

    """

    __slots__ = ()


class _DeclarativeMapped(Mapped[_T_co], _MappedAttribute[_T_co]):
    """Mixin for :class:`.MapperProperty` subclasses that allows them to
    be compatible with ORM-annotated declarative mappings.

    """

    __slots__ = ()

    # MappedSQLExpression, Relationship, Composite etc. dont actually do
    # SQL expression behavior. yet there is code that compares them with
    # __eq__(), __ne__(), etc. Since #8847 made Mapped even more full
    # featured including ColumnOperators, we need to have those methods
    # be no-ops for these objects, so return NotImplemented to fall back
    # to normal comparison behavior.
    def operate(self, op: OperatorType, *other: Any, **kwargs: Any) -> Any:
        return NotImplemented

    __sa_operate__ = operate

    def reverse_operate(
        self, op: OperatorType, other: Any, **kwargs: Any
    ) -> Any:
        return NotImplemented


class DynamicMapped(_MappedAnnotationBase[_T_co]):
    """Represent the ORM mapped attribute type for a "dynamic" relationship.

    The :class:`_orm.DynamicMapped` type annotation may be used in an
    :ref:`Annotated Declarative Table <orm_declarative_mapped_column>` mapping
    to indicate that the ``lazy="dynamic"`` loader strategy should be used
    for a particular :func:`_orm.relationship`.

    .. legacy:: The "dynamic" lazy loader strategy is the legacy form of what
       is now the "write_only" strategy described in the section
       :ref:`write_only_relationship`.

    E.g.::

        class User(Base):
            __tablename__ = "user"
            id: Mapped[int] = mapped_column(primary_key=True)
            addresses: DynamicMapped[Address] = relationship(
                cascade="all,delete-orphan"
            )

    See the section :ref:`dynamic_relationship` for background.

    .. versionadded:: 2.0

    .. seealso::

        :ref:`dynamic_relationship` - complete background

        :class:`.WriteOnlyMapped` - fully 2.0 style version

    """

    __slots__ = ()

    if TYPE_CHECKING:

        @overload
        def __get__(
            self, instance: None, owner: Any
        ) -> InstrumentedAttribute[_T_co]: ...

        @overload
        def __get__(
            self, instance: object, owner: Any
        ) -> AppenderQuery[_T_co]: ...

        def __get__(
            self, instance: Optional[object], owner: Any
        ) -> Union[InstrumentedAttribute[_T_co], AppenderQuery[_T_co]]: ...

        def __set__(
            self, instance: Any, value: typing.Collection[_T_co]
        ) -> None: ...


class WriteOnlyMapped(_MappedAnnotationBase[_T_co]):
    """Represent the ORM mapped attribute type for a "write only" relationship.

    The :class:`_orm.WriteOnlyMapped` type annotation may be used in an
    :ref:`Annotated Declarative Table <orm_declarative_mapped_column>` mapping
    to indicate that the ``lazy="write_only"`` loader strategy should be used
    for a particular :func:`_orm.relationship`.

    E.g.::

        class User(Base):
            __tablename__ = "user"
            id: Mapped[int] = mapped_column(primary_key=True)
            addresses: WriteOnlyMapped[Address] = relationship(
                cascade="all,delete-orphan"
            )

    See the section :ref:`write_only_relationship` for background.

    .. versionadded:: 2.0

    .. seealso::

        :ref:`write_only_relationship` - complete background

        :class:`.DynamicMapped` - includes legacy :class:`_orm.Query` support

    """

    __slots__ = ()

    if TYPE_CHECKING:

        @overload
        def __get__(
            self, instance: None, owner: Any
        ) -> InstrumentedAttribute[_T_co]: ...

        @overload
        def __get__(
            self, instance: object, owner: Any
        ) -> WriteOnlyCollection[_T_co]: ...

        def __get__(
            self, instance: Optional[object], owner: Any
        ) -> Union[
            InstrumentedAttribute[_T_co], WriteOnlyCollection[_T_co]
        ]: ...

        def __set__(
            self, instance: Any, value: typing.Collection[_T_co]
        ) -> None: ...
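base.py's class_mapper()/object_state() helpers and the InspectionAttr boolean flags are what inspect() returns are built on. A small sketch (model names invented, not from this commit) showing them at runtime:

# illustrative sketch of the inspection helpers defined above
from sqlalchemy import inspect
from sqlalchemy.orm import DeclarativeBase, Mapped, class_mapper, mapped_column


class Base(DeclarativeBase):
    pass


class Thing(Base):
    __tablename__ = "thing"

    id: Mapped[int] = mapped_column(primary_key=True)


mapper = class_mapper(Thing)  # equivalent to inspect(Thing)
assert mapper.is_mapper       # InspectionAttr boolean flag

obj = Thing(id=1)
state = inspect(obj)          # InstanceState, via object_state()
assert state.is_instance      # another InspectionAttr flag
print(state.mapper is mapper)  # True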
2123
venv/lib/python3.11/site-packages/sqlalchemy/orm/bulk_persistence.py
Normal file
File diff suppressed because it is too large
571
venv/lib/python3.11/site-packages/sqlalchemy/orm/clsregistry.py
Normal file
@@ -0,0 +1,571 @@
|
||||
# orm/clsregistry.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Routines to handle the string class registry used by declarative.
|
||||
|
||||
This system allows specification of classes and expressions used in
|
||||
:func:`_orm.relationship` using strings.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import cast
|
||||
from typing import Dict
|
||||
from typing import Generator
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import MutableMapping
|
||||
from typing import NoReturn
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TypeVar
|
||||
from typing import Union
|
||||
import weakref
|
||||
|
||||
from . import attributes
|
||||
from . import interfaces
|
||||
from .descriptor_props import SynonymProperty
|
||||
from .properties import ColumnProperty
|
||||
from .util import class_mapper
|
||||
from .. import exc
|
||||
from .. import inspection
|
||||
from .. import util
|
||||
from ..sql.schema import _get_table_key
|
||||
from ..util.typing import CallableReference
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .relationships import RelationshipProperty
|
||||
from ..sql.schema import MetaData
|
||||
from ..sql.schema import Table
|
||||
|
||||
_T = TypeVar("_T", bound=Any)
|
||||
|
||||
_ClsRegistryType = MutableMapping[str, Union[type, "ClsRegistryToken"]]
|
||||
|
||||
# strong references to registries which we place in
|
||||
# the _decl_class_registry, which is usually weak referencing.
|
||||
# the internal registries here link to classes with weakrefs and remove
|
||||
# themselves when all references to contained classes are removed.
|
||||
_registries: Set[ClsRegistryToken] = set()
|
||||
|
||||
|
||||
def add_class(
|
||||
classname: str, cls: Type[_T], decl_class_registry: _ClsRegistryType
|
||||
) -> None:
|
||||
"""Add a class to the _decl_class_registry associated with the
|
||||
given declarative class.
|
||||
|
||||
"""
|
||||
if classname in decl_class_registry:
|
||||
# class already exists.
|
||||
existing = decl_class_registry[classname]
|
||||
if not isinstance(existing, _MultipleClassMarker):
|
||||
existing = decl_class_registry[classname] = _MultipleClassMarker(
|
||||
[cls, cast("Type[Any]", existing)]
|
||||
)
|
||||
else:
|
||||
decl_class_registry[classname] = cls
|
||||
|
||||
try:
|
||||
root_module = cast(
|
||||
_ModuleMarker, decl_class_registry["_sa_module_registry"]
|
||||
)
|
||||
except KeyError:
|
||||
decl_class_registry["_sa_module_registry"] = root_module = (
|
||||
_ModuleMarker("_sa_module_registry", None)
|
||||
)
|
||||
|
||||
tokens = cls.__module__.split(".")
|
||||
|
||||
# build up a tree like this:
|
||||
# modulename: myapp.snacks.nuts
|
||||
#
|
||||
# myapp->snack->nuts->(classes)
|
||||
# snack->nuts->(classes)
|
||||
# nuts->(classes)
|
||||
#
|
||||
# this allows partial token paths to be used.
|
||||
while tokens:
|
||||
token = tokens.pop(0)
|
||||
module = root_module.get_module(token)
|
||||
for token in tokens:
|
||||
module = module.get_module(token)
|
||||
|
||||
try:
|
||||
module.add_class(classname, cls)
|
||||
except AttributeError as ae:
|
||||
if not isinstance(module, _ModuleMarker):
|
||||
raise exc.InvalidRequestError(
|
||||
f'name "{classname}" matches both a '
|
||||
"class name and a module name"
|
||||
) from ae
|
||||
else:
|
||||
raise
|
||||
|
||||
|
||||
def remove_class(
|
||||
classname: str, cls: Type[Any], decl_class_registry: _ClsRegistryType
|
||||
) -> None:
|
||||
if classname in decl_class_registry:
|
||||
existing = decl_class_registry[classname]
|
||||
if isinstance(existing, _MultipleClassMarker):
|
||||
existing.remove_item(cls)
|
||||
else:
|
||||
del decl_class_registry[classname]
|
||||
|
||||
try:
|
||||
root_module = cast(
|
||||
_ModuleMarker, decl_class_registry["_sa_module_registry"]
|
||||
)
|
||||
except KeyError:
|
||||
return
|
||||
|
||||
tokens = cls.__module__.split(".")
|
||||
|
||||
while tokens:
|
||||
token = tokens.pop(0)
|
||||
module = root_module.get_module(token)
|
||||
for token in tokens:
|
||||
module = module.get_module(token)
|
||||
try:
|
||||
module.remove_class(classname, cls)
|
||||
except AttributeError:
|
||||
if not isinstance(module, _ModuleMarker):
|
||||
pass
|
||||
else:
|
||||
raise
|
||||
|
||||
|
||||
def _key_is_empty(
|
||||
key: str,
|
||||
decl_class_registry: _ClsRegistryType,
|
||||
test: Callable[[Any], bool],
|
||||
) -> bool:
|
||||
"""test if a key is empty of a certain object.
|
||||
|
||||
used for unit tests against the registry to see if garbage collection
|
||||
is working.
|
||||
|
||||
"test" is a callable that will be passed an object should return True
|
||||
if the given object is the one we were looking for.
|
||||
|
||||
We can't pass the actual object itself b.c. this is for testing garbage
|
||||
collection; the caller will have to have removed references to the
|
||||
object itself.
|
||||
|
||||
"""
|
||||
if key not in decl_class_registry:
|
||||
return True
|
||||
|
||||
thing = decl_class_registry[key]
|
||||
if isinstance(thing, _MultipleClassMarker):
|
||||
for sub_thing in thing.contents:
|
||||
if test(sub_thing):
|
||||
return False
|
||||
else:
|
||||
raise NotImplementedError("unknown codepath")
|
||||
else:
|
||||
return not test(thing)
|
||||
|
||||
|
||||
class ClsRegistryToken:
|
||||
"""an object that can be in the registry._class_registry as a value."""
|
||||
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
class _MultipleClassMarker(ClsRegistryToken):
|
||||
"""refers to multiple classes of the same name
|
||||
within _decl_class_registry.
|
||||
|
||||
"""
|
||||
|
||||
__slots__ = "on_remove", "contents", "__weakref__"
|
||||
|
||||
contents: Set[weakref.ref[Type[Any]]]
|
||||
on_remove: CallableReference[Optional[Callable[[], None]]]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
classes: Iterable[Type[Any]],
|
||||
on_remove: Optional[Callable[[], None]] = None,
|
||||
):
|
||||
self.on_remove = on_remove
|
||||
self.contents = {
|
||||
weakref.ref(item, self._remove_item) for item in classes
|
||||
}
|
||||
_registries.add(self)
|
||||
|
||||
def remove_item(self, cls: Type[Any]) -> None:
|
||||
self._remove_item(weakref.ref(cls))
|
||||
|
||||
def __iter__(self) -> Generator[Optional[Type[Any]], None, None]:
|
||||
return (ref() for ref in self.contents)
|
||||
|
||||
def attempt_get(self, path: List[str], key: str) -> Type[Any]:
|
||||
if len(self.contents) > 1:
|
||||
raise exc.InvalidRequestError(
|
||||
'Multiple classes found for path "%s" '
|
||||
"in the registry of this declarative "
|
||||
"base. Please use a fully module-qualified path."
|
||||
% (".".join(path + [key]))
|
||||
)
|
||||
else:
|
||||
ref = list(self.contents)[0]
|
||||
cls = ref()
|
||||
if cls is None:
|
||||
raise NameError(key)
|
||||
return cls
|
||||
|
||||
def _remove_item(self, ref: weakref.ref[Type[Any]]) -> None:
|
||||
self.contents.discard(ref)
|
||||
if not self.contents:
|
||||
_registries.discard(self)
|
||||
if self.on_remove:
|
||||
self.on_remove()
|
||||
|
||||
def add_item(self, item: Type[Any]) -> None:
|
||||
# protect against class registration race condition against
|
||||
# asynchronous garbage collection calling _remove_item,
|
||||
# [ticket:3208] and [ticket:10782]
|
||||
modules = {
|
||||
cls.__module__
|
||||
for cls in [ref() for ref in list(self.contents)]
|
||||
if cls is not None
|
||||
}
|
||||
if item.__module__ in modules:
|
||||
util.warn(
|
||||
"This declarative base already contains a class with the "
|
||||
"same class name and module name as %s.%s, and will "
|
||||
"be replaced in the string-lookup table."
|
||||
% (item.__module__, item.__name__)
|
||||
)
|
||||
self.contents.add(weakref.ref(item, self._remove_item))
|
||||
|
||||
|
||||
class _ModuleMarker(ClsRegistryToken):
    """Refers to a module name within
    _decl_class_registry.

    """

    __slots__ = "parent", "name", "contents", "mod_ns", "path", "__weakref__"

    parent: Optional[_ModuleMarker]
    contents: Dict[str, Union[_ModuleMarker, _MultipleClassMarker]]
    mod_ns: _ModNS
    path: List[str]

    def __init__(self, name: str, parent: Optional[_ModuleMarker]):
        self.parent = parent
        self.name = name
        self.contents = {}
        self.mod_ns = _ModNS(self)
        if self.parent:
            self.path = self.parent.path + [self.name]
        else:
            self.path = []
        _registries.add(self)

    def __contains__(self, name: str) -> bool:
        return name in self.contents

    def __getitem__(self, name: str) -> ClsRegistryToken:
        return self.contents[name]

    def _remove_item(self, name: str) -> None:
        self.contents.pop(name, None)
        if not self.contents:
            if self.parent is not None:
                self.parent._remove_item(self.name)
            _registries.discard(self)

    def resolve_attr(self, key: str) -> Union[_ModNS, Type[Any]]:
        return self.mod_ns.__getattr__(key)

    def get_module(self, name: str) -> _ModuleMarker:
        if name not in self.contents:
            marker = _ModuleMarker(name, self)
            self.contents[name] = marker
        else:
            marker = cast(_ModuleMarker, self.contents[name])
        return marker

    def add_class(self, name: str, cls: Type[Any]) -> None:
        if name in self.contents:
            existing = cast(_MultipleClassMarker, self.contents[name])
            try:
                existing.add_item(cls)
            except AttributeError as ae:
                if not isinstance(existing, _MultipleClassMarker):
                    raise exc.InvalidRequestError(
                        f'name "{name}" matches both a '
                        "class name and a module name"
                    ) from ae
                else:
                    raise
        else:
            existing = self.contents[name] = _MultipleClassMarker(
                [cls], on_remove=lambda: self._remove_item(name)
            )

    def remove_class(self, name: str, cls: Type[Any]) -> None:
        if name in self.contents:
            existing = cast(_MultipleClassMarker, self.contents[name])
            existing.remove_item(cls)


class _ModNS:
    __slots__ = ("__parent",)

    __parent: _ModuleMarker

    def __init__(self, parent: _ModuleMarker):
        self.__parent = parent

    def __getattr__(self, key: str) -> Union[_ModNS, Type[Any]]:
        try:
            value = self.__parent.contents[key]
        except KeyError:
            pass
        else:
            if value is not None:
                if isinstance(value, _ModuleMarker):
                    return value.mod_ns
                else:
                    assert isinstance(value, _MultipleClassMarker)
                    return value.attempt_get(self.__parent.path, key)
        raise NameError(
            "Module %r has no mapped classes "
            "registered under the name %r" % (self.__parent.name, key)
        )


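# ---------------------------------------------------------------------------
# Illustrative sketch (editor-added, not part of the library source): how the
# registry tokens above surface to users.  When two mapped classes share a
# class name, _MultipleClassMarker.attempt_get() raises unless the lookup
# string is module-qualified, in which case _ModuleMarker / _ModNS perform
# the resolution.  Model names below are hypothetical; overriding __module__
# stands in for defining the classes in two real modules.
def _demo_module_qualified_lookup() -> None:
    from sqlalchemy import ForeignKey, Integer
    from sqlalchemy.orm import DeclarativeBase, mapped_column, relationship

    class Base(DeclarativeBase):
        pass

    class Address(Base):
        __module__ = "myapp.home"
        __tablename__ = "home_address"
        id = mapped_column(Integer, primary_key=True)
        user_id = mapped_column(ForeignKey("user_account.id"))

    class Address(Base):  # noqa: F811 - same name, different "module"
        __module__ = "myapp.work"
        __tablename__ = "work_address"
        id = mapped_column(Integer, primary_key=True)

    class User(Base):
        __tablename__ = "user_account"
        id = mapped_column(Integer, primary_key=True)
        # relationship("Address") would raise InvalidRequestError
        # ("Multiple classes found for path ..."); the module-qualified
        # path resolves through the _sa_module_registry instead:
        home_addresses = relationship("myapp.home.Address")

    Base.registry.configure()
# ---------------------------------------------------------------------------

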
class _GetColumns:
    __slots__ = ("cls",)

    cls: Type[Any]

    def __init__(self, cls: Type[Any]):
        self.cls = cls

    def __getattr__(self, key: str) -> Any:
        mp = class_mapper(self.cls, configure=False)
        if mp:
            if key not in mp.all_orm_descriptors:
                raise AttributeError(
                    "Class %r does not have a mapped column named %r"
                    % (self.cls, key)
                )

            desc = mp.all_orm_descriptors[key]
            if desc.extension_type is interfaces.NotExtension.NOT_EXTENSION:
                assert isinstance(desc, attributes.QueryableAttribute)
                prop = desc.property
                if isinstance(prop, SynonymProperty):
                    key = prop.name
                elif not isinstance(prop, ColumnProperty):
                    raise exc.InvalidRequestError(
                        "Property %r is not an instance of"
                        " ColumnProperty (i.e. does not correspond"
                        " directly to a Column)." % key
                    )
        return getattr(self.cls, key)


inspection._inspects(_GetColumns)(
    lambda target: inspection.inspect(target.cls)
)


class _GetTable:
    __slots__ = "key", "metadata"

    key: str
    metadata: MetaData

    def __init__(self, key: str, metadata: MetaData):
        self.key = key
        self.metadata = metadata

    def __getattr__(self, key: str) -> Table:
        return self.metadata.tables[_get_table_key(key, self.key)]


def _determine_container(key: str, value: Any) -> _GetColumns:
    if isinstance(value, _MultipleClassMarker):
        value = value.attempt_get([], key)
    return _GetColumns(value)


class _class_resolver:
    __slots__ = (
        "cls",
        "prop",
        "arg",
        "fallback",
        "_dict",
        "_resolvers",
        "favor_tables",
    )

    cls: Type[Any]
    prop: RelationshipProperty[Any]
    fallback: Mapping[str, Any]
    arg: str
    favor_tables: bool
    _resolvers: Tuple[Callable[[str], Any], ...]

    def __init__(
        self,
        cls: Type[Any],
        prop: RelationshipProperty[Any],
        fallback: Mapping[str, Any],
        arg: str,
        favor_tables: bool = False,
    ):
        self.cls = cls
        self.prop = prop
        self.arg = arg
        self.fallback = fallback
        self._dict = util.PopulateDict(self._access_cls)
        self._resolvers = ()
        self.favor_tables = favor_tables

    def _access_cls(self, key: str) -> Any:
        cls = self.cls

        manager = attributes.manager_of_class(cls)
        decl_base = manager.registry
        assert decl_base is not None
        decl_class_registry = decl_base._class_registry
        metadata = decl_base.metadata

        if self.favor_tables:
            if key in metadata.tables:
                return metadata.tables[key]
            elif key in metadata._schemas:
                return _GetTable(key, getattr(cls, "metadata", metadata))

        if key in decl_class_registry:
            return _determine_container(key, decl_class_registry[key])

        if not self.favor_tables:
            if key in metadata.tables:
                return metadata.tables[key]
            elif key in metadata._schemas:
                return _GetTable(key, getattr(cls, "metadata", metadata))

        if "_sa_module_registry" in decl_class_registry and key in cast(
            _ModuleMarker, decl_class_registry["_sa_module_registry"]
        ):
            registry = cast(
                _ModuleMarker, decl_class_registry["_sa_module_registry"]
            )
            return registry.resolve_attr(key)
        elif self._resolvers:
            for resolv in self._resolvers:
                value = resolv(key)
                if value is not None:
                    return value

        return self.fallback[key]

    def _raise_for_name(self, name: str, err: Exception) -> NoReturn:
        generic_match = re.match(r"(.+)\[(.+)\]", name)

        if generic_match:
            clsarg = generic_match.group(2).strip("'")
            raise exc.InvalidRequestError(
                f"When initializing mapper {self.prop.parent}, "
                f'expression "relationship({self.arg!r})" seems to be '
                "using a generic class as the argument to relationship(); "
                "please state the generic argument "
                "using an annotation, e.g. "
                f'"{self.prop.key}: Mapped[{generic_match.group(1)}'
                f"['{clsarg}']] = relationship()\""
            ) from err
        else:
            raise exc.InvalidRequestError(
                "When initializing mapper %s, expression %r failed to "
                "locate a name (%r). If this is a class name, consider "
                "adding this relationship() to the %r class after "
                "both dependent classes have been defined."
                % (self.prop.parent, self.arg, name, self.cls)
            ) from err

    def _resolve_name(self) -> Union[Table, Type[Any], _ModNS]:
        name = self.arg
        d = self._dict
        rval = None
        try:
            for token in name.split("."):
                if rval is None:
                    rval = d[token]
                else:
                    rval = getattr(rval, token)
        except KeyError as err:
            self._raise_for_name(name, err)
        except NameError as n:
            self._raise_for_name(n.args[0], n)
        else:
            if isinstance(rval, _GetColumns):
                return rval.cls
            else:
                if TYPE_CHECKING:
                    assert isinstance(rval, (type, Table, _ModNS))
                return rval

    def __call__(self) -> Any:
        try:
            x = eval(self.arg, globals(), self._dict)

            if isinstance(x, _GetColumns):
                return x.cls
            else:
                return x
        except NameError as n:
            self._raise_for_name(n.args[0], n)


_fallback_dict: Mapping[str, Any] = None  # type: ignore


def _resolver(cls: Type[Any], prop: RelationshipProperty[Any]) -> Tuple[
    Callable[[str], Callable[[], Union[Type[Any], Table, _ModNS]]],
    Callable[[str, bool], _class_resolver],
]:
    global _fallback_dict

    if _fallback_dict is None:
        import sqlalchemy
        from . import foreign
        from . import remote

        _fallback_dict = util.immutabledict(sqlalchemy.__dict__).union(
            {"foreign": foreign, "remote": remote}
        )

    def resolve_arg(arg: str, favor_tables: bool = False) -> _class_resolver:
        return _class_resolver(
            cls, prop, _fallback_dict, arg, favor_tables=favor_tables
        )

    def resolve_name(
        arg: str,
    ) -> Callable[[], Union[Type[Any], Table, _ModNS]]:
        return _class_resolver(cls, prop, _fallback_dict, arg)._resolve_name

    return resolve_name, resolve_arg
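

# ---------------------------------------------------------------------------
# Illustrative sketch (editor-added, not part of the library source): the
# resolver machinery above is what lets relationship() accept a
# not-yet-defined class name as a string and evaluate it later, when mappers
# are configured.  Model names are hypothetical.
def _demo_late_bound_relationship() -> None:
    from sqlalchemy import ForeignKey, Integer
    from sqlalchemy.orm import DeclarativeBase, mapped_column, relationship

    class Base(DeclarativeBase):
        pass

    class Parent(Base):
        __tablename__ = "parent"
        id = mapped_column(Integer, primary_key=True)
        # "Child" does not exist yet; the string is stored and later
        # evaluated through _class_resolver at configuration time.
        children = relationship("Child")

    class Child(Base):
        __tablename__ = "child"
        id = mapped_column(Integer, primary_key=True)
        parent_id = mapped_column(ForeignKey("parent.id"))

    Base.registry.configure()  # raises InvalidRequestError if unresolvable
    assert Parent.children.property.mapper.class_ is Child
# ---------------------------------------------------------------------------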
1627
venv/lib/python3.11/site-packages/sqlalchemy/orm/collections.py
Normal file
File diff suppressed because it is too large
3336
venv/lib/python3.11/site-packages/sqlalchemy/orm/context.py
Normal file
File diff suppressed because it is too large
1917
venv/lib/python3.11/site-packages/sqlalchemy/orm/decl_api.py
Normal file
File diff suppressed because it is too large
2188
venv/lib/python3.11/site-packages/sqlalchemy/orm/decl_base.py
Normal file
File diff suppressed because it is too large
1304
venv/lib/python3.11/site-packages/sqlalchemy/orm/dependency.py
Normal file
File diff suppressed because it is too large
1077
venv/lib/python3.11/site-packages/sqlalchemy/orm/descriptor_props.py
Normal file
File diff suppressed because it is too large
300
venv/lib/python3.11/site-packages/sqlalchemy/orm/dynamic.py
Normal file
@ -0,0 +1,300 @@
# orm/dynamic.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php


"""Dynamic collection API.

Dynamic collections act like Query() objects for read operations and support
basic add/delete mutation.

.. legacy:: the "dynamic" loader is a legacy feature, superseded by the
   "write_only" loader.


"""

from __future__ import annotations

from typing import Any
from typing import Iterable
from typing import Iterator
from typing import List
from typing import Optional
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar
from typing import Union

from . import attributes
from . import exc as orm_exc
from . import relationships
from . import util as orm_util
from .base import PassiveFlag
from .query import Query
from .session import object_session
from .writeonly import AbstractCollectionWriter
from .writeonly import WriteOnlyAttributeImpl
from .writeonly import WriteOnlyHistory
from .writeonly import WriteOnlyLoader
from .. import util
from ..engine import result


if TYPE_CHECKING:
    from . import QueryableAttribute
    from .mapper import Mapper
    from .relationships import _RelationshipOrderByArg
    from .session import Session
    from .state import InstanceState
    from .util import AliasedClass
    from ..event import _Dispatch
    from ..sql.elements import ColumnElement

_T = TypeVar("_T", bound=Any)


class DynamicCollectionHistory(WriteOnlyHistory[_T]):
    def __init__(
        self,
        attr: DynamicAttributeImpl,
        state: InstanceState[_T],
        passive: PassiveFlag,
        apply_to: Optional[DynamicCollectionHistory[_T]] = None,
    ) -> None:
        if apply_to:
            coll = AppenderQuery(attr, state).autoflush(False)
            self.unchanged_items = util.OrderedIdentitySet(coll)
            self.added_items = apply_to.added_items
            self.deleted_items = apply_to.deleted_items
            self._reconcile_collection = True
        else:
            self.deleted_items = util.OrderedIdentitySet()
            self.added_items = util.OrderedIdentitySet()
            self.unchanged_items = util.OrderedIdentitySet()
            self._reconcile_collection = False


class DynamicAttributeImpl(WriteOnlyAttributeImpl):
    _supports_dynamic_iteration = True
    collection_history_cls = DynamicCollectionHistory[Any]
    query_class: Type[AppenderMixin[Any]]  # type: ignore[assignment]

    def __init__(
        self,
        class_: Union[Type[Any], AliasedClass[Any]],
        key: str,
        dispatch: _Dispatch[QueryableAttribute[Any]],
        target_mapper: Mapper[_T],
        order_by: _RelationshipOrderByArg,
        query_class: Optional[Type[AppenderMixin[_T]]] = None,
        **kw: Any,
    ) -> None:
        attributes.AttributeImpl.__init__(
            self, class_, key, None, dispatch, **kw
        )
        self.target_mapper = target_mapper
        if order_by:
            self.order_by = tuple(order_by)
        if not query_class:
            self.query_class = AppenderQuery
        elif AppenderMixin in query_class.mro():
            self.query_class = query_class
        else:
            self.query_class = mixin_user_query(query_class)


@relationships.RelationshipProperty.strategy_for(lazy="dynamic")
class DynaLoader(WriteOnlyLoader):
    impl_class = DynamicAttributeImpl


class AppenderMixin(AbstractCollectionWriter[_T]):
    """A mixin that expects to be mixed in with a Query class,
    alongside AbstractCollectionWriter.

    """

    query_class: Optional[Type[Query[_T]]] = None
    _order_by_clauses: Tuple[ColumnElement[Any], ...]

    def __init__(
        self, attr: DynamicAttributeImpl, state: InstanceState[_T]
    ) -> None:
        Query.__init__(
            self,  # type: ignore[arg-type]
            attr.target_mapper,
            None,
        )
        super().__init__(attr, state)

    @property
    def session(self) -> Optional[Session]:
        sess = object_session(self.instance)
        if sess is not None and sess.autoflush and self.instance in sess:
            sess.flush()
        if not orm_util.has_identity(self.instance):
            return None
        else:
            return sess

    @session.setter
    def session(self, session: Session) -> None:
        self.sess = session

    def _iter(self) -> Union[result.ScalarResult[_T], result.Result[_T]]:
        sess = self.session
        if sess is None:
            state = attributes.instance_state(self.instance)
            if state.detached:
                util.warn(
                    "Instance %s is detached, dynamic relationship cannot "
                    "return a correct result.  This warning will become "
                    "a DetachedInstanceError in a future release."
                    % (orm_util.state_str(state))
                )

            return result.IteratorResult(
                result.SimpleResultMetaData([self.attr.class_.__name__]),
                iter(
                    self.attr._get_collection_history(
                        attributes.instance_state(self.instance),
                        PassiveFlag.PASSIVE_NO_INITIALIZE,
                    ).added_items
                ),
                _source_supports_scalars=True,
            ).scalars()
        else:
            return self._generate(sess)._iter()

    if TYPE_CHECKING:

        def __iter__(self) -> Iterator[_T]: ...

    def __getitem__(self, index: Any) -> Union[_T, List[_T]]:
        sess = self.session
        if sess is None:
            return self.attr._get_collection_history(
                attributes.instance_state(self.instance),
                PassiveFlag.PASSIVE_NO_INITIALIZE,
            ).indexed(index)
        else:
            return self._generate(sess).__getitem__(index)  # type: ignore[no-any-return] # noqa: E501

    def count(self) -> int:
        sess = self.session
        if sess is None:
            return len(
                self.attr._get_collection_history(
                    attributes.instance_state(self.instance),
                    PassiveFlag.PASSIVE_NO_INITIALIZE,
                ).added_items
            )
        else:
            return self._generate(sess).count()

    def _generate(
        self,
        sess: Optional[Session] = None,
    ) -> Query[_T]:
        # note we're returning an entirely new Query class instance
        # here without any assignment capabilities; the class of this
        # query is determined by the session.
        instance = self.instance
        if sess is None:
            sess = object_session(instance)
            if sess is None:
                raise orm_exc.DetachedInstanceError(
                    "Parent instance %s is not bound to a Session, and no "
                    "contextual session is established; lazy load operation "
                    "of attribute '%s' cannot proceed"
                    % (orm_util.instance_str(instance), self.attr.key)
                )

        if self.query_class:
            query = self.query_class(self.attr.target_mapper, session=sess)
        else:
            query = sess.query(self.attr.target_mapper)

        query._where_criteria = self._where_criteria
        query._from_obj = self._from_obj
        query._order_by_clauses = self._order_by_clauses

        return query

    def add_all(self, iterator: Iterable[_T]) -> None:
        """Add an iterable of items to this :class:`_orm.AppenderQuery`.

        The given items will be persisted to the database in terms of
        the parent instance's collection on the next flush.

        This method is provided to assist in delivering forwards-compatibility
        with the :class:`_orm.WriteOnlyCollection` collection class.

        .. versionadded:: 2.0

        """
        self._add_all_impl(iterator)

    def add(self, item: _T) -> None:
        """Add an item to this :class:`_orm.AppenderQuery`.

        The given item will be persisted to the database in terms of
        the parent instance's collection on the next flush.

        This method is provided to assist in delivering forwards-compatibility
        with the :class:`_orm.WriteOnlyCollection` collection class.

        .. versionadded:: 2.0

        """
        self._add_all_impl([item])

    def extend(self, iterator: Iterable[_T]) -> None:
        """Add an iterable of items to this :class:`_orm.AppenderQuery`.

        The given items will be persisted to the database in terms of
        the parent instance's collection on the next flush.

        """
        self._add_all_impl(iterator)

    def append(self, item: _T) -> None:
        """Append an item to this :class:`_orm.AppenderQuery`.

        The given item will be persisted to the database in terms of
        the parent instance's collection on the next flush.

        """
        self._add_all_impl([item])

    def remove(self, item: _T) -> None:
        """Remove an item from this :class:`_orm.AppenderQuery`.

        The given item will be removed from the parent instance's collection
        on the next flush.

        """
        self._remove_impl(item)


class AppenderQuery(AppenderMixin[_T], Query[_T]):  # type: ignore[misc]
    """A dynamic query that supports basic collection storage operations.

    Methods on :class:`.AppenderQuery` include all methods of
    :class:`_orm.Query`, plus additional methods used for collection
    persistence.


    """


def mixin_user_query(cls: Any) -> type[AppenderMixin[Any]]:
    """Return a new class with AppenderQuery functionality layered over."""
    name = "Appender" + cls.__name__
    return type(name, (AppenderMixin, cls), {"query_class": cls})
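

# ---------------------------------------------------------------------------
# Illustrative sketch (editor-added, not part of the library source): typical
# use of the lazy="dynamic" loader defined above.  The collection attribute
# is an AppenderQuery, so reads are issued as SQL while append()/remove()
# stage changes for the next flush.  Model names are hypothetical.
def _demo_dynamic_relationship() -> None:
    from sqlalchemy import ForeignKey, Integer, String, create_engine
    from sqlalchemy.orm import (
        DeclarativeBase,
        Session,
        mapped_column,
        relationship,
    )

    class Base(DeclarativeBase):
        pass

    class User(Base):
        __tablename__ = "user_account"
        id = mapped_column(Integer, primary_key=True)
        addresses = relationship("Address", lazy="dynamic")

    class Address(Base):
        __tablename__ = "address"
        id = mapped_column(Integer, primary_key=True)
        email = mapped_column(String(50))
        user_id = mapped_column(ForeignKey("user_account.id"))

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        user = User(id=1)
        user.addresses.append(Address(email="a@example.com"))
        session.add(user)
        session.commit()

        # the read side behaves like Query: filtering happens in SQL
        q = user.addresses.filter(Address.email.like("%@example.com"))
        assert q.count() == 1
# ---------------------------------------------------------------------------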
379
venv/lib/python3.11/site-packages/sqlalchemy/orm/evaluator.py
Normal file
@ -0,0 +1,379 @@
# orm/evaluator.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors

"""Evaluation functions used **INTERNALLY** by ORM DML use cases.


This module is **private, for internal use by SQLAlchemy**.

.. versionchanged:: 2.0.4 renamed ``EvaluatorCompiler`` to
   ``_EvaluatorCompiler``.

"""


from __future__ import annotations

from typing import Type

from . import exc as orm_exc
from .base import LoaderCallableStatus
from .base import PassiveFlag
from .. import exc
from .. import inspect
from ..sql import and_
from ..sql import operators
from ..sql.sqltypes import Concatenable
from ..sql.sqltypes import Integer
from ..sql.sqltypes import Numeric
from ..util import warn_deprecated


class UnevaluatableError(exc.InvalidRequestError):
    pass


class _NoObject(operators.ColumnOperators):
    def operate(self, *arg, **kw):
        return None

    def reverse_operate(self, *arg, **kw):
        return None


class _ExpiredObject(operators.ColumnOperators):
    def operate(self, *arg, **kw):
        return self

    def reverse_operate(self, *arg, **kw):
        return self


_NO_OBJECT = _NoObject()
_EXPIRED_OBJECT = _ExpiredObject()


class _EvaluatorCompiler:
    def __init__(self, target_cls=None):
        self.target_cls = target_cls

    def process(self, clause, *clauses):
        if clauses:
            clause = and_(clause, *clauses)

        meth = getattr(self, f"visit_{clause.__visit_name__}", None)
        if not meth:
            raise UnevaluatableError(
                f"Cannot evaluate {type(clause).__name__}"
            )
        return meth(clause)

    def visit_grouping(self, clause):
        return self.process(clause.element)

    def visit_null(self, clause):
        return lambda obj: None

    def visit_false(self, clause):
        return lambda obj: False

    def visit_true(self, clause):
        return lambda obj: True

    def visit_column(self, clause):
        try:
            parentmapper = clause._annotations["parentmapper"]
        except KeyError as ke:
            raise UnevaluatableError(
                f"Cannot evaluate column: {clause}"
            ) from ke

        if self.target_cls and not issubclass(
            self.target_cls, parentmapper.class_
        ):
            raise UnevaluatableError(
                "Can't evaluate criteria against "
                f"alternate class {parentmapper.class_}"
            )

        parentmapper._check_configure()

        # we'd like to use "proxy_key" annotation to get the "key", however
        # in relationship primaryjoin cases proxy_key is sometimes deannotated
        # and sometimes apparently not present in the first place (?).
        # While I can stop it from being deannotated (though need to see if
        # this breaks other things), not sure right now about cases where it's
        # not there in the first place.  can fix at some later point.
        # key = clause._annotations["proxy_key"]

        # for now, use the old way
        try:
            key = parentmapper._columntoproperty[clause].key
        except orm_exc.UnmappedColumnError as err:
            raise UnevaluatableError(
                f"Cannot evaluate expression: {err}"
            ) from err

        # note this used to fall back to a simple `getattr(obj, key)`
        # evaluator if impl was None; as of #8656, we ensure mappers are
        # configured so that impl is available
        impl = parentmapper.class_manager[key].impl

        def get_corresponding_attr(obj):
            if obj is None:
                return _NO_OBJECT
            state = inspect(obj)
            dict_ = state.dict

            value = impl.get(
                state, dict_, passive=PassiveFlag.PASSIVE_NO_FETCH
            )
            if value is LoaderCallableStatus.PASSIVE_NO_RESULT:
                return _EXPIRED_OBJECT
            return value

        return get_corresponding_attr

    def visit_tuple(self, clause):
        return self.visit_clauselist(clause)

    def visit_expression_clauselist(self, clause):
        return self.visit_clauselist(clause)

    def visit_clauselist(self, clause):
        evaluators = [self.process(clause) for clause in clause.clauses]

        dispatch = (
            f"visit_{clause.operator.__name__.rstrip('_')}_clauselist_op"
        )
        meth = getattr(self, dispatch, None)
        if meth:
            return meth(clause.operator, evaluators, clause)
        else:
            raise UnevaluatableError(
                f"Cannot evaluate clauselist with operator {clause.operator}"
            )

    def visit_binary(self, clause):
        eval_left = self.process(clause.left)
        eval_right = self.process(clause.right)

        dispatch = f"visit_{clause.operator.__name__.rstrip('_')}_binary_op"
        meth = getattr(self, dispatch, None)
        if meth:
            return meth(clause.operator, eval_left, eval_right, clause)
        else:
            raise UnevaluatableError(
                f"Cannot evaluate {type(clause).__name__} with "
                f"operator {clause.operator}"
            )

    def visit_or_clauselist_op(self, operator, evaluators, clause):
        def evaluate(obj):
            has_null = False
            for sub_evaluate in evaluators:
                value = sub_evaluate(obj)
                if value is _EXPIRED_OBJECT:
                    return _EXPIRED_OBJECT
                elif value:
                    return True
                has_null = has_null or value is None
            if has_null:
                return None
            return False

        return evaluate

    def visit_and_clauselist_op(self, operator, evaluators, clause):
        def evaluate(obj):
            for sub_evaluate in evaluators:
                value = sub_evaluate(obj)
                if value is _EXPIRED_OBJECT:
                    return _EXPIRED_OBJECT

                if not value:
                    if value is None or value is _NO_OBJECT:
                        return None
                    return False
            return True

        return evaluate

    def visit_comma_op_clauselist_op(self, operator, evaluators, clause):
        def evaluate(obj):
            values = []
            for sub_evaluate in evaluators:
                value = sub_evaluate(obj)
                if value is _EXPIRED_OBJECT:
                    return _EXPIRED_OBJECT
                elif value is None or value is _NO_OBJECT:
                    return None
                values.append(value)
            return tuple(values)

        return evaluate

    def visit_custom_op_binary_op(
        self, operator, eval_left, eval_right, clause
    ):
        if operator.python_impl:
            return self._straight_evaluate(
                operator, eval_left, eval_right, clause
            )
        else:
            raise UnevaluatableError(
                f"Custom operator {operator.opstring!r} can't be evaluated "
                "in Python unless it specifies a callable using "
                "`.python_impl`."
            )

    def visit_is_binary_op(self, operator, eval_left, eval_right, clause):
        def evaluate(obj):
            left_val = eval_left(obj)
            right_val = eval_right(obj)
            if left_val is _EXPIRED_OBJECT or right_val is _EXPIRED_OBJECT:
                return _EXPIRED_OBJECT
            return left_val == right_val

        return evaluate

    def visit_is_not_binary_op(self, operator, eval_left, eval_right, clause):
        def evaluate(obj):
            left_val = eval_left(obj)
            right_val = eval_right(obj)
            if left_val is _EXPIRED_OBJECT or right_val is _EXPIRED_OBJECT:
                return _EXPIRED_OBJECT
            return left_val != right_val

        return evaluate

    def _straight_evaluate(self, operator, eval_left, eval_right, clause):
        def evaluate(obj):
            left_val = eval_left(obj)
            right_val = eval_right(obj)
            if left_val is _EXPIRED_OBJECT or right_val is _EXPIRED_OBJECT:
                return _EXPIRED_OBJECT
            elif left_val is None or right_val is None:
                return None

            return operator(eval_left(obj), eval_right(obj))

        return evaluate

    def _straight_evaluate_numeric_only(
        self, operator, eval_left, eval_right, clause
    ):
        if clause.left.type._type_affinity not in (
            Numeric,
            Integer,
        ) or clause.right.type._type_affinity not in (Numeric, Integer):
            raise UnevaluatableError(
                f'Cannot evaluate math operator "{operator.__name__}" for '
                f"datatypes {clause.left.type}, {clause.right.type}"
            )

        return self._straight_evaluate(operator, eval_left, eval_right, clause)

    visit_add_binary_op = _straight_evaluate_numeric_only
    visit_mul_binary_op = _straight_evaluate_numeric_only
    visit_sub_binary_op = _straight_evaluate_numeric_only
    visit_mod_binary_op = _straight_evaluate_numeric_only
    visit_truediv_binary_op = _straight_evaluate_numeric_only
    visit_lt_binary_op = _straight_evaluate
    visit_le_binary_op = _straight_evaluate
    visit_ne_binary_op = _straight_evaluate
    visit_gt_binary_op = _straight_evaluate
    visit_ge_binary_op = _straight_evaluate
    visit_eq_binary_op = _straight_evaluate

    def visit_in_op_binary_op(self, operator, eval_left, eval_right, clause):
        return self._straight_evaluate(
            lambda a, b: a in b if a is not _NO_OBJECT else None,
            eval_left,
            eval_right,
            clause,
        )

    def visit_not_in_op_binary_op(
        self, operator, eval_left, eval_right, clause
    ):
        return self._straight_evaluate(
            lambda a, b: a not in b if a is not _NO_OBJECT else None,
            eval_left,
            eval_right,
            clause,
        )

    def visit_concat_op_binary_op(
        self, operator, eval_left, eval_right, clause
    ):
        if not issubclass(
            clause.left.type._type_affinity, Concatenable
        ) or not issubclass(clause.right.type._type_affinity, Concatenable):
            raise UnevaluatableError(
                f"Cannot evaluate concatenate operator "
                f'"{operator.__name__}" for '
                f"datatypes {clause.left.type}, {clause.right.type}"
            )

        return self._straight_evaluate(
            lambda a, b: a + b, eval_left, eval_right, clause
        )

    def visit_startswith_op_binary_op(
        self, operator, eval_left, eval_right, clause
    ):
        return self._straight_evaluate(
            lambda a, b: a.startswith(b), eval_left, eval_right, clause
        )

    def visit_endswith_op_binary_op(
        self, operator, eval_left, eval_right, clause
    ):
        return self._straight_evaluate(
            lambda a, b: a.endswith(b), eval_left, eval_right, clause
        )

    def visit_unary(self, clause):
        eval_inner = self.process(clause.element)
        if clause.operator is operators.inv:

            def evaluate(obj):
                value = eval_inner(obj)
                if value is _EXPIRED_OBJECT:
                    return _EXPIRED_OBJECT
                elif value is None:
                    return None
                return not value

            return evaluate
        raise UnevaluatableError(
            f"Cannot evaluate {type(clause).__name__} "
            f"with operator {clause.operator}"
        )

    def visit_bindparam(self, clause):
        if clause.callable:
            val = clause.callable()
        else:
            val = clause.value
        return lambda obj: val


def __getattr__(name: str) -> Type[_EvaluatorCompiler]:
    if name == "EvaluatorCompiler":
        warn_deprecated(
            "Direct use of 'EvaluatorCompiler' is not supported, and this "
            "name will be removed in a future release.  "
            "'_EvaluatorCompiler' is for internal use only",
            "2.0",
        )
        return _EvaluatorCompiler
    else:
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
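

# ---------------------------------------------------------------------------
# Illustrative sketch (editor-added, not part of the library source): what
# the visitors above produce.  _EvaluatorCompiler is private API (underscored
# and subject to change); it backs ORM-enabled UPDATE/DELETE with
# synchronize_session="evaluate", which applies SQL criteria to in-session
# objects as plain Python.  Model names are hypothetical.
def _demo_evaluator() -> None:
    from sqlalchemy import Integer, String
    from sqlalchemy.orm import DeclarativeBase, mapped_column

    class Base(DeclarativeBase):
        pass

    class User(Base):
        __tablename__ = "user_account"
        id = mapped_column(Integer, primary_key=True)
        name = mapped_column(String(50))

    evaluate = _EvaluatorCompiler(target_cls=User).process(User.name == "ed")

    assert evaluate(User(id=1, name="ed")) is True
    assert evaluate(User(id=2, name="mary")) is False
    # SQL three-valued logic is preserved: comparing against NULL -> None
    assert evaluate(User(id=3, name=None)) is None
# ---------------------------------------------------------------------------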
3271
venv/lib/python3.11/site-packages/sqlalchemy/orm/events.py
Normal file
File diff suppressed because it is too large
237
venv/lib/python3.11/site-packages/sqlalchemy/orm/exc.py
Normal file
@ -0,0 +1,237 @@
# orm/exc.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

"""SQLAlchemy ORM exceptions."""

from __future__ import annotations

from typing import Any
from typing import Optional
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar

from .util import _mapper_property_as_plain_name
from .. import exc as sa_exc
from .. import util
from ..exc import MultipleResultsFound  # noqa
from ..exc import NoResultFound  # noqa

if TYPE_CHECKING:
    from .interfaces import LoaderStrategy
    from .interfaces import MapperProperty
    from .state import InstanceState

_T = TypeVar("_T", bound=Any)

NO_STATE = (AttributeError, KeyError)
"""Exception types that may be raised by instrumentation implementations."""


class StaleDataError(sa_exc.SQLAlchemyError):
    """An operation encountered database state that is unaccounted for.

    Conditions which cause this to happen include:

    * A flush may have attempted to update or delete rows
      and an unexpected number of rows were matched during
      the UPDATE or DELETE statement.  Note that when
      version_id_col is used, rows in UPDATE or DELETE statements
      are also matched against the current known version
      identifier.

    * A mapped object with version_id_col was refreshed,
      and the version number coming back from the database does
      not match that of the object itself.

    * An object is detached from its parent object, however
      the object was previously attached to a different parent
      identity which was garbage collected, and a decision
      cannot be made if the new parent was really the most
      recent "parent".

    """


ConcurrentModificationError = StaleDataError


class FlushError(sa_exc.SQLAlchemyError):
    """An invalid condition was detected during flush()."""


class MappedAnnotationError(sa_exc.ArgumentError):
    """Raised when ORM annotated declarative cannot interpret the
    expression present inside of the :class:`.Mapped` construct.

    .. versionadded:: 2.0.40

    """


class UnmappedError(sa_exc.InvalidRequestError):
    """Base for exceptions that involve expected mappings not present."""


class ObjectDereferencedError(sa_exc.SQLAlchemyError):
    """An operation cannot complete due to an object being garbage
    collected.

    """


class DetachedInstanceError(sa_exc.SQLAlchemyError):
    """An attempt to access unloaded attributes on a
    mapped instance that is detached."""

    code = "bhk3"


class UnmappedInstanceError(UnmappedError):
    """A mapping operation was requested for an unknown instance."""

    @util.preload_module("sqlalchemy.orm.base")
    def __init__(self, obj: object, msg: Optional[str] = None):
        base = util.preloaded.orm_base

        if not msg:
            try:
                base.class_mapper(type(obj))
                name = _safe_cls_name(type(obj))
                msg = (
                    "Class %r is mapped, but this instance lacks "
                    "instrumentation.  This occurs when the instance "
                    "is created before sqlalchemy.orm.mapper(%s) "
                    "was called." % (name, name)
                )
            except UnmappedClassError:
                msg = f"Class '{_safe_cls_name(type(obj))}' is not mapped"
                if isinstance(obj, type):
                    msg += (
                        "; was a class (%s) supplied where an instance was "
                        "required?" % _safe_cls_name(obj)
                    )
        UnmappedError.__init__(self, msg)

    def __reduce__(self) -> Any:
        return self.__class__, (None, self.args[0])


class UnmappedClassError(UnmappedError):
    """A mapping operation was requested for an unknown class."""

    def __init__(self, cls: Type[_T], msg: Optional[str] = None):
        if not msg:
            msg = _default_unmapped(cls)
        UnmappedError.__init__(self, msg)

    def __reduce__(self) -> Any:
        return self.__class__, (None, self.args[0])


class ObjectDeletedError(sa_exc.InvalidRequestError):
    """A refresh operation failed to retrieve the database
    row corresponding to an object's known primary key identity.

    A refresh operation proceeds when an expired attribute is
    accessed on an object, or when :meth:`_query.Query.get` is
    used to retrieve an object which is, upon retrieval, detected
    as expired.  A SELECT is emitted for the target row
    based on primary key; if no row is returned, this
    exception is raised.

    The true meaning of this exception is simply that
    no row exists for the primary key identifier associated
    with a persistent object.  The row may have been
    deleted, or in some cases the primary key updated
    to a new value, outside of the ORM's management of the target
    object.

    """

    @util.preload_module("sqlalchemy.orm.base")
    def __init__(self, state: InstanceState[Any], msg: Optional[str] = None):
        base = util.preloaded.orm_base

        if not msg:
            msg = (
                "Instance '%s' has been deleted, or its "
                "row is otherwise not present." % base.state_str(state)
            )

        sa_exc.InvalidRequestError.__init__(self, msg)

    def __reduce__(self) -> Any:
        return self.__class__, (None, self.args[0])


class UnmappedColumnError(sa_exc.InvalidRequestError):
    """Mapping operation was requested on an unknown column."""


class LoaderStrategyException(sa_exc.InvalidRequestError):
    """A loader strategy for an attribute does not exist."""

    def __init__(
        self,
        applied_to_property_type: Type[Any],
        requesting_property: MapperProperty[Any],
        applies_to: Optional[Type[MapperProperty[Any]]],
        actual_strategy_type: Optional[Type[LoaderStrategy]],
        strategy_key: Tuple[Any, ...],
    ):
        if actual_strategy_type is None:
            sa_exc.InvalidRequestError.__init__(
                self,
                "Can't find strategy %s for %s"
                % (strategy_key, requesting_property),
            )
        else:
            assert applies_to is not None
            sa_exc.InvalidRequestError.__init__(
                self,
                'Can\'t apply "%s" strategy to property "%s", '
                'which is a "%s"; this loader strategy is intended '
                'to be used with a "%s".'
                % (
                    util.clsname_as_plain_name(actual_strategy_type),
                    requesting_property,
                    _mapper_property_as_plain_name(applied_to_property_type),
                    _mapper_property_as_plain_name(applies_to),
                ),
            )


def _safe_cls_name(cls: Type[Any]) -> str:
    cls_name: Optional[str]
    try:
        cls_name = ".".join((cls.__module__, cls.__name__))
    except AttributeError:
        cls_name = getattr(cls, "__name__", None)
        if cls_name is None:
            cls_name = repr(cls)
    return cls_name


@util.preload_module("sqlalchemy.orm.base")
def _default_unmapped(cls: Type[Any]) -> Optional[str]:
    base = util.preloaded.orm_base

    try:
        mappers = base.manager_of_class(cls).mappers  # type: ignore
    except (
        UnmappedClassError,
        TypeError,
    ) + NO_STATE:
        mappers = {}
    name = _safe_cls_name(cls)

    if not mappers:
        return f"Class '{name}' is not mapped"
    else:
        return None
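

# ---------------------------------------------------------------------------
# Illustrative sketch (editor-added, not part of the library source):
# UnmappedInstanceError as a user typically encounters it, by handing a
# non-mapped object to a Session.  Class names are hypothetical.
def _demo_unmapped_instance_error() -> None:
    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session

    class NotMapped:
        pass

    with Session(create_engine("sqlite://")) as session:
        try:
            session.add(NotMapped())
        except UnmappedInstanceError as err:
            assert "is not mapped" in str(err)
# ---------------------------------------------------------------------------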
302
venv/lib/python3.11/site-packages/sqlalchemy/orm/identity.py
Normal file
@ -0,0 +1,302 @@
# orm/identity.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

from __future__ import annotations

from typing import Any
from typing import cast
from typing import Dict
from typing import Iterable
from typing import Iterator
from typing import List
from typing import NoReturn
from typing import Optional
from typing import Set
from typing import Tuple
from typing import TYPE_CHECKING
from typing import TypeVar
import weakref

from . import util as orm_util
from .. import exc as sa_exc

if TYPE_CHECKING:
    from ._typing import _IdentityKeyType
    from .state import InstanceState


_T = TypeVar("_T", bound=Any)

_O = TypeVar("_O", bound=object)


class IdentityMap:
    _wr: weakref.ref[IdentityMap]

    _dict: Dict[_IdentityKeyType[Any], Any]
    _modified: Set[InstanceState[Any]]

    def __init__(self) -> None:
        self._dict = {}
        self._modified = set()
        self._wr = weakref.ref(self)

    def _kill(self) -> None:
        self._add_unpresent = _killed  # type: ignore

    def all_states(self) -> List[InstanceState[Any]]:
        raise NotImplementedError()

    def contains_state(self, state: InstanceState[Any]) -> bool:
        raise NotImplementedError()

    def __contains__(self, key: _IdentityKeyType[Any]) -> bool:
        raise NotImplementedError()

    def safe_discard(self, state: InstanceState[Any]) -> None:
        raise NotImplementedError()

    def __getitem__(self, key: _IdentityKeyType[_O]) -> _O:
        raise NotImplementedError()

    def get(
        self, key: _IdentityKeyType[_O], default: Optional[_O] = None
    ) -> Optional[_O]:
        raise NotImplementedError()

    def fast_get_state(
        self, key: _IdentityKeyType[_O]
    ) -> Optional[InstanceState[_O]]:
        raise NotImplementedError()

    def keys(self) -> Iterable[_IdentityKeyType[Any]]:
        return self._dict.keys()

    def values(self) -> Iterable[object]:
        raise NotImplementedError()

    def replace(self, state: InstanceState[_O]) -> Optional[InstanceState[_O]]:
        raise NotImplementedError()

    def add(self, state: InstanceState[Any]) -> bool:
        raise NotImplementedError()

    def _fast_discard(self, state: InstanceState[Any]) -> None:
        raise NotImplementedError()

    def _add_unpresent(
        self, state: InstanceState[Any], key: _IdentityKeyType[Any]
    ) -> None:
        """optional inlined form of add() which can assume item isn't present
        in the map"""
        self.add(state)

    def _manage_incoming_state(self, state: InstanceState[Any]) -> None:
        state._instance_dict = self._wr

        if state.modified:
            self._modified.add(state)

    def _manage_removed_state(self, state: InstanceState[Any]) -> None:
        del state._instance_dict
        if state.modified:
            self._modified.discard(state)

    def _dirty_states(self) -> Set[InstanceState[Any]]:
        return self._modified

    def check_modified(self) -> bool:
        """return True if any InstanceStates present have been marked
        as 'modified'.

        """
        return bool(self._modified)

    def has_key(self, key: _IdentityKeyType[Any]) -> bool:
        return key in self

    def __len__(self) -> int:
        return len(self._dict)


class WeakInstanceDict(IdentityMap):
    _dict: Dict[_IdentityKeyType[Any], InstanceState[Any]]

    def __getitem__(self, key: _IdentityKeyType[_O]) -> _O:
        state = cast("InstanceState[_O]", self._dict[key])
        o = state.obj()
        if o is None:
            raise KeyError(key)
        return o

    def __contains__(self, key: _IdentityKeyType[Any]) -> bool:
        try:
            if key in self._dict:
                state = self._dict[key]
                o = state.obj()
            else:
                return False
        except KeyError:
            return False
        else:
            return o is not None

    def contains_state(self, state: InstanceState[Any]) -> bool:
        if state.key in self._dict:
            if TYPE_CHECKING:
                assert state.key is not None
            try:
                return self._dict[state.key] is state
            except KeyError:
                return False
        else:
            return False

    def replace(
        self, state: InstanceState[Any]
    ) -> Optional[InstanceState[Any]]:
        assert state.key is not None
        if state.key in self._dict:
            try:
                existing = existing_non_none = self._dict[state.key]
            except KeyError:
                # catch gc removed the key after we just checked for it
                existing = None
            else:
                if existing_non_none is not state:
                    self._manage_removed_state(existing_non_none)
                else:
                    return None
        else:
            existing = None

        self._dict[state.key] = state
        self._manage_incoming_state(state)
        return existing

    def add(self, state: InstanceState[Any]) -> bool:
        key = state.key
        assert key is not None
        # inline of self.__contains__
        if key in self._dict:
            try:
                existing_state = self._dict[key]
            except KeyError:
                # catch gc removed the key after we just checked for it
                pass
            else:
                if existing_state is not state:
                    o = existing_state.obj()
                    if o is not None:
                        raise sa_exc.InvalidRequestError(
                            "Can't attach instance "
                            "%s; another instance with key %s is already "
                            "present in this session."
                            % (orm_util.state_str(state), state.key)
                        )
                else:
                    return False
        self._dict[key] = state
        self._manage_incoming_state(state)
        return True

    def _add_unpresent(
        self, state: InstanceState[Any], key: _IdentityKeyType[Any]
    ) -> None:
        # inlined form of add() called by loading.py
        self._dict[key] = state
        state._instance_dict = self._wr

    def fast_get_state(
        self, key: _IdentityKeyType[_O]
    ) -> Optional[InstanceState[_O]]:
        return self._dict.get(key)

    def get(
        self, key: _IdentityKeyType[_O], default: Optional[_O] = None
    ) -> Optional[_O]:
        if key not in self._dict:
            return default
        try:
            state = cast("InstanceState[_O]", self._dict[key])
        except KeyError:
            # catch gc removed the key after we just checked for it
            return default
        else:
            o = state.obj()
            if o is None:
                return default
            return o

    def items(self) -> List[Tuple[_IdentityKeyType[Any], InstanceState[Any]]]:
        values = self.all_states()
        result = []
        for state in values:
            value = state.obj()
            key = state.key
            assert key is not None
            if value is not None:
                result.append((key, value))
        return result

    def values(self) -> List[object]:
        values = self.all_states()
        result = []
        for state in values:
            value = state.obj()
            if value is not None:
                result.append(value)

        return result

    def __iter__(self) -> Iterator[_IdentityKeyType[Any]]:
        return iter(self.keys())

    def all_states(self) -> List[InstanceState[Any]]:
        return list(self._dict.values())

    def _fast_discard(self, state: InstanceState[Any]) -> None:
        # used by InstanceState for state being
        # GC'ed, inlines _managed_removed_state
        key = state.key
        assert key is not None
        try:
            st = self._dict[key]
        except KeyError:
            # catch gc removed the key after we just checked for it
            pass
        else:
            if st is state:
                self._dict.pop(key, None)

    def discard(self, state: InstanceState[Any]) -> None:
        self.safe_discard(state)

    def safe_discard(self, state: InstanceState[Any]) -> None:
        key = state.key
        if key in self._dict:
            assert key is not None
            try:
                st = self._dict[key]
            except KeyError:
                # catch gc removed the key after we just checked for it
                pass
            else:
                if st is state:
                    self._dict.pop(key, None)
                    self._manage_removed_state(state)


def _killed(state: InstanceState[Any], key: _IdentityKeyType[Any]) -> NoReturn:
    # external function to avoid creating cycles when assigned to
    # the IdentityMap
    raise sa_exc.InvalidRequestError(
        "Object %s cannot be converted to 'persistent' state, as this "
        "identity map is no longer valid.  Has the owning Session "
        "been closed?" % orm_util.state_str(state),
        code="lkrp",
    )
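

# ---------------------------------------------------------------------------
# Illustrative sketch (editor-added, not part of the library source): the
# WeakInstanceDict above is what backs Session.identity_map, giving "at most
# one object per primary key per Session" semantics.  Model names are
# hypothetical.
def _demo_identity_map() -> None:
    from sqlalchemy import Integer, create_engine
    from sqlalchemy.orm import DeclarativeBase, Session, mapped_column

    class Base(DeclarativeBase):
        pass

    class User(Base):
        __tablename__ = "user_account"
        id = mapped_column(Integer, primary_key=True)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(User(id=1))
        session.commit()

        a = session.get(User, 1)
        b = session.get(User, 1)
        assert a is b  # both lookups hit the same identity-map entry
        assert len(session.identity_map) == 1
# ---------------------------------------------------------------------------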
@ -0,0 +1,754 @@
# orm/instrumentation.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: allow-untyped-defs, allow-untyped-calls

"""Defines SQLAlchemy's system of class instrumentation.

This module is usually not directly visible to user applications, but
defines a large part of the ORM's interactivity.

instrumentation.py deals with registration of end-user classes
for state tracking.  It interacts closely with state.py
and attributes.py which establish per-instance and per-class-attribute
instrumentation, respectively.

The class instrumentation system can be customized on a per-class
or global basis using the :mod:`sqlalchemy.ext.instrumentation`
module, which provides the means to build and specify
alternate instrumentation forms.

.. versionchanged:: 0.8
   The instrumentation extension system was moved out of the
   ORM and into the external :mod:`sqlalchemy.ext.instrumentation`
   package.  When that package is imported, it installs
   itself within sqlalchemy.orm so that its more comprehensive
   resolution mechanics take effect.

"""
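
# Editor's note -- a hedged sketch of the extension hook described above,
# using the public sqlalchemy.ext.instrumentation API; kept as a comment so
# nothing executes before the __future__ import below.  "MyEntity" and
# "MyManager" are hypothetical names.
#
#     from sqlalchemy.ext.instrumentation import InstrumentationManager
#
#     class MyManager(InstrumentationManager):
#         ...  # override install/remove hooks as needed
#
#     class MyEntity:
#         # importing sqlalchemy.ext.instrumentation activates support
#         # for this per-class attribute:
#         __sa_instrumentation_manager__ = MyManager
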
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import cast
|
||||
from typing import Collection
|
||||
from typing import Dict
|
||||
from typing import Generic
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TypeVar
|
||||
from typing import Union
|
||||
import weakref
|
||||
|
||||
from . import base
|
||||
from . import collections
|
||||
from . import exc
|
||||
from . import interfaces
|
||||
from . import state
|
||||
from ._typing import _O
|
||||
from .attributes import _is_collection_attribute_impl
|
||||
from .. import util
|
||||
from ..event import EventTarget
|
||||
from ..util import HasMemoized
|
||||
from ..util.typing import Literal
|
||||
from ..util.typing import Protocol
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._typing import _RegistryType
|
||||
from .attributes import AttributeImpl
|
||||
from .attributes import QueryableAttribute
|
||||
from .collections import _AdaptedCollectionProtocol
|
||||
from .collections import _CollectionFactoryType
|
||||
from .decl_base import _MapperConfig
|
||||
from .events import InstanceEvents
|
||||
from .mapper import Mapper
|
||||
from .state import InstanceState
|
||||
from ..event import dispatcher
|
||||
|
||||
_T = TypeVar("_T", bound=Any)
|
||||
DEL_ATTR = util.symbol("DEL_ATTR")
|
||||
|
||||
|
||||
class _ExpiredAttributeLoaderProto(Protocol):
|
||||
def __call__(
|
||||
self,
|
||||
state: state.InstanceState[Any],
|
||||
toload: Set[str],
|
||||
passive: base.PassiveFlag,
|
||||
) -> None: ...
|
||||
|
||||
|
||||
class _ManagerFactory(Protocol):
|
||||
def __call__(self, class_: Type[_O]) -> ClassManager[_O]: ...
|
||||
|
||||
|
||||
class ClassManager(
|
||||
HasMemoized,
|
||||
Dict[str, "QueryableAttribute[Any]"],
|
||||
Generic[_O],
|
||||
EventTarget,
|
||||
):
|
||||
"""Tracks state information at the class level."""
|
||||
|
||||
dispatch: dispatcher[ClassManager[_O]]
|
||||
|
||||
MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
|
||||
STATE_ATTR = base.DEFAULT_STATE_ATTR
|
||||
|
||||
_state_setter = staticmethod(util.attrsetter(STATE_ATTR))
|
||||
|
||||
expired_attribute_loader: _ExpiredAttributeLoaderProto
|
||||
"previously known as deferred_scalar_loader"
|
||||
|
||||
init_method: Optional[Callable[..., None]]
|
||||
original_init: Optional[Callable[..., None]] = None
|
||||
|
||||
factory: Optional[_ManagerFactory]
|
||||
|
||||
declarative_scan: Optional[weakref.ref[_MapperConfig]] = None
|
||||
|
||||
registry: _RegistryType
|
||||
|
||||
if not TYPE_CHECKING:
|
||||
# starts as None during setup
|
||||
registry = None
|
||||
|
||||
class_: Type[_O]
|
||||
|
||||
_bases: List[ClassManager[Any]]
|
||||
|
||||
@property
|
||||
@util.deprecated(
|
||||
"1.4",
|
||||
message="The ClassManager.deferred_scalar_loader attribute is now "
|
||||
"named expired_attribute_loader",
|
||||
)
|
||||
def deferred_scalar_loader(self):
|
||||
return self.expired_attribute_loader
|
||||
|
||||
@deferred_scalar_loader.setter
|
||||
@util.deprecated(
|
||||
"1.4",
|
||||
message="The ClassManager.deferred_scalar_loader attribute is now "
|
||||
"named expired_attribute_loader",
|
||||
)
|
||||
def deferred_scalar_loader(self, obj):
|
||||
self.expired_attribute_loader = obj
|
||||
|
||||
    def __init__(self, class_):
        self.class_ = class_
        self.info = {}
        self.new_init = None
        self.local_attrs = {}
        self.originals = {}
        self._finalized = False
        self.factory = None
        self.init_method = None

        self._bases = [
            mgr
            for mgr in cast(
                "List[Optional[ClassManager[Any]]]",
                [
                    opt_manager_of_class(base)
                    for base in self.class_.__bases__
                    if isinstance(base, type)
                ],
            )
            if mgr is not None
        ]

        for base_ in self._bases:
            self.update(base_)

        cast(
            "InstanceEvents", self.dispatch._events
        )._new_classmanager_instance(class_, self)

        for basecls in class_.__mro__:
            mgr = opt_manager_of_class(basecls)
            if mgr is not None:
                self.dispatch._update(mgr.dispatch)

        self.manage()

        if "__del__" in class_.__dict__:
            util.warn(
                "__del__() method on class %s will "
                "cause unreachable cycles and memory leaks, "
                "as SQLAlchemy instrumentation often creates "
                "reference cycles. Please remove this method." % class_
            )

    def _update_state(
        self,
        finalize: bool = False,
        mapper: Optional[Mapper[_O]] = None,
        registry: Optional[_RegistryType] = None,
        declarative_scan: Optional[_MapperConfig] = None,
        expired_attribute_loader: Optional[
            _ExpiredAttributeLoaderProto
        ] = None,
        init_method: Optional[Callable[..., None]] = None,
    ) -> None:
        if mapper:
            self.mapper = mapper
        if registry:
            registry._add_manager(self)
        if declarative_scan:
            self.declarative_scan = weakref.ref(declarative_scan)
        if expired_attribute_loader:
            self.expired_attribute_loader = expired_attribute_loader

        if init_method:
            assert not self._finalized, (
                "class is already instrumented, "
                "init_method %s can't be applied" % init_method
            )
            self.init_method = init_method

        if not self._finalized:
            self.original_init = (
                self.init_method
                if self.init_method is not None
                and self.class_.__init__ is object.__init__
                else self.class_.__init__
            )

        if finalize and not self._finalized:
            self._finalize()

    def _finalize(self) -> None:
        if self._finalized:
            return
        self._finalized = True

        self._instrument_init()

        _instrumentation_factory.dispatch.class_instrument(self.class_)

    def __hash__(self) -> int:  # type: ignore[override]
        return id(self)

    def __eq__(self, other: Any) -> bool:
        return other is self

    @property
    def is_mapped(self) -> bool:
        return "mapper" in self.__dict__

    @HasMemoized.memoized_attribute
    def _all_key_set(self):
        return frozenset(self)

    @HasMemoized.memoized_attribute
    def _collection_impl_keys(self):
        return frozenset(
            [attr.key for attr in self.values() if attr.impl.collection]
        )

    @HasMemoized.memoized_attribute
    def _scalar_loader_impls(self):
        return frozenset(
            [
                attr.impl
                for attr in self.values()
                if attr.impl.accepts_scalar_loader
            ]
        )

    @HasMemoized.memoized_attribute
    def _loader_impls(self):
        return frozenset([attr.impl for attr in self.values()])

    @util.memoized_property
    def mapper(self) -> Mapper[_O]:
        # raises unless self.mapper has been assigned
        raise exc.UnmappedClassError(self.class_)

    def _all_sqla_attributes(self, exclude=None):
        """return an iterator of all classbound attributes that
        implement :class:`.InspectionAttr`.

        This includes :class:`.QueryableAttribute` as well as extension
        types such as :class:`.hybrid_property` and
        :class:`.AssociationProxy`.

        """

        found: Dict[str, Any] = {}

        # constraints:
        # 1. yield keys in cls.__dict__ order
        # 2. if a subclass has the same key as a superclass, include that
        #    key as part of the ordering of the superclass, because an
        #    overridden key is usually installed by the mapper which is going
        #    on a different ordering
        # 3. don't use getattr() as this fires off descriptors

        for supercls in self.class_.__mro__[0:-1]:
            inherits = supercls.__mro__[1]
            for key in supercls.__dict__:
                found.setdefault(key, supercls)
                if key in inherits.__dict__:
                    continue
                val = found[key].__dict__[key]
                if (
                    isinstance(val, interfaces.InspectionAttr)
                    and val.is_attribute
                ):
                    yield key, val

    def _get_class_attr_mro(self, key, default=None):
        """return an attribute on the class without tripping it."""

        for supercls in self.class_.__mro__:
            if key in supercls.__dict__:
                return supercls.__dict__[key]
        else:
            return default

    def _attr_has_impl(self, key: str) -> bool:
        """Return True if the given attribute is fully initialized.

        i.e. has an impl.
        """

        return key in self and self[key].impl is not None

    def _subclass_manager(self, cls: Type[_T]) -> ClassManager[_T]:
        """Create a new ClassManager for a subclass of this ClassManager's
        class.

        This is called automatically when attributes are instrumented so that
        the attributes can be propagated to subclasses against their own
        class-local manager, without the need for mappers etc. to have already
        pre-configured managers for the full class hierarchy. Mappers
        can post-configure the auto-generated ClassManager when needed.

        """
        return register_class(cls, finalize=False)

    def _instrument_init(self):
        self.new_init = _generate_init(self.class_, self, self.original_init)
        self.install_member("__init__", self.new_init)

    @util.memoized_property
    def _state_constructor(self) -> Type[state.InstanceState[_O]]:
        self.dispatch.first_init(self, self.class_)
        return state.InstanceState

    def manage(self):
        """Mark this instance as the manager for its class."""

        setattr(self.class_, self.MANAGER_ATTR, self)

    @util.hybridmethod
    def manager_getter(self):
        return _default_manager_getter

    @util.hybridmethod
    def state_getter(self):
        """Return a (instance) -> InstanceState callable.

        "state getter" callables should raise either KeyError or
        AttributeError if no InstanceState could be found for the
        instance.
        """

        return _default_state_getter

    @util.hybridmethod
    def dict_getter(self):
        return _default_dict_getter

    def instrument_attribute(
        self,
        key: str,
        inst: QueryableAttribute[Any],
        propagated: bool = False,
    ) -> None:
        if propagated:
            if key in self.local_attrs:
                return  # don't override local attr with inherited attr
        else:
            self.local_attrs[key] = inst
            self.install_descriptor(key, inst)
        self._reset_memoizations()
        self[key] = inst

        for cls in self.class_.__subclasses__():
            manager = self._subclass_manager(cls)
            manager.instrument_attribute(key, inst, True)

    def subclass_managers(self, recursive):
        for cls in self.class_.__subclasses__():
            mgr = opt_manager_of_class(cls)
            if mgr is not None and mgr is not self:
                yield mgr
                if recursive:
                    yield from mgr.subclass_managers(True)

    def post_configure_attribute(self, key):
        _instrumentation_factory.dispatch.attribute_instrument(
            self.class_, key, self[key]
        )

    def uninstrument_attribute(self, key, propagated=False):
        if key not in self:
            return
        if propagated:
            if key in self.local_attrs:
                return  # don't get rid of local attr
        else:
            del self.local_attrs[key]
            self.uninstall_descriptor(key)
        self._reset_memoizations()
        del self[key]
        for cls in self.class_.__subclasses__():
            manager = opt_manager_of_class(cls)
            if manager:
                manager.uninstrument_attribute(key, True)

    def unregister(self) -> None:
        """remove all instrumentation established by this ClassManager."""

        for key in list(self.originals):
            self.uninstall_member(key)

        self.mapper = None
        self.dispatch = None  # type: ignore
        self.new_init = None
        self.info.clear()

        for key in list(self):
            if key in self.local_attrs:
                self.uninstrument_attribute(key)

        if self.MANAGER_ATTR in self.class_.__dict__:
            delattr(self.class_, self.MANAGER_ATTR)

    def install_descriptor(
        self, key: str, inst: QueryableAttribute[Any]
    ) -> None:
        if key in (self.STATE_ATTR, self.MANAGER_ATTR):
            raise KeyError(
                "%r: requested attribute name conflicts with "
                "instrumentation attribute of the same name." % key
            )
        setattr(self.class_, key, inst)

    def uninstall_descriptor(self, key: str) -> None:
        delattr(self.class_, key)

    def install_member(self, key: str, implementation: Any) -> None:
        if key in (self.STATE_ATTR, self.MANAGER_ATTR):
            raise KeyError(
                "%r: requested attribute name conflicts with "
                "instrumentation attribute of the same name." % key
            )
        self.originals.setdefault(key, self.class_.__dict__.get(key, DEL_ATTR))
        setattr(self.class_, key, implementation)

    def uninstall_member(self, key: str) -> None:
        original = self.originals.pop(key, None)
        if original is not DEL_ATTR:
            setattr(self.class_, key, original)
        else:
            delattr(self.class_, key)

    def instrument_collection_class(
        self, key: str, collection_class: Type[Collection[Any]]
    ) -> _CollectionFactoryType:
        return collections.prepare_instrumentation(collection_class)

    def initialize_collection(
        self,
        key: str,
        state: InstanceState[_O],
        factory: _CollectionFactoryType,
    ) -> Tuple[collections.CollectionAdapter, _AdaptedCollectionProtocol]:
        user_data = factory()
        impl = self.get_impl(key)
        assert _is_collection_attribute_impl(impl)
        adapter = collections.CollectionAdapter(impl, state, user_data)
        return adapter, user_data

    def is_instrumented(self, key: str, search: bool = False) -> bool:
        if search:
            return key in self
        else:
            return key in self.local_attrs

    def get_impl(self, key: str) -> AttributeImpl:
        return self[key].impl

    @property
    def attributes(self) -> Iterable[Any]:
        return iter(self.values())

    # InstanceState management

    def new_instance(self, state: Optional[InstanceState[_O]] = None) -> _O:
        # here, we would prefer _O to be bound to "object"
        # so that mypy sees that __new__ is present. currently
        # it's bound to Any as there were other problems not having
        # it that way but these can be revisited
        instance = self.class_.__new__(self.class_)
        if state is None:
            state = self._state_constructor(instance, self)
        self._state_setter(instance, state)
        return instance

    def setup_instance(
        self, instance: _O, state: Optional[InstanceState[_O]] = None
    ) -> None:
        if state is None:
            state = self._state_constructor(instance, self)
        self._state_setter(instance, state)

    def teardown_instance(self, instance: _O) -> None:
        delattr(instance, self.STATE_ATTR)

    def _serialize(
        self, state: InstanceState[_O], state_dict: Dict[str, Any]
    ) -> _SerializeManager:
        return _SerializeManager(state, state_dict)

    def _new_state_if_none(
        self, instance: _O
    ) -> Union[Literal[False], InstanceState[_O]]:
        """Install a default InstanceState if none is present.

        A private convenience method used by the __init__ decorator.

        """
        if hasattr(instance, self.STATE_ATTR):
            return False
        elif self.class_ is not instance.__class__ and self.is_mapped:
            # this will create a new ClassManager for the
            # subclass, without a mapper. This is likely a
            # user error situation but allow the object
            # to be constructed, so that it is usable
            # in a non-ORM context at least.
            return self._subclass_manager(
                instance.__class__
            )._new_state_if_none(instance)
        else:
            state = self._state_constructor(instance, self)
            self._state_setter(instance, state)
            return state

    def has_state(self, instance: _O) -> bool:
        return hasattr(instance, self.STATE_ATTR)

    def has_parent(
        self, state: InstanceState[_O], key: str, optimistic: bool = False
    ) -> bool:
        """TODO"""
        return self.get_impl(key).hasparent(state, optimistic=optimistic)

    def __bool__(self) -> bool:
        """All ClassManagers are non-zero regardless of attribute state."""
        return True

    def __repr__(self) -> str:
        return "<%s of %r at %x>" % (
            self.__class__.__name__,
            self.class_,
            id(self),
        )

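# --- illustrative sketch, not part of the original module ---------------
# ClassManager.__init__ above warns as soon as a class defining __del__
# is instrumented, since instrumentation's reference cycles would keep
# such instances from being collected promptly. The Base/Widget names
# below are hypothetical, shown only to demonstrate the trigger:
def _example_del_warning():
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class Widget(Base):
        __tablename__ = "widget"
        id: Mapped[int] = mapped_column(primary_key=True)

        def __del__(self):  # instrumentation emits the warning above
            pass

    return Widget
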
class _SerializeManager:
    """Provide serialization of a :class:`.ClassManager`.

    The :class:`.InstanceState` uses ``__init__()`` on serialize
    and ``__call__()`` on deserialize.

    """

    def __init__(self, state: state.InstanceState[Any], d: Dict[str, Any]):
        self.class_ = state.class_
        manager = state.manager
        manager.dispatch.pickle(state, d)

    def __call__(self, state, inst, state_dict):
        state.manager = manager = opt_manager_of_class(self.class_)
        if manager is None:
            raise exc.UnmappedInstanceError(
                inst,
                "Cannot deserialize object of type %r - "
                "no mapper() has "
                "been configured for this class within the current "
                "Python process!" % self.class_,
            )
        elif manager.is_mapped and not manager.mapper.configured:
            manager.mapper._check_configure()

        # setup _sa_instance_state ahead of time so that
        # unpickle events can access the object normally.
        # see [ticket:2362]
        if inst is not None:
            manager.setup_instance(inst, state)
        manager.dispatch.unpickle(state, state_dict)

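# --- illustrative sketch, not part of the original module ---------------
# _SerializeManager is what makes mapped instances picklable: __init__
# runs on serialize and __call__ on deserialize, and as shown above an
# UnmappedInstanceError is raised if the class is no longer mapped in the
# process that unpickles. "obj" is any hypothetical mapped instance:
def _example_pickle_roundtrip(obj):
    import pickle

    # fires the pickle event on dumps and the unpickle event on loads
    return pickle.loads(pickle.dumps(obj))
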
class InstrumentationFactory(EventTarget):
    """Factory for new ClassManager instances."""

    dispatch: dispatcher[InstrumentationFactory]

    def create_manager_for_cls(self, class_: Type[_O]) -> ClassManager[_O]:
        assert class_ is not None
        assert opt_manager_of_class(class_) is None

        # give a more complicated subclass
        # a chance to do what it wants here
        manager, factory = self._locate_extended_factory(class_)

        if factory is None:
            factory = ClassManager
            manager = ClassManager(class_)
        else:
            assert manager is not None

        self._check_conflicts(class_, factory)

        manager.factory = factory

        return manager

    def _locate_extended_factory(
        self, class_: Type[_O]
    ) -> Tuple[Optional[ClassManager[_O]], Optional[_ManagerFactory]]:
        """Overridden by a subclass to do an extended lookup."""
        return None, None

    def _check_conflicts(
        self, class_: Type[_O], factory: Callable[[Type[_O]], ClassManager[_O]]
    ) -> None:
        """Overridden by a subclass to test for conflicting factories."""

    def unregister(self, class_: Type[_O]) -> None:
        manager = manager_of_class(class_)
        manager.unregister()
        self.dispatch.class_uninstrument(class_)

# this attribute is replaced by sqlalchemy.ext.instrumentation
# when imported.
_instrumentation_factory = InstrumentationFactory()

# these attributes are replaced by sqlalchemy.ext.instrumentation
# when a non-standard InstrumentationManager class is first
# used to instrument a class.
instance_state = _default_state_getter = base.instance_state

instance_dict = _default_dict_getter = base.instance_dict

manager_of_class = _default_manager_getter = base.manager_of_class
opt_manager_of_class = _default_opt_manager_getter = base.opt_manager_of_class

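# --- illustrative sketch, not part of the original module ---------------
# The getters assigned above are the hooks the rest of the ORM uses to
# reach the instrumentation layer; for a hypothetical mapped instance
# they can also be called directly:
def _example_getters(obj):
    mgr = manager_of_class(type(obj))  # the ClassManager for the class
    state = instance_state(obj)  # the InstanceState for the instance
    dict_ = instance_dict(obj)  # the instance's attribute dictionary
    return mgr, state, dict_
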
def register_class(
    class_: Type[_O],
    finalize: bool = True,
    mapper: Optional[Mapper[_O]] = None,
    registry: Optional[_RegistryType] = None,
    declarative_scan: Optional[_MapperConfig] = None,
    expired_attribute_loader: Optional[_ExpiredAttributeLoaderProto] = None,
    init_method: Optional[Callable[..., None]] = None,
) -> ClassManager[_O]:
    """Register class instrumentation.

    Returns the existing or newly created class manager.

    """

    manager = opt_manager_of_class(class_)
    if manager is None:
        manager = _instrumentation_factory.create_manager_for_cls(class_)
    manager._update_state(
        mapper=mapper,
        registry=registry,
        declarative_scan=declarative_scan,
        expired_attribute_loader=expired_attribute_loader,
        init_method=init_method,
        finalize=finalize,
    )

    return manager

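# --- illustrative sketch, not part of the original module ---------------
# register_class() is normally driven by the mapper machinery rather than
# called by user code; afterwards the manager is reachable through the
# class itself, which this sketch merely verifies:
def _example_register(cls):
    manager = register_class(cls, finalize=True)
    assert opt_manager_of_class(cls) is manager
    return manager
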
def unregister_class(class_):
    """Unregister class instrumentation."""

    _instrumentation_factory.unregister(class_)


def is_instrumented(instance, key):
    """Return True if the given attribute on the given instance is
    instrumented by the attributes package.

    This function may be used regardless of instrumentation
    applied directly to the class, i.e. no descriptors are required.

    """
    return manager_of_class(instance.__class__).is_instrumented(
        key, search=True
    )

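# --- illustrative sketch, not part of the original module ---------------
# is_instrumented() answers per-attribute; search=True (used above) also
# finds attributes installed on superclass managers, not only class-local
# ones. "obj" is a hypothetical mapped instance with an "id" attribute:
def _example_is_instrumented(obj):
    return is_instrumented(obj, "id")  # True only for ORM-instrumented attrs
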
def _generate_init(class_, class_manager, original_init):
    """Build an __init__ decorator that triggers ClassManager events."""

    # TODO: we should use the ClassManager's notion of the
    # original '__init__' method, once ClassManager is fixed
    # to always reference that.

    if original_init is None:
        original_init = class_.__init__

    # Go through some effort here and don't change the user's __init__
    # calling signature, including the unlikely case that it has
    # a return value.
    # FIXME: need to juggle local names to avoid constructor argument
    # clashes.
    func_body = """\
def __init__(%(apply_pos)s):
    new_state = class_manager._new_state_if_none(%(self_arg)s)
    if new_state:
        return new_state._initialize_instance(%(apply_kw)s)
    else:
        return original_init(%(apply_kw)s)
"""
    func_vars = util.format_argspec_init(original_init, grouped=False)
    func_text = func_body % func_vars

    func_defaults = getattr(original_init, "__defaults__", None)
    func_kw_defaults = getattr(original_init, "__kwdefaults__", None)

    env = locals().copy()
    env["__name__"] = __name__
    exec(func_text, env)
    __init__ = env["__init__"]
    __init__.__doc__ = original_init.__doc__
    __init__._sa_original_init = original_init

    if func_defaults:
        __init__.__defaults__ = func_defaults
    if func_kw_defaults:
        __init__.__kwdefaults__ = func_kw_defaults

    return __init__
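# --- illustrative sketch, not part of the original module ---------------
# The wrapper built above keeps a handle to the user's original __init__;
# this is an internal detail, shown only to clarify what _generate_init()
# produces for a hypothetical mapped class:
def _example_original_init(mapped_cls):
    return getattr(mapped_cls.__init__, "_sa_original_init", None)
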
1490
venv/lib/python3.11/site-packages/sqlalchemy/orm/interfaces.py
Normal file
File diff suppressed because it is too large
1682
venv/lib/python3.11/site-packages/sqlalchemy/orm/loading.py
Normal file
File diff suppressed because it is too large
@ -0,0 +1,557 @@
# orm/mapped_collection.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

from __future__ import annotations

import operator
from typing import Any
from typing import Callable
from typing import Dict
from typing import Generic
from typing import List
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar
from typing import Union

from . import base
from .collections import collection
from .collections import collection_adapter
from .. import exc as sa_exc
from .. import util
from ..sql import coercions
from ..sql import expression
from ..sql import roles
from ..util.langhelpers import Missing
from ..util.langhelpers import MissingOr
from ..util.typing import Literal

if TYPE_CHECKING:
    from . import AttributeEventToken
    from . import Mapper
    from .collections import CollectionAdapter
    from ..sql.elements import ColumnElement

_KT = TypeVar("_KT", bound=Any)
_VT = TypeVar("_VT", bound=Any)

class _PlainColumnGetter(Generic[_KT]):
    """Plain column getter, stores collection of Column objects
    directly.

    Serializes to a :class:`._SerializableColumnGetterV2`
    which has more expensive __call__() performance
    and some rare caveats.

    """

    __slots__ = ("cols", "composite")

    def __init__(self, cols: Sequence[ColumnElement[_KT]]) -> None:
        self.cols = cols
        self.composite = len(cols) > 1

    def __reduce__(
        self,
    ) -> Tuple[
        Type[_SerializableColumnGetterV2[_KT]],
        Tuple[Sequence[Tuple[Optional[str], Optional[str]]]],
    ]:
        return _SerializableColumnGetterV2._reduce_from_cols(self.cols)

    def _cols(self, mapper: Mapper[_KT]) -> Sequence[ColumnElement[_KT]]:
        return self.cols

    def __call__(self, value: _KT) -> MissingOr[Union[_KT, Tuple[_KT, ...]]]:
        state = base.instance_state(value)
        m = base._state_mapper(state)

        key: List[_KT] = [
            m._get_state_attr_by_column(state, state.dict, col)
            for col in self._cols(m)
        ]
        if self.composite:
            return tuple(key)
        else:
            obj = key[0]
            if obj is None:
                return Missing
            else:
                return obj

class _SerializableColumnGetterV2(_PlainColumnGetter[_KT]):
    """Updated serializable getter which deals with
    multi-table mapped classes.

    Two extremely unusual cases are not supported.
    Mappings which have tables across multiple metadata
    objects, or which are mapped to non-Table selectables
    linked across inheriting mappers may fail to function
    here.

    """

    __slots__ = ("colkeys",)

    def __init__(
        self, colkeys: Sequence[Tuple[Optional[str], Optional[str]]]
    ) -> None:
        self.colkeys = colkeys
        self.composite = len(colkeys) > 1

    def __reduce__(
        self,
    ) -> Tuple[
        Type[_SerializableColumnGetterV2[_KT]],
        Tuple[Sequence[Tuple[Optional[str], Optional[str]]]],
    ]:
        return self.__class__, (self.colkeys,)

    @classmethod
    def _reduce_from_cols(cls, cols: Sequence[ColumnElement[_KT]]) -> Tuple[
        Type[_SerializableColumnGetterV2[_KT]],
        Tuple[Sequence[Tuple[Optional[str], Optional[str]]]],
    ]:
        def _table_key(c: ColumnElement[_KT]) -> Optional[str]:
            if not isinstance(c.table, expression.TableClause):
                return None
            else:
                return c.table.key  # type: ignore

        colkeys = [(c.key, _table_key(c)) for c in cols]
        return _SerializableColumnGetterV2, (colkeys,)

    def _cols(self, mapper: Mapper[_KT]) -> Sequence[ColumnElement[_KT]]:
        cols: List[ColumnElement[_KT]] = []
        metadata = getattr(mapper.local_table, "metadata", None)
        for ckey, tkey in self.colkeys:
            if tkey is None or metadata is None or tkey not in metadata:
                cols.append(mapper.local_table.c[ckey])  # type: ignore
            else:
                cols.append(metadata.tables[tkey].c[ckey])
        return cols

def column_keyed_dict(
    mapping_spec: Union[Type[_KT], Callable[[_KT], _VT]],
    *,
    ignore_unpopulated_attribute: bool = False,
) -> Type[KeyFuncDict[_KT, _KT]]:
    """A dictionary-based collection type with column-based keying.

    .. versionchanged:: 2.0 Renamed :data:`.column_mapped_collection` to
       :class:`.column_keyed_dict`.

    Returns a :class:`.KeyFuncDict` factory which will produce new
    dictionary keys based on the value of a particular :class:`.Column`-mapped
    attribute on ORM mapped instances to be added to the dictionary.

    .. note:: the value of the target attribute must be assigned with its
       value at the time that the object is being added to the
       dictionary collection. Additionally, changes to the key attribute
       are **not tracked**, which means the key in the dictionary is not
       automatically synchronized with the key value on the target object
       itself. See :ref:`key_collections_mutations` for further details.

    .. seealso::

        :ref:`orm_dictionary_collection` - background on use

    :param mapping_spec: a :class:`_schema.Column` object that is expected
     to be mapped by the target mapper to a particular attribute on the
     mapped class, the value of which on a particular instance is to be used
     as the key for a new dictionary entry for that instance.
    :param ignore_unpopulated_attribute: if True, and the mapped attribute
     indicated by the given :class:`_schema.Column` target attribute
     on an object is not populated at all, the operation will be silently
     skipped. By default, an error is raised.

     .. versionadded:: 2.0 an error is raised by default if the attribute
        being used for the dictionary key is determined to have never been
        populated with any value. The
        :paramref:`_orm.column_keyed_dict.ignore_unpopulated_attribute`
        parameter may be set which will instead indicate that this condition
        should be ignored, and the append operation silently skipped.
        This is in contrast to the behavior of the 1.x series which would
        erroneously populate the value in the dictionary with an arbitrary key
        value of ``None``.


    """
    cols = [
        coercions.expect(roles.ColumnArgumentRole, q, argname="mapping_spec")
        for q in util.to_list(mapping_spec)
    ]
    keyfunc = _PlainColumnGetter(cols)
    return _mapped_collection_cls(
        keyfunc,
        ignore_unpopulated_attribute=ignore_unpopulated_attribute,
    )

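# --- illustrative sketch, not part of the original module ---------------
# Typical use of column_keyed_dict(): key a relationship's dictionary on
# a mapped Column's value. The Base/Note/User names are hypothetical:
def _example_column_keyed_dict():
    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        mapped_column,
        relationship,
    )

    class Base(DeclarativeBase):
        pass

    class Note(Base):
        __tablename__ = "note"
        id: Mapped[int] = mapped_column(primary_key=True)
        user_id: Mapped[int] = mapped_column(ForeignKey("user_account.id"))
        keyword: Mapped[str] = mapped_column()

    class User(Base):
        __tablename__ = "user_account"
        id: Mapped[int] = mapped_column(primary_key=True)
        # keys the dict on the value of Note.keyword's column
        notes: Mapped[dict[str, Note]] = relationship(
            collection_class=column_keyed_dict(Note.__table__.c.keyword)
        )

    u = User()
    u.notes["sa"] = Note(keyword="sa")  # plain dict assignment works
    return u
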
class _AttrGetter:
    __slots__ = ("attr_name", "getter")

    def __init__(self, attr_name: str):
        self.attr_name = attr_name
        self.getter = operator.attrgetter(attr_name)

    def __call__(self, mapped_object: Any) -> Any:
        obj = self.getter(mapped_object)
        if obj is None:
            state = base.instance_state(mapped_object)
            mp = state.mapper
            if self.attr_name in mp.attrs:
                dict_ = state.dict
                obj = dict_.get(self.attr_name, base.NO_VALUE)
                if obj is None:
                    return Missing
            else:
                return Missing

        return obj

    def __reduce__(self) -> Tuple[Type[_AttrGetter], Tuple[str]]:
        return _AttrGetter, (self.attr_name,)

def attribute_keyed_dict(
    attr_name: str, *, ignore_unpopulated_attribute: bool = False
) -> Type[KeyFuncDict[Any, Any]]:
    """A dictionary-based collection type with attribute-based keying.

    .. versionchanged:: 2.0 Renamed :data:`.attribute_mapped_collection` to
       :func:`.attribute_keyed_dict`.

    Returns a :class:`.KeyFuncDict` factory which will produce new
    dictionary keys based on the value of a particular named attribute on
    ORM mapped instances to be added to the dictionary.

    .. note:: the value of the target attribute must be assigned with its
       value at the time that the object is being added to the
       dictionary collection. Additionally, changes to the key attribute
       are **not tracked**, which means the key in the dictionary is not
       automatically synchronized with the key value on the target object
       itself. See :ref:`key_collections_mutations` for further details.

    .. seealso::

        :ref:`orm_dictionary_collection` - background on use

    :param attr_name: string name of an ORM-mapped attribute
     on the mapped class, the value of which on a particular instance
     is to be used as the key for a new dictionary entry for that instance.
    :param ignore_unpopulated_attribute: if True, and the target attribute
     on an object is not populated at all, the operation will be silently
     skipped. By default, an error is raised.

     .. versionadded:: 2.0 an error is raised by default if the attribute
        being used for the dictionary key is determined to have never been
        populated with any value. The
        :paramref:`_orm.attribute_keyed_dict.ignore_unpopulated_attribute`
        parameter may be set which will instead indicate that this condition
        should be ignored, and the append operation silently skipped.
        This is in contrast to the behavior of the 1.x series which would
        erroneously populate the value in the dictionary with an arbitrary key
        value of ``None``.


    """

    return _mapped_collection_cls(
        _AttrGetter(attr_name),
        ignore_unpopulated_attribute=ignore_unpopulated_attribute,
    )

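# --- illustrative sketch, not part of the original module ---------------
# attribute_keyed_dict() is the attribute-name flavor of the factory
# above; the Base/Item/Parent names are hypothetical:
def _example_attribute_keyed_dict():
    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        mapped_column,
        relationship,
    )

    class Base(DeclarativeBase):
        pass

    class Item(Base):
        __tablename__ = "item"
        id: Mapped[int] = mapped_column(primary_key=True)
        parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))
        keyword: Mapped[str] = mapped_column()

    class Parent(Base):
        __tablename__ = "parent"
        id: Mapped[int] = mapped_column(primary_key=True)
        # keys the dict on the value of Item.keyword at append time
        items: Mapped[dict[str, Item]] = relationship(
            collection_class=attribute_keyed_dict("keyword")
        )

    p = Parent()
    p.items["red"] = Item(keyword="red")
    return p
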
def keyfunc_mapping(
    keyfunc: Callable[[Any], Any],
    *,
    ignore_unpopulated_attribute: bool = False,
) -> Type[KeyFuncDict[_KT, Any]]:
    """A dictionary-based collection type with arbitrary keying.

    .. versionchanged:: 2.0 Renamed :data:`.mapped_collection` to
       :func:`.keyfunc_mapping`.

    Returns a :class:`.KeyFuncDict` factory with a keying function
    generated from keyfunc, a callable that takes an entity and returns a
    key value.

    .. note:: the given keyfunc is called only once at the time that the
       target object is being added to the collection. Changes to the
       effective value returned by the function are not tracked.


    .. seealso::

        :ref:`orm_dictionary_collection` - background on use

    :param keyfunc: a callable that will be passed the ORM-mapped instance
     which should then generate a new key to use in the dictionary.
     If the value returned is :attr:`.LoaderCallableStatus.NO_VALUE`, an error
     is raised.
    :param ignore_unpopulated_attribute: if True, and the callable returns
     :attr:`.LoaderCallableStatus.NO_VALUE` for a particular instance, the
     operation will be silently skipped. By default, an error is raised.

     .. versionadded:: 2.0 an error is raised by default if the callable
        being used for the dictionary key returns
        :attr:`.LoaderCallableStatus.NO_VALUE`, which in an ORM attribute
        context indicates an attribute that was never populated with any value.
        The :paramref:`_orm.mapped_collection.ignore_unpopulated_attribute`
        parameter may be set which will instead indicate that this condition
        should be ignored, and the append operation silently skipped. This is
        in contrast to the behavior of the 1.x series which would erroneously
        populate the value in the dictionary with an arbitrary key value of
        ``None``.


    """
    return _mapped_collection_cls(
        keyfunc, ignore_unpopulated_attribute=ignore_unpopulated_attribute
    )

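# --- illustrative sketch, not part of the original module ---------------
# keyfunc_mapping() accepts any callable; here a hypothetical composite
# string key is derived from two attributes of the child object:
def _example_keyfunc_mapping():
    keyed = keyfunc_mapping(lambda item: f"{item.keyword}:{item.id}")
    # pass as relationship(collection_class=keyed); the lambda is invoked
    # once per object as it is added to or removed from the collection
    return keyed
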
class KeyFuncDict(Dict[_KT, _VT]):
    """Base for ORM mapped dictionary classes.

    Extends the ``dict`` type with additional methods needed by SQLAlchemy
    ORM collection classes. Use of :class:`_orm.KeyFuncDict` is most
    directly achieved by using the :func:`.attribute_keyed_dict` or
    :func:`.column_keyed_dict` class factories.
    :class:`_orm.KeyFuncDict` may also serve as the base for user-defined
    custom dictionary classes.

    .. versionchanged:: 2.0 Renamed :class:`.MappedCollection` to
       :class:`.KeyFuncDict`.

    .. seealso::

        :func:`_orm.attribute_keyed_dict`

        :func:`_orm.column_keyed_dict`

        :ref:`orm_dictionary_collection`

        :ref:`orm_custom_collection`


    """

    def __init__(
        self,
        keyfunc: Callable[[Any], Any],
        *dict_args: Any,
        ignore_unpopulated_attribute: bool = False,
    ) -> None:
        """Create a new collection with keying provided by keyfunc.

        keyfunc may be any callable that takes an object and returns an object
        for use as a dictionary key.

        The keyfunc will be called every time the ORM needs to add a member by
        value-only (such as when loading instances from the database) or
        remove a member. The usual cautions about dictionary keying apply:
        ``keyfunc(object)`` should return the same output for the life of the
        collection. Keying based on mutable properties can result in
        unreachable instances "lost" in the collection.

        """
        self.keyfunc = keyfunc
        self.ignore_unpopulated_attribute = ignore_unpopulated_attribute
        super().__init__(*dict_args)

    @classmethod
    def _unreduce(
        cls,
        keyfunc: Callable[[Any], Any],
        values: Dict[_KT, _KT],
        adapter: Optional[CollectionAdapter] = None,
    ) -> "KeyFuncDict[_KT, _KT]":
        mp: KeyFuncDict[_KT, _KT] = KeyFuncDict(keyfunc)
        mp.update(values)
        # note that the adapter sets itself up onto this collection
        # when its `__setstate__` method is called
        return mp

    def __reduce__(
        self,
    ) -> Tuple[
        Callable[[_KT, _KT], KeyFuncDict[_KT, _KT]],
        Tuple[Any, Union[Dict[_KT, _KT], Dict[_KT, _KT]], CollectionAdapter],
    ]:
        return (
            KeyFuncDict._unreduce,
            (
                self.keyfunc,
                dict(self),
                collection_adapter(self),
            ),
        )

    @util.preload_module("sqlalchemy.orm.attributes")
    def _raise_for_unpopulated(
        self,
        value: _KT,
        initiator: Union[AttributeEventToken, Literal[None, False]] = None,
        *,
        warn_only: bool,
    ) -> None:
        mapper = base.instance_state(value).mapper

        attributes = util.preloaded.orm_attributes

        if not isinstance(initiator, attributes.AttributeEventToken):
            relationship = "unknown relationship"
        elif initiator.key in mapper.attrs:
            relationship = f"{mapper.attrs[initiator.key]}"
        else:
            relationship = initiator.key

        if warn_only:
            util.warn(
                f"Attribute keyed dictionary value for "
                f"attribute '{relationship}' was None; this will raise "
                "in a future release. "
                f"To skip this assignment entirely, "
                f'set the "ignore_unpopulated_attribute=True" '
                f"parameter on the mapped collection factory."
            )
        else:
            raise sa_exc.InvalidRequestError(
                "In event triggered from population of "
                f"attribute '{relationship}' "
                "(potentially from a backref), "
                f"can't populate value in KeyFuncDict; "
                "dictionary key "
                f"derived from {base.instance_str(value)} is not "
                f"populated. Ensure appropriate state is set up on "
                f"the {base.instance_str(value)} object "
                f"before assigning to the {relationship} attribute. "
                f"To skip this assignment entirely, "
                f'set the "ignore_unpopulated_attribute=True" '
                f"parameter on the mapped collection factory."
            )

    @collection.appender  # type: ignore[misc]
    @collection.internally_instrumented  # type: ignore[misc]
    def set(
        self,
        value: _KT,
        _sa_initiator: Union[AttributeEventToken, Literal[None, False]] = None,
    ) -> None:
        """Add an item by value, consulting the keyfunc for the key."""

        key = self.keyfunc(value)

        if key is base.NO_VALUE:
            if not self.ignore_unpopulated_attribute:
                self._raise_for_unpopulated(
                    value, _sa_initiator, warn_only=False
                )
            else:
                return
        elif key is Missing:
            if not self.ignore_unpopulated_attribute:
                self._raise_for_unpopulated(
                    value, _sa_initiator, warn_only=True
                )
                key = None
            else:
                return

        self.__setitem__(key, value, _sa_initiator)  # type: ignore[call-arg]

    @collection.remover  # type: ignore[misc]
    @collection.internally_instrumented  # type: ignore[misc]
    def remove(
        self,
        value: _KT,
        _sa_initiator: Union[AttributeEventToken, Literal[None, False]] = None,
    ) -> None:
        """Remove an item by value, consulting the keyfunc for the key."""

        key = self.keyfunc(value)

        if key is base.NO_VALUE:
            if not self.ignore_unpopulated_attribute:
                self._raise_for_unpopulated(
                    value, _sa_initiator, warn_only=False
                )
            return
        elif key is Missing:
            if not self.ignore_unpopulated_attribute:
                self._raise_for_unpopulated(
                    value, _sa_initiator, warn_only=True
                )
                key = None
            else:
                return

        # Let self[key] raise if key is not in this collection
        # testlib.pragma exempt:__ne__
        if self[key] != value:
            raise sa_exc.InvalidRequestError(
                "Can not remove '%s': collection holds '%s' for key '%s'. "
                "Possible cause: is the KeyFuncDict key function "
                "based on mutable properties or properties that only obtain "
                "values after flush?" % (value, self[key], key)
            )
        self.__delitem__(key, _sa_initiator)  # type: ignore[call-arg]

def _mapped_collection_cls(
    keyfunc: Callable[[Any], Any], ignore_unpopulated_attribute: bool
) -> Type[KeyFuncDict[_KT, _KT]]:
    class _MKeyfuncMapped(KeyFuncDict[_KT, _KT]):
        def __init__(self, *dict_args: Any) -> None:
            super().__init__(
                keyfunc,
                *dict_args,
                ignore_unpopulated_attribute=ignore_unpopulated_attribute,
            )

    return _MKeyfuncMapped

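# --- illustrative sketch, not part of the original module ---------------
# KeyFuncDict.set() above is the @collection.appender: when the ORM
# appends a loaded or backref-assigned object, the key comes from the
# keyfunc rather than user code. With a hypothetical parent whose "items"
# collection uses attribute_keyed_dict("keyword"):
def _example_set_remove(parent, item):
    parent.items.set(item)  # same effect as parent.items[item.keyword] = item
    parent.items.remove(item)  # key is recomputed; a mismatch raises the
    # InvalidRequestError constructed in remove() above
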
MappedCollection = KeyFuncDict
"""A synonym for :class:`.KeyFuncDict`.

.. versionchanged:: 2.0 Renamed :class:`.MappedCollection` to
   :class:`.KeyFuncDict`.

"""

mapped_collection = keyfunc_mapping
"""A synonym for :func:`_orm.keyfunc_mapping`.

.. versionchanged:: 2.0 Renamed :data:`.mapped_collection` to
   :func:`_orm.keyfunc_mapping`

"""

attribute_mapped_collection = attribute_keyed_dict
"""A synonym for :func:`_orm.attribute_keyed_dict`.

.. versionchanged:: 2.0 Renamed :data:`.attribute_mapped_collection` to
   :func:`_orm.attribute_keyed_dict`

"""

column_mapped_collection = column_keyed_dict
"""A synonym for :func:`_orm.column_keyed_dict`.

.. versionchanged:: 2.0 Renamed :func:`.column_mapped_collection` to
   :func:`_orm.column_keyed_dict`

"""
4431
venv/lib/python3.11/site-packages/sqlalchemy/orm/mapper.py
Normal file
File diff suppressed because it is too large
@ -0,0 +1,811 @@
# orm/path_registry.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Path tracking utilities, representing mapper graph traversals.

"""

from __future__ import annotations

from functools import reduce
from itertools import chain
import logging
import operator
from typing import Any
from typing import cast
from typing import Dict
from typing import Iterator
from typing import List
from typing import Optional
from typing import overload
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union

from . import base as orm_base
from ._typing import insp_is_mapper_property
from .. import exc
from .. import util
from ..sql import visitors
from ..sql.cache_key import HasCacheKey

if TYPE_CHECKING:
    from ._typing import _InternalEntityType
    from .interfaces import StrategizedProperty
    from .mapper import Mapper
    from .relationships import RelationshipProperty
    from .util import AliasedInsp
    from ..sql.cache_key import _CacheKeyTraversalType
    from ..sql.elements import BindParameter
    from ..sql.visitors import anon_map
    from ..util.typing import _LiteralStar
    from ..util.typing import TypeGuard

    def is_root(path: PathRegistry) -> TypeGuard[RootRegistry]: ...

    def is_entity(path: PathRegistry) -> TypeGuard[AbstractEntityRegistry]: ...

else:
    is_root = operator.attrgetter("is_root")
    is_entity = operator.attrgetter("is_entity")


_SerializedPath = List[Any]
_StrPathToken = str
_PathElementType = Union[
    _StrPathToken, "_InternalEntityType[Any]", "StrategizedProperty[Any]"
]

# the representation is in fact
# a tuple with alternating:
# [_InternalEntityType[Any], Union[str, StrategizedProperty[Any]],
#  _InternalEntityType[Any], Union[str, StrategizedProperty[Any]], ...]
# this might someday be a tuple of 2-tuples instead, but paths can be
# chopped at odd intervals as well so this is less flexible
_PathRepresentation = Tuple[_PathElementType, ...]

# NOTE: these names are weird since the array is 0-indexed,
# the "_Odd" entries are at 0, 2, 4, etc
_OddPathRepresentation = Sequence["_InternalEntityType[Any]"]
_EvenPathRepresentation = Sequence[Union["StrategizedProperty[Any]", str]]


log = logging.getLogger(__name__)


def _unreduce_path(path: _SerializedPath) -> PathRegistry:
    return PathRegistry.deserialize(path)


_WILDCARD_TOKEN: _LiteralStar = "*"
_DEFAULT_TOKEN = "_sa_default"

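# --- illustrative sketch, not part of the original module ---------------
# The "*" wildcard token above is what user-facing wildcard loader
# options reduce to; e.g. applying lazy loading to every relationship of
# a hypothetical mapped class User is spelled with the public API as:
def _example_wildcard(User):
    from sqlalchemy import select
    from sqlalchemy.orm import lazyload

    # "*" expands internally to a path token ending in ":*"
    return select(User).options(lazyload("*"))
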
class PathRegistry(HasCacheKey):
    """Represent query load paths and registry functions.

    Basically represents structures like:

    (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)

    These structures are generated by things like
    query options (joinedload(), subqueryload(), etc.) and are
    used to compose keys stored in the query._attributes dictionary
    for various options.

    They are then re-composed at query compile/result row time as
    the query is formed and as rows are fetched, where they again
    serve to compose keys to look up options in the context.attributes
    dictionary, which is copied from query._attributes.

    The path structure has a limited amount of caching, where each
    "root" ultimately pulls from a fixed registry associated with
    the first mapper, that also contains elements for each of its
    property keys. However paths longer than two elements, which
    are the exception rather than the rule, are generated on an
    as-needed basis.

    """

    __slots__ = ()

    is_token = False
    is_root = False
    has_entity = False
    is_property = False
    is_entity = False

    is_unnatural: bool

    path: _PathRepresentation
    natural_path: _PathRepresentation
    parent: Optional[PathRegistry]
    root: RootRegistry

    _cache_key_traversal: _CacheKeyTraversalType = [
        ("path", visitors.ExtendedInternalTraversal.dp_has_cache_key_list)
    ]

    def __eq__(self, other: Any) -> bool:
        try:
            return other is not None and self.path == other._path_for_compare
        except AttributeError:
            util.warn(
                "Comparison of PathRegistry to %r is not supported"
                % (type(other))
            )
            return False

    def __ne__(self, other: Any) -> bool:
        try:
            return other is None or self.path != other._path_for_compare
        except AttributeError:
            util.warn(
                "Comparison of PathRegistry to %r is not supported"
                % (type(other))
            )
            return True

    @property
    def _path_for_compare(self) -> Optional[_PathRepresentation]:
        return self.path

    def odd_element(self, index: int) -> _InternalEntityType[Any]:
        return self.path[index]  # type: ignore

    def set(self, attributes: Dict[Any, Any], key: Any, value: Any) -> None:
        log.debug("set '%s' on path '%s' to '%s'", key, self, value)
        attributes[(key, self.natural_path)] = value

    def setdefault(
        self, attributes: Dict[Any, Any], key: Any, value: Any
    ) -> None:
        log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value)
        attributes.setdefault((key, self.natural_path), value)

    def get(
        self, attributes: Dict[Any, Any], key: Any, value: Optional[Any] = None
    ) -> Any:
        key = (key, self.natural_path)
        if key in attributes:
            return attributes[key]
        else:
            return value

    def __len__(self) -> int:
        return len(self.path)

    def __hash__(self) -> int:
        return id(self)

    @overload
    def __getitem__(self, entity: _StrPathToken) -> TokenRegistry: ...

    @overload
    def __getitem__(self, entity: int) -> _PathElementType: ...

    @overload
    def __getitem__(self, entity: slice) -> _PathRepresentation: ...

    @overload
    def __getitem__(
        self, entity: _InternalEntityType[Any]
    ) -> AbstractEntityRegistry: ...

    @overload
    def __getitem__(
        self, entity: StrategizedProperty[Any]
    ) -> PropRegistry: ...

    def __getitem__(
        self,
        entity: Union[
            _StrPathToken,
            int,
            slice,
            _InternalEntityType[Any],
            StrategizedProperty[Any],
        ],
    ) -> Union[
        TokenRegistry,
        _PathElementType,
        _PathRepresentation,
        PropRegistry,
        AbstractEntityRegistry,
    ]:
        raise NotImplementedError()

    # TODO: what are we using this for?
    @property
    def length(self) -> int:
        return len(self.path)

    def pairs(
        self,
    ) -> Iterator[
        Tuple[_InternalEntityType[Any], Union[str, StrategizedProperty[Any]]]
    ]:
        odd_path = cast(_OddPathRepresentation, self.path)
        even_path = cast(_EvenPathRepresentation, odd_path)
        for i in range(0, len(odd_path), 2):
            yield odd_path[i], even_path[i + 1]

    def contains_mapper(self, mapper: Mapper[Any]) -> bool:
        _m_path = cast(_OddPathRepresentation, self.path)
        for path_mapper in [_m_path[i] for i in range(0, len(_m_path), 2)]:
            if path_mapper.mapper.isa(mapper):
                return True
        else:
            return False

    def contains(self, attributes: Dict[Any, Any], key: Any) -> bool:
        return (key, self.path) in attributes

    def __reduce__(self) -> Any:
        return _unreduce_path, (self.serialize(),)

    @classmethod
    def _serialize_path(cls, path: _PathRepresentation) -> _SerializedPath:
        _m_path = cast(_OddPathRepresentation, path)
        _p_path = cast(_EvenPathRepresentation, path)

        return list(
            zip(
                tuple(
                    m.class_ if (m.is_mapper or m.is_aliased_class) else str(m)
                    for m in [_m_path[i] for i in range(0, len(_m_path), 2)]
                ),
                tuple(
                    p.key if insp_is_mapper_property(p) else str(p)
                    for p in [_p_path[i] for i in range(1, len(_p_path), 2)]
                )
                + (None,),
            )
        )

    @classmethod
    def _deserialize_path(cls, path: _SerializedPath) -> _PathRepresentation:
        def _deserialize_mapper_token(mcls: Any) -> Any:
            return (
                # note: we likely dont want configure=True here however
                # this is maintained at the moment for backwards compatibility
                orm_base._inspect_mapped_class(mcls, configure=True)
                if mcls not in PathToken._intern
                else PathToken._intern[mcls]
            )

        def _deserialize_key_token(mcls: Any, key: Any) -> Any:
            if key is None:
                return None
            elif key in PathToken._intern:
                return PathToken._intern[key]
            else:
                mp = orm_base._inspect_mapped_class(mcls, configure=True)
                assert mp is not None
                return mp.attrs[key]

        p = tuple(
            chain(
                *[
                    (
                        _deserialize_mapper_token(mcls),
                        _deserialize_key_token(mcls, key),
                    )
                    for mcls, key in path
                ]
            )
        )
        if p and p[-1] is None:
            p = p[0:-1]
        return p

    def serialize(self) -> _SerializedPath:
        path = self.path
        return self._serialize_path(path)

    @classmethod
    def deserialize(cls, path: _SerializedPath) -> PathRegistry:
        assert path is not None
        p = cls._deserialize_path(path)
        return cls.coerce(p)

    @overload
    @classmethod
    def per_mapper(cls, mapper: Mapper[Any]) -> CachingEntityRegistry: ...

    @overload
    @classmethod
    def per_mapper(cls, mapper: AliasedInsp[Any]) -> SlotsEntityRegistry: ...

    @classmethod
    def per_mapper(
        cls, mapper: _InternalEntityType[Any]
    ) -> AbstractEntityRegistry:
        if mapper.is_mapper:
            return CachingEntityRegistry(cls.root, mapper)
        else:
            return SlotsEntityRegistry(cls.root, mapper)

    @classmethod
    def coerce(cls, raw: _PathRepresentation) -> PathRegistry:
        def _red(prev: PathRegistry, next_: _PathElementType) -> PathRegistry:
            return prev[next_]

        # can't quite get mypy to appreciate this one :)
        return reduce(_red, raw, cls.root)  # type: ignore

    def __add__(self, other: PathRegistry) -> PathRegistry:
        def _red(prev: PathRegistry, next_: _PathElementType) -> PathRegistry:
            return prev[next_]

        return reduce(_red, other.path, self)

    def __str__(self) -> str:
        return f"ORM Path[{' -> '.join(str(elem) for elem in self.path)}]"

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.path!r})"

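# --- illustrative sketch, not part of the original module ---------------
# PathRegistry objects are internal but observable; given a hypothetical
# mapped class User with a "notes" relationship, a path can be built from
# the mapper's private _path_registry entry point and round-tripped
# through serialize()/deserialize() as defined above:
def _example_path_registry(User):
    from sqlalchemy import inspect

    entity_path = inspect(User)._path_registry  # per-mapper root entry
    prop_path = entity_path[User.notes.property]  # a PropRegistry
    return PathRegistry.deserialize(prop_path.serialize())
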
class CreatesToken(PathRegistry):
    __slots__ = ()

    is_aliased_class: bool
    is_root: bool

    def token(self, token: _StrPathToken) -> TokenRegistry:
        if token.endswith(f":{_WILDCARD_TOKEN}"):
            return TokenRegistry(self, token)
        elif token.endswith(f":{_DEFAULT_TOKEN}"):
            return TokenRegistry(self.root, token)
        else:
            raise exc.ArgumentError(f"invalid token: {token}")

class RootRegistry(CreatesToken):
    """Root registry, defers to mappers so that
    paths are maintained per-root-mapper.

    """

    __slots__ = ()

    inherit_cache = True

    path = natural_path = ()
    has_entity = False
    is_aliased_class = False
    is_root = True
    is_unnatural = False

    def _getitem(
        self, entity: Any
    ) -> Union[TokenRegistry, AbstractEntityRegistry]:
        if entity in PathToken._intern:
            if TYPE_CHECKING:
                assert isinstance(entity, _StrPathToken)
            return TokenRegistry(self, PathToken._intern[entity])
        else:
            try:
                return entity._path_registry  # type: ignore
            except AttributeError:
                raise IndexError(
                    f"invalid argument for RootRegistry.__getitem__: {entity}"
                )

    def _truncate_recursive(self) -> RootRegistry:
        return self

    if not TYPE_CHECKING:
        __getitem__ = _getitem


PathRegistry.root = RootRegistry()

class PathToken(orm_base.InspectionAttr, HasCacheKey, str):
    """cacheable string token"""

    _intern: Dict[str, PathToken] = {}

    def _gen_cache_key(
        self, anon_map: anon_map, bindparams: List[BindParameter[Any]]
    ) -> Tuple[Any, ...]:
        return (str(self),)

    @property
    def _path_for_compare(self) -> Optional[_PathRepresentation]:
        return None

    @classmethod
    def intern(cls, strvalue: str) -> PathToken:
        if strvalue in cls._intern:
            return cls._intern[strvalue]
        else:
            cls._intern[strvalue] = result = PathToken(strvalue)
            return result

class TokenRegistry(PathRegistry):
    __slots__ = ("token", "parent", "path", "natural_path")

    inherit_cache = True

    token: _StrPathToken
    parent: CreatesToken

    def __init__(self, parent: CreatesToken, token: _StrPathToken):
        token = PathToken.intern(token)

        self.token = token
        self.parent = parent
        self.path = parent.path + (token,)
        self.natural_path = parent.natural_path + (token,)

    has_entity = False

    is_token = True

    def generate_for_superclasses(self) -> Iterator[PathRegistry]:
        # NOTE: this method is no longer used. consider removal
        parent = self.parent
        if is_root(parent):
            yield self
            return

        if TYPE_CHECKING:
            assert isinstance(parent, AbstractEntityRegistry)
        if not parent.is_aliased_class:
            for mp_ent in parent.mapper.iterate_to_root():
                yield TokenRegistry(parent.parent[mp_ent], self.token)
        elif (
            parent.is_aliased_class
            and cast(
                "AliasedInsp[Any]",
                parent.entity,
            )._is_with_polymorphic
        ):
            yield self
            for ent in cast(
                "AliasedInsp[Any]", parent.entity
            )._with_polymorphic_entities:
                yield TokenRegistry(parent.parent[ent], self.token)
        else:
            yield self

    def _generate_natural_for_superclasses(
        self,
    ) -> Iterator[_PathRepresentation]:
        parent = self.parent
        if is_root(parent):
            yield self.natural_path
            return

        if TYPE_CHECKING:
            assert isinstance(parent, AbstractEntityRegistry)
        for mp_ent in parent.mapper.iterate_to_root():
            yield TokenRegistry(parent.parent[mp_ent], self.token).natural_path
        if (
            parent.is_aliased_class
            and cast(
                "AliasedInsp[Any]",
                parent.entity,
            )._is_with_polymorphic
        ):
            yield self.natural_path
            for ent in cast(
                "AliasedInsp[Any]", parent.entity
            )._with_polymorphic_entities:
                yield (
                    TokenRegistry(parent.parent[ent], self.token).natural_path
                )
        else:
            yield self.natural_path

    def _getitem(self, entity: Any) -> Any:
        try:
            return self.path[entity]
        except TypeError as err:
            raise IndexError(f"{entity}") from err

    if not TYPE_CHECKING:
        __getitem__ = _getitem



class PropRegistry(PathRegistry):
    __slots__ = (
        "prop",
        "parent",
        "path",
        "natural_path",
        "has_entity",
        "entity",
        "mapper",
        "_wildcard_path_loader_key",
        "_default_path_loader_key",
        "_loader_key",
        "is_unnatural",
    )
    inherit_cache = True
    is_property = True

    prop: StrategizedProperty[Any]
    mapper: Optional[Mapper[Any]]
    entity: Optional[_InternalEntityType[Any]]

    def __init__(
        self, parent: AbstractEntityRegistry, prop: StrategizedProperty[Any]
    ):
        # restate this path in terms of the
        # given StrategizedProperty's parent.
        insp = cast("_InternalEntityType[Any]", parent[-1])
        natural_parent: AbstractEntityRegistry = parent

        # inherit "is_unnatural" from the parent
        self.is_unnatural = parent.parent.is_unnatural or bool(
            parent.mapper.inherits
        )

        if not insp.is_aliased_class or insp._use_mapper_path:  # type: ignore
            parent = natural_parent = parent.parent[prop.parent]
        elif (
            insp.is_aliased_class
            and insp.with_polymorphic_mappers
            and prop.parent in insp.with_polymorphic_mappers
        ):
            subclass_entity: _InternalEntityType[Any] = parent[-1]._entity_for_mapper(prop.parent)  # type: ignore  # noqa: E501
            parent = parent.parent[subclass_entity]

            # when building a path where with_polymorphic() is in use,
            # special logic to determine the "natural path" when subclass
            # entities are used.
            #
            # here we are trying to distinguish between a path that starts
            # on a with_polymorphic entity vs. one that starts on a
            # normal entity that introduces a with_polymorphic() in the
            # middle using of_type():
            #
            # # as in test_polymorphic_rel->
            # #     test_subqueryload_on_subclass_uses_path_correctly
            # wp = with_polymorphic(RegularEntity, "*")
            # sess.query(wp).options(someload(wp.SomeSubEntity.foos))
            #
            # vs
            #
            # # as in test_relationship->JoinedloadWPolyOfTypeContinued
            # wp = with_polymorphic(SomeFoo, "*")
            # sess.query(RegularEntity).options(
            #     someload(RegularEntity.foos.of_type(wp))
            #     .someload(wp.SubFoo.bar)
            # )
            #
            # in the former case, the Query as it generates a path that we
            # want to match will be in terms of the with_polymorphic at the
            # beginning.  in the latter case, Query will generate simple
            # paths that don't know about this with_polymorphic, so we must
            # use a separate natural path.
            #
            #
            if parent.parent:
                natural_parent = parent.parent[subclass_entity.mapper]
                self.is_unnatural = True
            else:
                natural_parent = parent
        elif (
            natural_parent.parent
            and insp.is_aliased_class
            and prop.parent  # this should always be the case here
            is not insp.mapper
            and insp.mapper.isa(prop.parent)
        ):
            natural_parent = parent.parent[prop.parent]

        self.prop = prop
        self.parent = parent
        self.path = parent.path + (prop,)
        self.natural_path = natural_parent.natural_path + (prop,)

        self.has_entity = prop._links_to_entity
        if prop._is_relationship:
            if TYPE_CHECKING:
                assert isinstance(prop, RelationshipProperty)
            self.entity = prop.entity
            self.mapper = prop.mapper
        else:
            self.entity = None
            self.mapper = None

        self._wildcard_path_loader_key = (
            "loader",
            parent.natural_path + self.prop._wildcard_token,
        )
        self._default_path_loader_key = self.prop._default_path_loader_key
        self._loader_key = ("loader", self.natural_path)

    def _truncate_recursive(self) -> PropRegistry:
        earliest = None
        for i, token in enumerate(reversed(self.path[:-1])):
            if token is self.prop:
                earliest = i

        if earliest is None:
            return self
        else:
            return self.coerce(self.path[0 : -(earliest + 1)])  # type: ignore

    @property
    def entity_path(self) -> AbstractEntityRegistry:
        assert self.entity is not None
        return self[self.entity]

    def _getitem(
        self, entity: Union[int, slice, _InternalEntityType[Any]]
    ) -> Union[AbstractEntityRegistry, _PathElementType, _PathRepresentation]:
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        else:
            return SlotsEntityRegistry(self, entity)

    if not TYPE_CHECKING:
        __getitem__ = _getitem
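
# Illustrative sketch of the structures built above (User/Address are
# hypothetical mapped classes, not defined in this module): a path
# alternates entity registries and property registries, e.g.
#
#     p = PathRegistry.root[User.__mapper__][User.addresses.property]
#     p.path  # -> (User mapper, "addresses" RelationshipProperty)
#     p[Address.__mapper__]  # extends the path into the Address entity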


class AbstractEntityRegistry(CreatesToken):
    __slots__ = (
        "key",
        "parent",
        "is_aliased_class",
        "path",
        "entity",
        "natural_path",
    )

    has_entity = True
    is_entity = True

    parent: Union[RootRegistry, PropRegistry]
    key: _InternalEntityType[Any]
    entity: _InternalEntityType[Any]
    is_aliased_class: bool

    def __init__(
        self,
        parent: Union[RootRegistry, PropRegistry],
        entity: _InternalEntityType[Any],
    ):
        self.key = entity
        self.parent = parent
        self.is_aliased_class = entity.is_aliased_class
        self.entity = entity
        self.path = parent.path + (entity,)

        # the "natural path" is the path that we get when Query is
        # traversing from the lead entities into the various relationships;
        # it corresponds to the structure of mappers and relationships.
        # when we are given a path that comes from loader options, as of
        # 1.3 it can have ad-hoc with_polymorphic() and other AliasedInsp
        # objects inside of it, which are usually not present in mappings.
        # So here we track both the "enhanced" path in self.path and the
        # "natural" path that doesn't include those objects so these two
        # traversals can be matched up.

        # the test here for "(self.is_aliased_class or parent.is_unnatural)"
        # is to avoid the more expensive conditional logic that follows if
        # we know we don't have to do it.  This conditional can just as well
        # be "if parent.path:", it just is more function calls.
        #
        # This is basically the only place that the "is_unnatural" flag
        # actually changes behavior.
        if parent.path and (self.is_aliased_class or parent.is_unnatural):
            # this is an infrequent code path used only for loader
            # strategies that also make use of of_type().
            if entity.mapper.isa(parent.natural_path[-1].mapper):  # type: ignore  # noqa: E501
                self.natural_path = parent.natural_path + (entity.mapper,)
            else:
                self.natural_path = parent.natural_path + (
                    parent.natural_path[-1].entity,  # type: ignore
                )
        # it seems to make sense that since these paths get mixed up
        # with statements that are cached or not, we should make
        # sure the natural path is cacheable across different occurrences
        # of equivalent AliasedClass objects.  however, so far this
        # does not seem to be needed for whatever reason.
        # elif not parent.path and self.is_aliased_class:
        #     self.natural_path = (self.entity._generate_cache_key()[0], )
        else:
            self.natural_path = self.path

    def _truncate_recursive(self) -> AbstractEntityRegistry:
        return self.parent._truncate_recursive()[self.entity]

    @property
    def root_entity(self) -> _InternalEntityType[Any]:
        return self.odd_element(0)

    @property
    def entity_path(self) -> PathRegistry:
        return self

    @property
    def mapper(self) -> Mapper[Any]:
        return self.entity.mapper

    def __bool__(self) -> bool:
        return True

    def _getitem(
        self, entity: Any
    ) -> Union[_PathElementType, _PathRepresentation, PathRegistry]:
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        elif entity in PathToken._intern:
            return TokenRegistry(self, PathToken._intern[entity])
        else:
            return PropRegistry(self, entity)

    if not TYPE_CHECKING:
        __getitem__ = _getitem
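
# Illustrative sketch of the _getitem() dispatch above (the argument values
# are hypothetical): indexing an entity registry routes on the key type --
#
#     reg[0]            # int or slice: raw element(s) of reg.path
#     reg["rel:*"]      # a previously interned token string: TokenRegistry
#     reg[some_prop]    # anything else: PropRegistry(reg, some_prop)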


class SlotsEntityRegistry(AbstractEntityRegistry):
    # for aliased class, return lightweight, no-cycles created
    # version
    inherit_cache = True


class _ERDict(Dict[Any, Any]):
    def __init__(self, registry: CachingEntityRegistry):
        self.registry = registry

    def __missing__(self, key: Any) -> PropRegistry:
        self[key] = item = PropRegistry(self.registry, key)

        return item


class CachingEntityRegistry(AbstractEntityRegistry):
    # for long lived mapper, return dict based caching
    # version that creates reference cycles

    __slots__ = ("_cache",)

    inherit_cache = True

    def __init__(
        self,
        parent: Union[RootRegistry, PropRegistry],
        entity: _InternalEntityType[Any],
    ):
        super().__init__(parent, entity)
        self._cache = _ERDict(self)

    def pop(self, key: Any, default: Any) -> Any:
        return self._cache.pop(key, default)

    def _getitem(self, entity: Any) -> Any:
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        elif isinstance(entity, PathToken):
            return TokenRegistry(self, entity)
        else:
            return self._cache[entity]

    if not TYPE_CHECKING:
        __getitem__ = _getitem


if TYPE_CHECKING:

    def path_is_entity(
        path: PathRegistry,
    ) -> TypeGuard[AbstractEntityRegistry]: ...

    def path_is_property(path: PathRegistry) -> TypeGuard[PropRegistry]: ...

else:
    path_is_entity = operator.attrgetter("is_entity")
    path_is_property = operator.attrgetter("is_property")

1782
venv/lib/python3.11/site-packages/sqlalchemy/orm/persistence.py
Normal file
File diff suppressed because it is too large
886
venv/lib/python3.11/site-packages/sqlalchemy/orm/properties.py
Normal file
@@ -0,0 +1,886 @@
# orm/properties.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

"""MapperProperty implementations.

This is a private module which defines the behavior of individual ORM-
mapped attributes.

"""

from __future__ import annotations

from typing import Any
from typing import cast
from typing import Dict
from typing import List
from typing import Optional
from typing import Sequence
from typing import Set
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar
from typing import Union

from . import attributes
from . import exc as orm_exc
from . import strategy_options
from .base import _DeclarativeMapped
from .base import class_mapper
from .descriptor_props import CompositeProperty
from .descriptor_props import ConcreteInheritedProperty
from .descriptor_props import SynonymProperty
from .interfaces import _AttributeOptions
from .interfaces import _DEFAULT_ATTRIBUTE_OPTIONS
from .interfaces import _IntrospectsAnnotations
from .interfaces import _MapsColumns
from .interfaces import MapperProperty
from .interfaces import PropComparator
from .interfaces import StrategizedProperty
from .relationships import RelationshipProperty
from .util import de_stringify_annotation
from .. import exc as sa_exc
from .. import ForeignKey
from .. import log
from .. import util
from ..sql import coercions
from ..sql import roles
from ..sql.base import _NoArg
from ..sql.schema import Column
from ..sql.schema import SchemaConst
from ..sql.type_api import TypeEngine
from ..util.typing import de_optionalize_union_types
from ..util.typing import get_args
from ..util.typing import includes_none
from ..util.typing import is_a_type
from ..util.typing import is_fwd_ref
from ..util.typing import is_pep593
from ..util.typing import is_pep695
from ..util.typing import Self

if TYPE_CHECKING:
    from ._typing import _IdentityKeyType
    from ._typing import _InstanceDict
    from ._typing import _ORMColumnExprArgument
    from ._typing import _RegistryType
    from .base import Mapped
    from .decl_base import _ClassScanMapperConfig
    from .mapper import Mapper
    from .session import Session
    from .state import _InstallLoaderCallableProto
    from .state import InstanceState
    from ..sql._typing import _InfoType
    from ..sql.elements import ColumnElement
    from ..sql.elements import NamedColumn
    from ..sql.operators import OperatorType
    from ..util.typing import _AnnotationScanType
    from ..util.typing import RODescriptorReference

_T = TypeVar("_T", bound=Any)
_PT = TypeVar("_PT", bound=Any)
_NC = TypeVar("_NC", bound="NamedColumn[Any]")

__all__ = [
    "ColumnProperty",
    "CompositeProperty",
    "ConcreteInheritedProperty",
    "RelationshipProperty",
    "SynonymProperty",
]


@log.class_logger
class ColumnProperty(
    _MapsColumns[_T],
    StrategizedProperty[_T],
    _IntrospectsAnnotations,
    log.Identified,
):
    """Describes an object attribute that corresponds to a table column
    or other column expression.

    Public constructor is the :func:`_orm.column_property` function.

    """

    strategy_wildcard_key = strategy_options._COLUMN_TOKEN
    inherit_cache = True
    """:meta private:"""

    _links_to_entity = False

    columns: List[NamedColumn[Any]]

    _is_polymorphic_discriminator: bool

    _mapped_by_synonym: Optional[str]

    comparator_factory: Type[PropComparator[_T]]

    __slots__ = (
        "columns",
        "group",
        "deferred",
        "instrument",
        "comparator_factory",
        "active_history",
        "expire_on_flush",
        "_creation_order",
        "_is_polymorphic_discriminator",
        "_mapped_by_synonym",
        "_deferred_column_loader",
        "_raise_column_loader",
        "_renders_in_subqueries",
        "raiseload",
    )

    def __init__(
        self,
        column: _ORMColumnExprArgument[_T],
        *additional_columns: _ORMColumnExprArgument[Any],
        attribute_options: Optional[_AttributeOptions] = None,
        group: Optional[str] = None,
        deferred: bool = False,
        raiseload: bool = False,
        comparator_factory: Optional[Type[PropComparator[_T]]] = None,
        active_history: bool = False,
        expire_on_flush: bool = True,
        info: Optional[_InfoType] = None,
        doc: Optional[str] = None,
        _instrument: bool = True,
        _assume_readonly_dc_attributes: bool = False,
    ):
        super().__init__(
            attribute_options=attribute_options,
            _assume_readonly_dc_attributes=_assume_readonly_dc_attributes,
        )
        columns = (column,) + additional_columns
        self.columns = [
            coercions.expect(roles.LabeledColumnExprRole, c) for c in columns
        ]
        self.group = group
        self.deferred = deferred
        self.raiseload = raiseload
        self.instrument = _instrument
        self.comparator_factory = (
            comparator_factory
            if comparator_factory is not None
            else self.__class__.Comparator
        )
        self.active_history = active_history
        self.expire_on_flush = expire_on_flush

        if info is not None:
            self.info.update(info)

        if doc is not None:
            self.doc = doc
        else:
            for col in reversed(self.columns):
                doc = getattr(col, "doc", None)
                if doc is not None:
                    self.doc = doc
                    break
            else:
                self.doc = None

        util.set_creation_order(self)

        self.strategy_key = (
            ("deferred", self.deferred),
            ("instrument", self.instrument),
        )
        if self.raiseload:
            self.strategy_key += (("raiseload", True),)

    def declarative_scan(
        self,
        decl_scan: _ClassScanMapperConfig,
        registry: _RegistryType,
        cls: Type[Any],
        originating_module: Optional[str],
        key: str,
        mapped_container: Optional[Type[Mapped[Any]]],
        annotation: Optional[_AnnotationScanType],
        extracted_mapped_annotation: Optional[_AnnotationScanType],
        is_dataclass_field: bool,
    ) -> None:
        column = self.columns[0]
        if column.key is None:
            column.key = key
        if column.name is None:
            column.name = key

    @property
    def mapper_property_to_assign(self) -> Optional[MapperProperty[_T]]:
        return self

    @property
    def columns_to_assign(self) -> List[Tuple[Column[Any], int]]:
        # mypy doesn't care about the isinstance here
        return [
            (c, 0)  # type: ignore
            for c in self.columns
            if isinstance(c, Column) and c.table is None
        ]

    def _memoized_attr__renders_in_subqueries(self) -> bool:
        if ("query_expression", True) in self.strategy_key:
            return self.strategy._have_default_expression  # type: ignore

        return ("deferred", True) not in self.strategy_key or (
            self not in self.parent._readonly_props  # type: ignore
        )

    @util.preload_module("sqlalchemy.orm.state", "sqlalchemy.orm.strategies")
    def _memoized_attr__deferred_column_loader(
        self,
    ) -> _InstallLoaderCallableProto[Any]:
        state = util.preloaded.orm_state
        strategies = util.preloaded.orm_strategies
        return state.InstanceState._instance_level_callable_processor(
            self.parent.class_manager,
            strategies.LoadDeferredColumns(self.key),
            self.key,
        )

    @util.preload_module("sqlalchemy.orm.state", "sqlalchemy.orm.strategies")
    def _memoized_attr__raise_column_loader(
        self,
    ) -> _InstallLoaderCallableProto[Any]:
        state = util.preloaded.orm_state
        strategies = util.preloaded.orm_strategies
        return state.InstanceState._instance_level_callable_processor(
            self.parent.class_manager,
            strategies.LoadDeferredColumns(self.key, True),
            self.key,
        )

    def __clause_element__(self) -> roles.ColumnsClauseRole:
        """Allow the ColumnProperty to work in expressions before it is
        turned into an instrumented attribute.
        """

        return self.expression

    @property
    def expression(self) -> roles.ColumnsClauseRole:
        """Return the primary column or expression for this ColumnProperty.

        E.g.::


            class File(Base):
                # ...

                name = Column(String(64))
                extension = Column(String(8))
                filename = column_property(name + "." + extension)
                path = column_property("C:/" + filename.expression)

        .. seealso::

            :ref:`mapper_column_property_sql_expressions_composed`

        """
        return self.columns[0]

    def instrument_class(self, mapper: Mapper[Any]) -> None:
        if not self.instrument:
            return

        attributes.register_descriptor(
            mapper.class_,
            self.key,
            comparator=self.comparator_factory(self, mapper),
            parententity=mapper,
            doc=self.doc,
        )

    def do_init(self) -> None:
        super().do_init()

        if len(self.columns) > 1 and set(self.parent.primary_key).issuperset(
            self.columns
        ):
            util.warn(
                (
                    "On mapper %s, primary key column '%s' is being combined "
                    "with distinct primary key column '%s' in attribute '%s'. "
                    "Use explicit properties to give each column its own "
                    "mapped attribute name."
                )
                % (self.parent, self.columns[1], self.columns[0], self.key)
            )

    def copy(self) -> ColumnProperty[_T]:
        return ColumnProperty(
            *self.columns,
            deferred=self.deferred,
            group=self.group,
            active_history=self.active_history,
        )

    def merge(
        self,
        session: Session,
        source_state: InstanceState[Any],
        source_dict: _InstanceDict,
        dest_state: InstanceState[Any],
        dest_dict: _InstanceDict,
        load: bool,
        _recursive: Dict[Any, object],
        _resolve_conflict_map: Dict[_IdentityKeyType[Any], object],
    ) -> None:
        if not self.instrument:
            return
        elif self.key in source_dict:
            value = source_dict[self.key]

            if not load:
                dest_dict[self.key] = value
            else:
                impl = dest_state.get_impl(self.key)
                impl.set(dest_state, dest_dict, value, None)
        elif dest_state.has_identity and self.key not in dest_dict:
            dest_state._expire_attributes(
                dest_dict, [self.key], no_loader=True
            )

    class Comparator(util.MemoizedSlots, PropComparator[_PT]):
        """Produce boolean, comparison, and other operators for
        :class:`.ColumnProperty` attributes.

        See the documentation for :class:`.PropComparator` for a brief
        overview.

        .. seealso::

            :class:`.PropComparator`

            :class:`.ColumnOperators`

            :ref:`types_operators`

            :attr:`.TypeEngine.comparator_factory`

        """

        if not TYPE_CHECKING:
            # prevent pylance from being clever about slots
            __slots__ = "__clause_element__", "info", "expressions"

        prop: RODescriptorReference[ColumnProperty[_PT]]

        expressions: Sequence[NamedColumn[Any]]
        """The full sequence of columns referenced by this
        attribute, adjusted for any aliasing in progress.

        .. versionadded:: 1.3.17

        .. seealso::

            :ref:`maptojoin` - usage example
        """

        def _orm_annotate_column(self, column: _NC) -> _NC:
            """annotate and possibly adapt a column to be returned
            as the mapped-attribute exposed version of the column.

            The column in this context needs to act as much like the
            column in an ORM mapped context as possible, so includes
            annotations to give hints to various ORM functions as to
            the source entity of this column.  It also adapts it
            to the mapper's with_polymorphic selectable if one is
            present.

            """

            pe = self._parententity
            annotations: Dict[str, Any] = {
                "entity_namespace": pe,
                "parententity": pe,
                "parentmapper": pe,
                "proxy_key": self.prop.key,
            }

            col = column

            # for a mapper with polymorphic_on and an adapter, return
            # the column against the polymorphic selectable.
            # see also orm.util._orm_downgrade_polymorphic_columns
            # for the reverse operation.
            if self._parentmapper._polymorphic_adapter:
                mapper_local_col = col
                col = self._parentmapper._polymorphic_adapter.traverse(col)

                # this is a clue to the ORM Query etc. that this column
                # was adapted to the mapper's polymorphic_adapter.  the
                # ORM uses this hint to know which column it's adapting.
                annotations["adapt_column"] = mapper_local_col

            return col._annotate(annotations)._set_propagate_attrs(
                {"compile_state_plugin": "orm", "plugin_subject": pe}
            )

        if TYPE_CHECKING:

            def __clause_element__(self) -> NamedColumn[_PT]: ...

        def _memoized_method___clause_element__(
            self,
        ) -> NamedColumn[_PT]:
            if self.adapter:
                return self.adapter(self.prop.columns[0], self.prop.key)
            else:
                return self._orm_annotate_column(self.prop.columns[0])

        def _memoized_attr_info(self) -> _InfoType:
            """The .info dictionary for this attribute."""

            ce = self.__clause_element__()
            try:
                return ce.info  # type: ignore
            except AttributeError:
                return self.prop.info

        def _memoized_attr_expressions(self) -> Sequence[NamedColumn[Any]]:
            """The full sequence of columns referenced by this
            attribute, adjusted for any aliasing in progress.

            .. versionadded:: 1.3.17

            """
            if self.adapter:
                return [
                    self.adapter(col, self.prop.key)
                    for col in self.prop.columns
                ]
            else:
                return [
                    self._orm_annotate_column(col) for col in self.prop.columns
                ]

        def _fallback_getattr(self, key: str) -> Any:
            """proxy attribute access down to the mapped column.

            this allows user-defined comparison methods to be accessed.
            """
            return getattr(self.__clause_element__(), key)

        def operate(
            self, op: OperatorType, *other: Any, **kwargs: Any
        ) -> ColumnElement[Any]:
            return op(self.__clause_element__(), *other, **kwargs)  # type: ignore[no-any-return]  # noqa: E501

        def reverse_operate(
            self, op: OperatorType, other: Any, **kwargs: Any
        ) -> ColumnElement[Any]:
            col = self.__clause_element__()
            return op(col._bind_param(op, other), col, **kwargs)  # type: ignore[no-any-return]  # noqa: E501

    def __str__(self) -> str:
        if not self.parent or not self.key:
            return object.__repr__(self)
        return str(self.parent.class_.__name__) + "." + self.key
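
# Illustrative note (select/File are assumed from the docstring example
# above, not defined here): because Comparator.operate() delegates to
# __clause_element__(), a composed column_property participates in SQL
# expressions like a plain column --
#
#     stmt = select(File).where(File.filename == "setup.py")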


class MappedSQLExpression(ColumnProperty[_T], _DeclarativeMapped[_T]):
    """Declarative front-end for the :class:`.ColumnProperty` class.

    Public constructor is the :func:`_orm.column_property` function.

    .. versionchanged:: 2.0 Added :class:`_orm.MappedSQLExpression` as
       a Declarative compatible subclass for :class:`_orm.ColumnProperty`.

    .. seealso::

        :class:`.MappedColumn`

    """

    inherit_cache = True
    """:meta private:"""


class MappedColumn(
    _IntrospectsAnnotations,
    _MapsColumns[_T],
    _DeclarativeMapped[_T],
):
    """Maps a single :class:`_schema.Column` on a class.

    :class:`_orm.MappedColumn` is a specialization of the
    :class:`_orm.ColumnProperty` class and is oriented towards declarative
    configuration.

    To construct :class:`_orm.MappedColumn` objects, use the
    :func:`_orm.mapped_column` constructor function.

    .. versionadded:: 2.0


    """

    __slots__ = (
        "column",
        "_creation_order",
        "_sort_order",
        "foreign_keys",
        "_has_nullable",
        "_has_insert_default",
        "deferred",
        "deferred_group",
        "deferred_raiseload",
        "active_history",
        "_attribute_options",
        "_has_dataclass_arguments",
        "_use_existing_column",
    )

    deferred: Union[_NoArg, bool]
    deferred_raiseload: bool
    deferred_group: Optional[str]

    column: Column[_T]
    foreign_keys: Optional[Set[ForeignKey]]
    _attribute_options: _AttributeOptions

    def __init__(self, *arg: Any, **kw: Any):
        self._attribute_options = attr_opts = kw.pop(
            "attribute_options", _DEFAULT_ATTRIBUTE_OPTIONS
        )

        self._use_existing_column = kw.pop("use_existing_column", False)

        self._has_dataclass_arguments = (
            attr_opts is not None
            and attr_opts != _DEFAULT_ATTRIBUTE_OPTIONS
            and any(
                attr_opts[i] is not _NoArg.NO_ARG
                for i, attr in enumerate(attr_opts._fields)
                if attr != "dataclasses_default"
            )
        )

        insert_default = kw.pop("insert_default", _NoArg.NO_ARG)
        self._has_insert_default = insert_default is not _NoArg.NO_ARG

        if self._has_insert_default:
            kw["default"] = insert_default
        elif attr_opts.dataclasses_default is not _NoArg.NO_ARG:
            kw["default"] = attr_opts.dataclasses_default

        self.deferred_group = kw.pop("deferred_group", None)
        self.deferred_raiseload = kw.pop("deferred_raiseload", None)
        self.deferred = kw.pop("deferred", _NoArg.NO_ARG)
        self.active_history = kw.pop("active_history", False)

        self._sort_order = kw.pop("sort_order", _NoArg.NO_ARG)
        self.column = cast("Column[_T]", Column(*arg, **kw))
        self.foreign_keys = self.column.foreign_keys
        self._has_nullable = "nullable" in kw and kw.get("nullable") not in (
            None,
            SchemaConst.NULL_UNSPECIFIED,
        )
        util.set_creation_order(self)

    def _copy(self, **kw: Any) -> Self:
        new = self.__class__.__new__(self.__class__)
        new.column = self.column._copy(**kw)
        new.deferred = self.deferred
        new.deferred_group = self.deferred_group
        new.deferred_raiseload = self.deferred_raiseload
        new.foreign_keys = new.column.foreign_keys
        new.active_history = self.active_history
        new._has_nullable = self._has_nullable
        new._attribute_options = self._attribute_options
        new._has_insert_default = self._has_insert_default
        new._has_dataclass_arguments = self._has_dataclass_arguments
        new._use_existing_column = self._use_existing_column
        new._sort_order = self._sort_order
        util.set_creation_order(new)
        return new

    @property
    def name(self) -> str:
        return self.column.name

    @property
    def mapper_property_to_assign(self) -> Optional[MapperProperty[_T]]:
        effective_deferred = self.deferred
        if effective_deferred is _NoArg.NO_ARG:
            effective_deferred = bool(
                self.deferred_group or self.deferred_raiseload
            )

        if effective_deferred or self.active_history:
            return ColumnProperty(
                self.column,
                deferred=effective_deferred,
                group=self.deferred_group,
                raiseload=self.deferred_raiseload,
                attribute_options=self._attribute_options,
                active_history=self.active_history,
            )
        else:
            return None

    @property
    def columns_to_assign(self) -> List[Tuple[Column[Any], int]]:
        return [
            (
                self.column,
                (
                    self._sort_order
                    if self._sort_order is not _NoArg.NO_ARG
                    else 0
                ),
            )
        ]

    def __clause_element__(self) -> Column[_T]:
        return self.column

    def operate(
        self, op: OperatorType, *other: Any, **kwargs: Any
    ) -> ColumnElement[Any]:
        return op(self.__clause_element__(), *other, **kwargs)  # type: ignore[no-any-return]  # noqa: E501

    def reverse_operate(
        self, op: OperatorType, other: Any, **kwargs: Any
    ) -> ColumnElement[Any]:
        col = self.__clause_element__()
        return op(col._bind_param(op, other), col, **kwargs)  # type: ignore[no-any-return]  # noqa: E501

    def found_in_pep593_annotated(self) -> Any:
        # return a blank mapped_column().  This mapped_column()'s
        # Column will be merged into it in _init_column_for_annotation().
        return MappedColumn()

    def declarative_scan(
        self,
        decl_scan: _ClassScanMapperConfig,
        registry: _RegistryType,
        cls: Type[Any],
        originating_module: Optional[str],
        key: str,
        mapped_container: Optional[Type[Mapped[Any]]],
        annotation: Optional[_AnnotationScanType],
        extracted_mapped_annotation: Optional[_AnnotationScanType],
        is_dataclass_field: bool,
    ) -> None:
        column = self.column

        if (
            self._use_existing_column
            and decl_scan.inherits
            and decl_scan.single
        ):
            if decl_scan.is_deferred:
                raise sa_exc.ArgumentError(
                    "Can't use use_existing_column with deferred mappers"
                )
            supercls_mapper = class_mapper(decl_scan.inherits, False)

            colname = column.name if column.name is not None else key
            column = self.column = supercls_mapper.local_table.c.get(  # type: ignore[assignment]  # noqa: E501
                colname, column
            )

        if column.key is None:
            column.key = key
        if column.name is None:
            column.name = key

        sqltype = column.type

        if extracted_mapped_annotation is None:
            if sqltype._isnull and not self.column.foreign_keys:
                self._raise_for_required(key, cls)
            else:
                return

        self._init_column_for_annotation(
            cls,
            registry,
            extracted_mapped_annotation,
            originating_module,
        )

    @util.preload_module("sqlalchemy.orm.decl_base")
    def declarative_scan_for_composite(
        self,
        registry: _RegistryType,
        cls: Type[Any],
        originating_module: Optional[str],
        key: str,
        param_name: str,
        param_annotation: _AnnotationScanType,
    ) -> None:
        decl_base = util.preloaded.orm_decl_base
        decl_base._undefer_column_name(param_name, self.column)
        self._init_column_for_annotation(
            cls, registry, param_annotation, originating_module
        )

    def _init_column_for_annotation(
        self,
        cls: Type[Any],
        registry: _RegistryType,
        argument: _AnnotationScanType,
        originating_module: Optional[str],
    ) -> None:
        sqltype = self.column.type

        if is_fwd_ref(
            argument, check_generic=True, check_for_plain_string=True
        ):
            assert originating_module is not None
            argument = de_stringify_annotation(
                cls, argument, originating_module, include_generic=True
            )

        nullable = includes_none(argument)

        if not self._has_nullable:
            self.column.nullable = nullable

        our_type = de_optionalize_union_types(argument)

        find_mapped_in: Tuple[Any, ...] = ()
        our_type_is_pep593 = False
        raw_pep_593_type = None

        if is_pep593(our_type):
            our_type_is_pep593 = True

            pep_593_components = get_args(our_type)
            raw_pep_593_type = pep_593_components[0]
            if nullable:
                raw_pep_593_type = de_optionalize_union_types(raw_pep_593_type)
            find_mapped_in = pep_593_components[1:]
        elif is_pep695(argument) and is_pep593(argument.__value__):
            # do not support nested annotation inside unions, etc.
            find_mapped_in = get_args(argument.__value__)[1:]

        use_args_from: Optional[MappedColumn[Any]]
        for elem in find_mapped_in:
            if isinstance(elem, MappedColumn):
                use_args_from = elem
                break
        else:
            use_args_from = None

        if use_args_from is not None:
            if (
                not self._has_insert_default
                and use_args_from.column.default is not None
            ):
                self.column.default = None

            use_args_from.column._merge(self.column)
            sqltype = self.column.type

            if (
                use_args_from.deferred is not _NoArg.NO_ARG
                and self.deferred is _NoArg.NO_ARG
            ):
                self.deferred = use_args_from.deferred

            if (
                use_args_from.deferred_group is not None
                and self.deferred_group is None
            ):
                self.deferred_group = use_args_from.deferred_group

            if (
                use_args_from.deferred_raiseload is not None
                and self.deferred_raiseload is None
            ):
                self.deferred_raiseload = use_args_from.deferred_raiseload

            if (
                use_args_from._use_existing_column
                and not self._use_existing_column
            ):
                self._use_existing_column = True

            if use_args_from.active_history:
                self.active_history = use_args_from.active_history

            if (
                use_args_from._sort_order is not None
                and self._sort_order is _NoArg.NO_ARG
            ):
                self._sort_order = use_args_from._sort_order

            if (
                use_args_from.column.key is not None
                or use_args_from.column.name is not None
            ):
                util.warn_deprecated(
                    "Can't use the 'key' or 'name' arguments in "
                    "Annotated with mapped_column(); this will be ignored",
                    "2.0.22",
                )

            if use_args_from._has_dataclass_arguments:
                for idx, arg in enumerate(
                    use_args_from._attribute_options._fields
                ):
                    if (
                        use_args_from._attribute_options[idx]
                        is not _NoArg.NO_ARG
                    ):
                        arg = arg.replace("dataclasses_", "")
                        util.warn_deprecated(
                            f"Argument '{arg}' is a dataclass argument and "
                            "cannot be specified within a mapped_column() "
                            "bundled inside of an Annotated object",
                            "2.0.22",
                        )

        if sqltype._isnull and not self.column.foreign_keys:
            new_sqltype = None

            checks: List[Any]
            if our_type_is_pep593:
                checks = [our_type, raw_pep_593_type]
            else:
                checks = [our_type]

            for check_type in checks:
                new_sqltype = registry._resolve_type(check_type)
                if new_sqltype is not None:
                    break
            else:
                if isinstance(our_type, TypeEngine) or (
                    isinstance(our_type, type)
                    and issubclass(our_type, TypeEngine)
                ):
                    raise orm_exc.MappedAnnotationError(
                        f"The type provided inside the {self.column.key!r} "
                        "attribute Mapped annotation is the SQLAlchemy type "
                        f"{our_type}. Expected a Python type instead"
                    )
                elif is_a_type(our_type):
                    raise orm_exc.MappedAnnotationError(
                        "Could not locate SQLAlchemy Core type for Python "
                        f"type {our_type} inside the {self.column.key!r} "
                        "attribute Mapped annotation"
                    )
                else:
                    raise orm_exc.MappedAnnotationError(
                        f"The object provided inside the {self.column.key!r} "
                        "attribute Mapped annotation is not a Python type, "
                        f"it's the object {our_type!r}. Expected a Python "
                        "type."
                    )

            self.column._set_type(new_sqltype)
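
# Illustrative sketch of the Annotated merge path above (Base and intpk are
# hypothetical declarative names; the pattern is standard SQLAlchemy 2.0
# usage): a mapped_column() bundled inside Annotated is located via
# found_in_pep593_annotated() and merged into the class-level column --
#
#     intpk = Annotated[int, mapped_column(primary_key=True)]
#
#     class User(Base):
#         __tablename__ = "user_account"
#         id: Mapped[intpk]  # inherits primary_key=True from the Annotated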

3454
venv/lib/python3.11/site-packages/sqlalchemy/orm/query.py
Normal file
File diff suppressed because it is too large
3514
venv/lib/python3.11/site-packages/sqlalchemy/orm/relationships.py
Normal file
File diff suppressed because it is too large
2163
venv/lib/python3.11/site-packages/sqlalchemy/orm/scoping.py
Normal file
File diff suppressed because it is too large
5302
venv/lib/python3.11/site-packages/sqlalchemy/orm/session.py
Normal file
File diff suppressed because it is too large
1143
venv/lib/python3.11/site-packages/sqlalchemy/orm/state.py
Normal file
File diff suppressed because it is too large
@@ -0,0 +1,198 @@
# orm/state_changes.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

"""State tracking utilities used by :class:`_orm.Session`.

"""

from __future__ import annotations

import contextlib
from enum import Enum
from typing import Any
from typing import Callable
from typing import cast
from typing import Iterator
from typing import NoReturn
from typing import Optional
from typing import Tuple
from typing import TypeVar
from typing import Union

from .. import exc as sa_exc
from .. import util
from ..util.typing import Literal

_F = TypeVar("_F", bound=Callable[..., Any])


class _StateChangeState(Enum):
    pass


class _StateChangeStates(_StateChangeState):
    ANY = 1
    NO_CHANGE = 2
    CHANGE_IN_PROGRESS = 3


class _StateChange:
    """Supplies state assertion decorators.

    The current use case is for the :class:`_orm.SessionTransaction` class.
    The :class:`_StateChange` class itself is agnostic of the
    :class:`_orm.SessionTransaction` class so could in theory be generalized
    for other systems as well.

    """

    _next_state: _StateChangeState = _StateChangeStates.ANY
    _state: _StateChangeState = _StateChangeStates.NO_CHANGE
    _current_fn: Optional[Callable[..., Any]] = None

    def _raise_for_prerequisite_state(
        self, operation_name: str, state: _StateChangeState
    ) -> NoReturn:
        raise sa_exc.IllegalStateChangeError(
            f"Can't run operation '{operation_name}()' when Session "
            f"is in state {state!r}",
            code="isce",
        )

    @classmethod
    def declare_states(
        cls,
        prerequisite_states: Union[
            Literal[_StateChangeStates.ANY], Tuple[_StateChangeState, ...]
        ],
        moves_to: _StateChangeState,
    ) -> Callable[[_F], _F]:
        """Method decorator declaring valid states.

        :param prerequisite_states: sequence of acceptable prerequisite
         states.  Can be the single constant _State.ANY to indicate no
         prerequisite state

        :param moves_to: the expected state at the end of the method, assuming
         no exceptions raised.  Can be the constant _State.NO_CHANGE to
         indicate state should not change at the end of the method.

        """
        assert prerequisite_states, "no prerequisite states sent"
        has_prerequisite_states = (
            prerequisite_states is not _StateChangeStates.ANY
        )

        prerequisite_state_collection = cast(
            "Tuple[_StateChangeState, ...]", prerequisite_states
        )
        expect_state_change = moves_to is not _StateChangeStates.NO_CHANGE

        @util.decorator
        def _go(fn: _F, self: Any, *arg: Any, **kw: Any) -> Any:
            current_state = self._state

            if (
                has_prerequisite_states
                and current_state not in prerequisite_state_collection
            ):
                self._raise_for_prerequisite_state(fn.__name__, current_state)

            next_state = self._next_state
            existing_fn = self._current_fn
            expect_state = moves_to if expect_state_change else current_state

            if (
                # destination states are restricted
                next_state is not _StateChangeStates.ANY
                # method seeks to change state
                and expect_state_change
                # destination state incorrect
                and next_state is not expect_state
            ):
                if existing_fn and next_state in (
                    _StateChangeStates.NO_CHANGE,
                    _StateChangeStates.CHANGE_IN_PROGRESS,
                ):
                    raise sa_exc.IllegalStateChangeError(
                        f"Method '{fn.__name__}()' can't be called here; "
                        f"method '{existing_fn.__name__}()' is already "
                        f"in progress and this would cause an unexpected "
                        f"state change to {moves_to!r}",
                        code="isce",
                    )
                else:
                    raise sa_exc.IllegalStateChangeError(
                        f"Can't run operation '{fn.__name__}()' here; "
                        f"will move to state {moves_to!r} where we are "
                        f"expecting {next_state!r}",
                        code="isce",
                    )

            self._current_fn = fn
            self._next_state = _StateChangeStates.CHANGE_IN_PROGRESS
            try:
                ret_value = fn(self, *arg, **kw)
            except:
                raise
            else:
                if self._state is expect_state:
                    return ret_value

                if self._state is current_state:
                    raise sa_exc.IllegalStateChangeError(
                        f"Method '{fn.__name__}()' failed to "
                        "change state "
                        f"to {moves_to!r} as expected",
                        code="isce",
                    )
                elif existing_fn:
                    raise sa_exc.IllegalStateChangeError(
                        f"While method '{existing_fn.__name__}()' was "
                        "running, "
                        f"method '{fn.__name__}()' caused an "
                        "unexpected "
                        f"state change to {self._state!r}",
                        code="isce",
                    )
                else:
                    raise sa_exc.IllegalStateChangeError(
                        f"Method '{fn.__name__}()' caused an unexpected "
                        f"state change to {self._state!r}",
                        code="isce",
                    )

            finally:
                self._next_state = next_state
                self._current_fn = existing_fn

        return _go

    @contextlib.contextmanager
    def _expect_state(self, expected: _StateChangeState) -> Iterator[Any]:
        """called within a method that changes states.

        method must also use the ``@declare_states()`` decorator.

        """
        assert self._next_state is _StateChangeStates.CHANGE_IN_PROGRESS, (
            "Unexpected call to _expect_state outside of "
            "state-changing method"
        )

        self._next_state = expected
        try:
            yield
        except:
            raise
        else:
            if self._state is not expected:
                raise sa_exc.IllegalStateChangeError(
                    f"Unexpected state change to {self._state!r}", code="isce"
                )
        finally:
            self._next_state = _StateChangeStates.CHANGE_IN_PROGRESS
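
# Illustrative sketch (the _WidgetStates enum and Widget class are
# hypothetical; the decorator protocol is the one defined above): a
# subclass declares legal source states and the destination state --
#
#     class _WidgetStates(_StateChangeState):
#         IDLE = 1
#         ACTIVE = 2
#
#     class Widget(_StateChange):
#         @_StateChange.declare_states(
#             (_WidgetStates.IDLE,), _WidgetStates.ACTIVE
#         )
#         def activate(self) -> None:
#             self._state = _WidgetStates.ACTIVE
#
# Calling activate() outside of IDLE, or returning without landing on
# ACTIVE, raises IllegalStateChangeError.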

3473
venv/lib/python3.11/site-packages/sqlalchemy/orm/strategies.py
Normal file
File diff suppressed because it is too large
2550
venv/lib/python3.11/site-packages/sqlalchemy/orm/strategy_options.py
Normal file
File diff suppressed because it is too large
164
venv/lib/python3.11/site-packages/sqlalchemy/orm/sync.py
Normal file
@@ -0,0 +1,164 @@
# orm/sync.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: allow-untyped-defs, allow-untyped-calls


"""private module containing functions used for copying data
between instances based on join conditions.

"""

from __future__ import annotations

from . import exc
from . import util as orm_util
from .base import PassiveFlag


def populate(
    source,
    source_mapper,
    dest,
    dest_mapper,
    synchronize_pairs,
    uowcommit,
    flag_cascaded_pks,
):
    source_dict = source.dict
    dest_dict = dest.dict

    for l, r in synchronize_pairs:
        try:
            # inline of source_mapper._get_state_attr_by_column
            prop = source_mapper._columntoproperty[l]
            value = source.manager[prop.key].impl.get(
                source, source_dict, PassiveFlag.PASSIVE_OFF
            )
        except exc.UnmappedColumnError as err:
            _raise_col_to_prop(False, source_mapper, l, dest_mapper, r, err)

        try:
            # inline of dest_mapper._set_state_attr_by_column
            prop = dest_mapper._columntoproperty[r]
            dest.manager[prop.key].impl.set(dest, dest_dict, value, None)
        except exc.UnmappedColumnError as err:
            _raise_col_to_prop(True, source_mapper, l, dest_mapper, r, err)

        # technically the "r.primary_key" check isn't
        # needed here, but we check for this condition to limit
        # how often this logic is invoked for memory/performance
        # reasons, since we only need this info for a primary key
        # destination.
        if (
            flag_cascaded_pks
            and l.primary_key
            and r.primary_key
            and r.references(l)
        ):
            uowcommit.attributes[("pk_cascaded", dest, r)] = True


def bulk_populate_inherit_keys(source_dict, source_mapper, synchronize_pairs):
    # a simplified version of populate() used by bulk insert mode
    for l, r in synchronize_pairs:
        try:
            prop = source_mapper._columntoproperty[l]
            value = source_dict[prop.key]
        except exc.UnmappedColumnError as err:
            _raise_col_to_prop(False, source_mapper, l, source_mapper, r, err)

        try:
            prop = source_mapper._columntoproperty[r]
            source_dict[prop.key] = value
        except exc.UnmappedColumnError as err:
            _raise_col_to_prop(True, source_mapper, l, source_mapper, r, err)


def clear(dest, dest_mapper, synchronize_pairs):
    for l, r in synchronize_pairs:
        if (
            r.primary_key
            and dest_mapper._get_state_attr_by_column(dest, dest.dict, r)
            not in orm_util._none_set
        ):
            raise AssertionError(
                f"Dependency rule on column '{l}' "
                "tried to blank-out primary key "
                f"column '{r}' on instance '{orm_util.state_str(dest)}'"
            )
        try:
            dest_mapper._set_state_attr_by_column(dest, dest.dict, r, None)
        except exc.UnmappedColumnError as err:
            _raise_col_to_prop(True, None, l, dest_mapper, r, err)


def update(source, source_mapper, dest, old_prefix, synchronize_pairs):
    for l, r in synchronize_pairs:
        try:
            oldvalue = source_mapper._get_committed_attr_by_column(
                source.obj(), l
            )
            value = source_mapper._get_state_attr_by_column(
                source, source.dict, l, passive=PassiveFlag.PASSIVE_OFF
            )
        except exc.UnmappedColumnError as err:
            _raise_col_to_prop(False, source_mapper, l, None, r, err)
        dest[r.key] = value
        dest[old_prefix + r.key] = oldvalue


def populate_dict(source, source_mapper, dict_, synchronize_pairs):
    for l, r in synchronize_pairs:
        try:
            value = source_mapper._get_state_attr_by_column(
                source, source.dict, l, passive=PassiveFlag.PASSIVE_OFF
            )
        except exc.UnmappedColumnError as err:
            _raise_col_to_prop(False, source_mapper, l, None, r, err)

        dict_[r.key] = value


def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
    """return true if the source object has changes from an old to a
    new value on the given synchronize pairs

    """
    for l, r in synchronize_pairs:
        try:
            prop = source_mapper._columntoproperty[l]
        except exc.UnmappedColumnError as err:
            _raise_col_to_prop(False, source_mapper, l, None, r, err)
        history = uowcommit.get_attribute_history(
            source, prop.key, PassiveFlag.PASSIVE_NO_INITIALIZE
        )
        if bool(history.deleted):
            return True
    else:
        return False


def _raise_col_to_prop(
    isdest, source_mapper, source_column, dest_mapper, dest_column, err
):
    if isdest:
        raise exc.UnmappedColumnError(
            "Can't execute sync rule for "
            "destination column '%s'; mapper '%s' does not map "
            "this column.  Try using an explicit `foreign_keys` "
            "collection which does not include this column (or use "
            "a viewonly=True relation)." % (dest_column, dest_mapper)
        ) from err
    else:
        raise exc.UnmappedColumnError(
            "Can't execute sync rule for "
            "source column '%s'; mapper '%s' does not map this "
            "column.  Try using an explicit `foreign_keys` "
            "collection which does not include destination column "
            "'%s' (or use a viewonly=True relation)."
            % (source_column, source_mapper, dest_column)
        ) from err
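
# Illustrative note (the parent/child tables are hypothetical): a
# "synchronize pair" is a (source column, destination column) tuple derived
# from a join condition, e.g. (parent.c.id, child.c.parent_id).  During
# flush, populate() reads the parent instance's id and writes it into the
# child's parent_id, so a newly generated primary key cascades into the
# foreign keys of pending children.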

796
venv/lib/python3.11/site-packages/sqlalchemy/orm/unitofwork.py
Normal file
@@ -0,0 +1,796 @@
|
||||
# orm/unitofwork.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
"""The internals for the unit of work system.
|
||||
|
||||
The session's flush() process passes objects to a contextual object
|
||||
here, which assembles flush tasks based on mappers and their properties,
|
||||
organizes them in order of dependency, and executes.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from . import attributes
|
||||
from . import exc as orm_exc
|
||||
from . import util as orm_util
|
||||
from .. import event
|
||||
from .. import util
|
||||
from ..util import topological
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .dependency import DependencyProcessor
|
||||
from .interfaces import MapperProperty
|
||||
from .mapper import Mapper
|
||||
from .session import Session
|
||||
from .session import SessionTransaction
|
||||
from .state import InstanceState
|
||||
|
||||
|
||||
def track_cascade_events(descriptor, prop):
    """Establish event listeners on object attributes which handle
    cascade-on-set/append.

    """
    key = prop.key

    def append(state, item, initiator, **kw):
        # process "save_update" cascade rules for when
        # an instance is appended to the list of another instance

        if item is None:
            return

        sess = state.session
        if sess:
            if sess._warn_on_events:
                sess._flush_warning("collection append")

            prop = state.manager.mapper._props[key]
            item_state = attributes.instance_state(item)

            if (
                prop._cascade.save_update
                and (key == initiator.key)
                and not sess._contains_state(item_state)
            ):
                sess._save_or_update_state(item_state)
        return item

    def remove(state, item, initiator, **kw):
        if item is None:
            return

        sess = state.session

        prop = state.manager.mapper._props[key]

        if sess and sess._warn_on_events:
            sess._flush_warning(
                "collection remove"
                if prop.uselist
                else "related attribute delete"
            )

        if (
            item is not None
            and item is not attributes.NEVER_SET
            and item is not attributes.PASSIVE_NO_RESULT
            and prop._cascade.delete_orphan
        ):
            # expunge pending orphans
            item_state = attributes.instance_state(item)

            if prop.mapper._is_orphan(item_state):
                if sess and item_state in sess._new:
                    sess.expunge(item)
                else:
                    # the related item may or may not itself be in a
                    # Session, however the parent for which we are catching
                    # the event is not in a session, so memoize this on the
                    # item
                    item_state._orphaned_outside_of_session = True

    def set_(state, newvalue, oldvalue, initiator, **kw):
        # process "save_update" cascade rules for when an instance
        # is attached to another instance
        if oldvalue is newvalue:
            return newvalue

        sess = state.session
        if sess:
            if sess._warn_on_events:
                sess._flush_warning("related attribute set")

            prop = state.manager.mapper._props[key]
            if newvalue is not None:
                newvalue_state = attributes.instance_state(newvalue)
                if (
                    prop._cascade.save_update
                    and (key == initiator.key)
                    and not sess._contains_state(newvalue_state)
                ):
                    sess._save_or_update_state(newvalue_state)

            if (
                oldvalue is not None
                and oldvalue is not attributes.NEVER_SET
                and oldvalue is not attributes.PASSIVE_NO_RESULT
                and prop._cascade.delete_orphan
            ):
                # possible to reach here with attributes.NEVER_SET ?
                oldvalue_state = attributes.instance_state(oldvalue)

                if oldvalue_state in sess._new and prop.mapper._is_orphan(
                    oldvalue_state
                ):
                    sess.expunge(oldvalue)
        return newvalue

    event.listen(
        descriptor, "append_wo_mutation", append, raw=True, include_key=True
    )
    event.listen(
        descriptor, "append", append, raw=True, retval=True, include_key=True
    )
    event.listen(
        descriptor, "remove", remove, raw=True, retval=True, include_key=True
    )
    event.listen(
        descriptor, "set", set_, raw=True, retval=True, include_key=True
    )

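# Hedged usage sketch of what the listeners established above accomplish
# at the Session level: appending a transient object to a relationship
# collection of an object already present in a Session pulls it in via
# the "save-update" cascade.  ``parent.children`` is an assumed
# one-to-many relationship; the function itself is illustrative only.
def _example_cascade_on_append(session, parent, transient_child):
    assert transient_child not in session
    parent.children.append(transient_child)
    # the "append" listener above invoked _save_or_update_state():
    assert transient_child in session
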
class UOWTransaction:
    session: Session
    transaction: SessionTransaction
    attributes: Dict[str, Any]
    deps: util.defaultdict[Mapper[Any], Set[DependencyProcessor]]
    mappers: util.defaultdict[Mapper[Any], Set[InstanceState[Any]]]

    def __init__(self, session: Session):
        self.session = session

        # dictionary used by external actors to
        # store arbitrary state information.
        self.attributes = {}

        # dictionary of mappers to sets of
        # DependencyProcessors, which are also
        # set to be part of the sorted flush actions,
        # which have that mapper as a parent.
        self.deps = util.defaultdict(set)

        # dictionary of mappers to sets of InstanceState
        # items pending for flush which have that mapper
        # as a parent.
        self.mappers = util.defaultdict(set)

        # a dictionary of Preprocess objects, which gather
        # additional states impacted by the flush
        # and determine if a flush action is needed
        self.presort_actions = {}

        # dictionary of PostSortRec objects, each
        # one issues work during the flush within
        # a certain ordering.
        self.postsort_actions = {}

        # a set of 2-tuples, each containing two
        # PostSortRec objects where the second
        # is dependent on the first being executed
        # first
        self.dependencies = set()

        # dictionary of InstanceState-> (isdelete, listonly)
        # tuples, indicating if this state is to be deleted
        # or insert/updated, or just refreshed
        self.states = {}

        # tracks InstanceStates which will be receiving
        # a "post update" call.  Keys are mappers,
        # values are a set of states and a set of the
        # columns which should be included in the update.
        self.post_update_states = util.defaultdict(lambda: (set(), set()))

    @property
    def has_work(self):
        return bool(self.states)

    def was_already_deleted(self, state):
        """Return ``True`` if the given state is expired and was deleted
        previously.
        """
        if state.expired:
            try:
                state._load_expired(state, attributes.PASSIVE_OFF)
            except orm_exc.ObjectDeletedError:
                self.session._remove_newly_deleted([state])
                return True
        return False

    def is_deleted(self, state):
        """Return ``True`` if the given state is marked as deleted
        within this uowtransaction."""

        return state in self.states and self.states[state][0]

    def memo(self, key, callable_):
        if key in self.attributes:
            return self.attributes[key]
        else:
            self.attributes[key] = ret = callable_()
            return ret

    def remove_state_actions(self, state):
        """Remove pending actions for a state from the uowtransaction."""

        isdelete = self.states[state][0]

        self.states[state] = (isdelete, True)

    def get_attribute_history(
        self, state, key, passive=attributes.PASSIVE_NO_INITIALIZE
    ):
        """Facade to attributes.get_state_history(), including
        caching of results."""

        hashkey = ("history", state, key)

        # cache the objects, not the states; the strong reference here
        # prevents newly loaded objects from being dereferenced during the
        # flush process

        if hashkey in self.attributes:
            history, state_history, cached_passive = self.attributes[hashkey]
            # if the cached lookup was "passive" and now
            # we want non-passive, do a non-passive lookup and re-cache

            if (
                not cached_passive & attributes.SQL_OK
                and passive & attributes.SQL_OK
            ):
                impl = state.manager[key].impl
                history = impl.get_history(
                    state,
                    state.dict,
                    attributes.PASSIVE_OFF
                    | attributes.LOAD_AGAINST_COMMITTED
                    | attributes.NO_RAISE,
                )
                if history and impl.uses_objects:
                    state_history = history.as_state()
                else:
                    state_history = history
                self.attributes[hashkey] = (history, state_history, passive)
        else:
            impl = state.manager[key].impl
            # TODO: store the history as (state, object) tuples
            # so we don't have to keep converting here
            history = impl.get_history(
                state,
                state.dict,
                passive
                | attributes.LOAD_AGAINST_COMMITTED
                | attributes.NO_RAISE,
            )
            if history and impl.uses_objects:
                state_history = history.as_state()
            else:
                state_history = history
            self.attributes[hashkey] = (history, state_history, passive)

        return state_history

    def has_dep(self, processor):
        return (processor, True) in self.presort_actions

    def register_preprocessor(self, processor, fromparent):
        key = (processor, fromparent)
        if key not in self.presort_actions:
            self.presort_actions[key] = Preprocess(processor, fromparent)

    def register_object(
        self,
        state: InstanceState[Any],
        isdelete: bool = False,
        listonly: bool = False,
        cancel_delete: bool = False,
        operation: Optional[str] = None,
        prop: Optional[MapperProperty] = None,
    ) -> bool:
        if not self.session._contains_state(state):
            # this condition is normal when objects are registered
            # as part of a relationship cascade operation.  it should
            # not occur for the top-level register from Session.flush().
            if not state.deleted and operation is not None:
                util.warn(
                    "Object of type %s not in session, %s operation "
                    "along '%s' will not proceed"
                    % (orm_util.state_class_str(state), operation, prop)
                )
            return False

        if state not in self.states:
            mapper = state.manager.mapper

            if mapper not in self.mappers:
                self._per_mapper_flush_actions(mapper)

            self.mappers[mapper].add(state)
            self.states[state] = (isdelete, listonly)
        else:
            if not listonly and (isdelete or cancel_delete):
                self.states[state] = (isdelete, False)
        return True

    def register_post_update(self, state, post_update_cols):
        mapper = state.manager.mapper.base_mapper
        states, cols = self.post_update_states[mapper]
        states.add(state)
        cols.update(post_update_cols)

    def _per_mapper_flush_actions(self, mapper):
        saves = SaveUpdateAll(self, mapper.base_mapper)
        deletes = DeleteAll(self, mapper.base_mapper)
        self.dependencies.add((saves, deletes))

        for dep in mapper._dependency_processors:
            dep.per_property_preprocessors(self)

        for prop in mapper.relationships:
            if prop.viewonly:
                continue
            dep = prop._dependency_processor
            dep.per_property_preprocessors(self)

    @util.memoized_property
    def _mapper_for_dep(self):
        """return a dynamic mapping of (Mapper, DependencyProcessor) to
        True or False, indicating if the DependencyProcessor operates
        on objects of that Mapper.

        The result is stored in the dictionary persistently once
        calculated.

        """
        return util.PopulateDict(
            lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop
        )

    def filter_states_for_dep(self, dep, states):
        """Filter the given list of InstanceStates to those relevant to the
        given DependencyProcessor.

        """
        mapper_for_dep = self._mapper_for_dep
        return [s for s in states if mapper_for_dep[(s.manager.mapper, dep)]]

    def states_for_mapper_hierarchy(self, mapper, isdelete, listonly):
        checktup = (isdelete, listonly)
        for mapper in mapper.base_mapper.self_and_descendants:
            for state in self.mappers[mapper]:
                if self.states[state] == checktup:
                    yield state

    def _generate_actions(self):
        """Generate the full, unsorted collection of PostSortRecs as
        well as dependency pairs for this UOWTransaction.

        """
        # execute presort_actions, until all states
        # have been processed.  a presort_action might
        # add new states to the uow.
        while True:
            ret = False
            for action in list(self.presort_actions.values()):
                if action.execute(self):
                    ret = True
            if not ret:
                break

        # see if the graph of mapper dependencies has cycles.
        self.cycles = cycles = topological.find_cycles(
            self.dependencies, list(self.postsort_actions.values())
        )

        if cycles:
            # if yes, break the per-mapper actions into
            # per-state actions
            convert = {
                rec: set(rec.per_state_flush_actions(self)) for rec in cycles
            }

            # rewrite the existing dependencies to point to
            # the per-state actions for those per-mapper actions
            # that were broken up.
            for edge in list(self.dependencies):
                if (
                    None in edge
                    or edge[0].disabled
                    or edge[1].disabled
                    or cycles.issuperset(edge)
                ):
                    self.dependencies.remove(edge)
                elif edge[0] in cycles:
                    self.dependencies.remove(edge)
                    for dep in convert[edge[0]]:
                        self.dependencies.add((dep, edge[1]))
                elif edge[1] in cycles:
                    self.dependencies.remove(edge)
                    for dep in convert[edge[1]]:
                        self.dependencies.add((edge[0], dep))

        return {
            a for a in self.postsort_actions.values() if not a.disabled
        }.difference(cycles)

    def execute(self) -> None:
        postsort_actions = self._generate_actions()

        postsort_actions = sorted(
            postsort_actions,
            key=lambda item: item.sort_key,
        )
        # sort = topological.sort(self.dependencies, postsort_actions)
        # print "--------------"
        # print "\ndependencies:", self.dependencies
        # print "\ncycles:", self.cycles
        # print "\nsort:", list(sort)
        # print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions)

        # execute
        if self.cycles:
            for subset in topological.sort_as_subsets(
                self.dependencies, postsort_actions
            ):
                set_ = set(subset)
                while set_:
                    n = set_.pop()
                    n.execute_aggregate(self, set_)
        else:
            for rec in topological.sort(self.dependencies, postsort_actions):
                rec.execute(self)

    def finalize_flush_changes(self) -> None:
        """Mark processed objects as clean / deleted after a successful
        flush().

        This method is called within the flush() method after the
        execute() method has succeeded and the transaction has been
        committed.

        """
        if not self.states:
            return

        states = set(self.states)
        isdel = {
            s for (s, (isdelete, listonly)) in self.states.items() if isdelete
        }
        other = states.difference(isdel)
        if isdel:
            self.session._remove_newly_deleted(isdel)
        if other:
            self.session._register_persistent(other)

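# Two hedged sketches of helpers used by UOWTransaction above; the
# function names, keys, and arguments are hypothetical.

def _example_memo(uowtransaction):
    # memo() computes a value once per flush and caches it in the
    # transaction's ``attributes`` scratchpad dictionary.
    token = uowtransaction.memo(("example_key",), object)
    # a second call with the same key returns the cached value
    assert uowtransaction.memo(("example_key",), object) is token


def _example_dependency_sort():
    # execute() above orders flush actions with topological.sort(), which
    # takes (before, after) pairs plus the complete item list and yields
    # items so that every "before" precedes its "after".
    dependencies = {("saves", "deletes"), ("deletes", "post_update")}
    order = list(
        topological.sort(dependencies, ["post_update", "deletes", "saves"])
    )
    assert order.index("saves") < order.index("deletes")
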
class IterateMappersMixin:
    __slots__ = ()

    def _mappers(self, uow):
        if self.fromparent:
            return iter(
                m
                for m in self.dependency_processor.parent.self_and_descendants
                if uow._mapper_for_dep[(m, self.dependency_processor)]
            )
        else:
            return self.dependency_processor.mapper.self_and_descendants


class Preprocess(IterateMappersMixin):
    __slots__ = (
        "dependency_processor",
        "fromparent",
        "processed",
        "setup_flush_actions",
    )

    def __init__(self, dependency_processor, fromparent):
        self.dependency_processor = dependency_processor
        self.fromparent = fromparent
        self.processed = set()
        self.setup_flush_actions = False

    def execute(self, uow):
        delete_states = set()
        save_states = set()

        for mapper in self._mappers(uow):
            for state in uow.mappers[mapper].difference(self.processed):
                (isdelete, listonly) = uow.states[state]
                if not listonly:
                    if isdelete:
                        delete_states.add(state)
                    else:
                        save_states.add(state)

        if delete_states:
            self.dependency_processor.presort_deletes(uow, delete_states)
            self.processed.update(delete_states)
        if save_states:
            self.dependency_processor.presort_saves(uow, save_states)
            self.processed.update(save_states)

        if delete_states or save_states:
            if not self.setup_flush_actions and (
                self.dependency_processor.prop_has_changes(
                    uow, delete_states, True
                )
                or self.dependency_processor.prop_has_changes(
                    uow, save_states, False
                )
            ):
                self.dependency_processor.per_property_flush_actions(uow)
                self.setup_flush_actions = True
            return True
        else:
            return False

class PostSortRec:
    __slots__ = ("disabled",)

    def __new__(cls, uow, *args):
        key = (cls,) + args
        if key in uow.postsort_actions:
            return uow.postsort_actions[key]
        else:
            uow.postsort_actions[key] = ret = object.__new__(cls)
            ret.disabled = False
            return ret

    def execute_aggregate(self, uow, recs):
        self.execute(uow)

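# The __new__ above interns PostSortRec instances per UOWTransaction:
# constructing the same (class, *args) combination twice returns the
# identical object, so duplicate flush actions collapse automatically.
# A self-contained sketch of the same pattern with a plain dict registry
# (the class below is illustrative, not part of SQLAlchemy):
class _ExampleInterned:
    _registry: Dict[Any, "_ExampleInterned"] = {}

    def __new__(cls, *args: Any) -> "_ExampleInterned":
        key = (cls,) + args
        if key not in cls._registry:
            cls._registry[key] = object.__new__(cls)
        return cls._registry[key]

# _ExampleInterned("a") is _ExampleInterned("a")  ->  True
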
class ProcessAll(IterateMappersMixin, PostSortRec):
    __slots__ = "dependency_processor", "isdelete", "fromparent", "sort_key"

    def __init__(self, uow, dependency_processor, isdelete, fromparent):
        self.dependency_processor = dependency_processor
        self.sort_key = (
            "ProcessAll",
            self.dependency_processor.sort_key,
            isdelete,
        )
        self.isdelete = isdelete
        self.fromparent = fromparent
        uow.deps[dependency_processor.parent.base_mapper].add(
            dependency_processor
        )

    def execute(self, uow):
        states = self._elements(uow)
        if self.isdelete:
            self.dependency_processor.process_deletes(uow, states)
        else:
            self.dependency_processor.process_saves(uow, states)

    def per_state_flush_actions(self, uow):
        # this is handled by SaveUpdateAll and DeleteAll,
        # since a ProcessAll should unconditionally be pulled
        # into per-state if either the parent/child mappers
        # are part of a cycle
        return iter([])

    def __repr__(self):
        return "%s(%s, isdelete=%s)" % (
            self.__class__.__name__,
            self.dependency_processor,
            self.isdelete,
        )

    def _elements(self, uow):
        for mapper in self._mappers(uow):
            for state in uow.mappers[mapper]:
                (isdelete, listonly) = uow.states[state]
                if isdelete == self.isdelete and not listonly:
                    yield state


class PostUpdateAll(PostSortRec):
    __slots__ = "mapper", "isdelete", "sort_key"

    def __init__(self, uow, mapper, isdelete):
        self.mapper = mapper
        self.isdelete = isdelete
        self.sort_key = ("PostUpdateAll", mapper._sort_key, isdelete)

    @util.preload_module("sqlalchemy.orm.persistence")
    def execute(self, uow):
        persistence = util.preloaded.orm_persistence
        states, cols = uow.post_update_states[self.mapper]
        states = [s for s in states if uow.states[s][0] == self.isdelete]

        persistence.post_update(self.mapper, states, uow, cols)


class SaveUpdateAll(PostSortRec):
    __slots__ = ("mapper", "sort_key")

    def __init__(self, uow, mapper):
        self.mapper = mapper
        self.sort_key = ("SaveUpdateAll", mapper._sort_key)
        assert mapper is mapper.base_mapper

    @util.preload_module("sqlalchemy.orm.persistence")
    def execute(self, uow):
        util.preloaded.orm_persistence.save_obj(
            self.mapper,
            uow.states_for_mapper_hierarchy(self.mapper, False, False),
            uow,
        )

    def per_state_flush_actions(self, uow):
        states = list(
            uow.states_for_mapper_hierarchy(self.mapper, False, False)
        )
        base_mapper = self.mapper.base_mapper
        delete_all = DeleteAll(uow, base_mapper)
        for state in states:
            # keep saves before deletes -
            # this ensures 'row switch' operations work
            action = SaveUpdateState(uow, state)
            uow.dependencies.add((action, delete_all))
            yield action

        for dep in uow.deps[self.mapper]:
            states_for_prop = uow.filter_states_for_dep(dep, states)
            dep.per_state_flush_actions(uow, states_for_prop, False)

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.mapper)


class DeleteAll(PostSortRec):
    __slots__ = ("mapper", "sort_key")

    def __init__(self, uow, mapper):
        self.mapper = mapper
        self.sort_key = ("DeleteAll", mapper._sort_key)
        assert mapper is mapper.base_mapper

    @util.preload_module("sqlalchemy.orm.persistence")
    def execute(self, uow):
        util.preloaded.orm_persistence.delete_obj(
            self.mapper,
            uow.states_for_mapper_hierarchy(self.mapper, True, False),
            uow,
        )

    def per_state_flush_actions(self, uow):
        states = list(
            uow.states_for_mapper_hierarchy(self.mapper, True, False)
        )
        base_mapper = self.mapper.base_mapper
        save_all = SaveUpdateAll(uow, base_mapper)
        for state in states:
            # keep saves before deletes -
            # this ensures 'row switch' operations work
            action = DeleteState(uow, state)
            uow.dependencies.add((save_all, action))
            yield action

        for dep in uow.deps[self.mapper]:
            states_for_prop = uow.filter_states_for_dep(dep, states)
            dep.per_state_flush_actions(uow, states_for_prop, True)

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.mapper)


class ProcessState(PostSortRec):
    __slots__ = "dependency_processor", "isdelete", "state", "sort_key"

    def __init__(self, uow, dependency_processor, isdelete, state):
        self.dependency_processor = dependency_processor
        self.sort_key = ("ProcessState", dependency_processor.sort_key)
        self.isdelete = isdelete
        self.state = state

    def execute_aggregate(self, uow, recs):
        cls_ = self.__class__
        dependency_processor = self.dependency_processor
        isdelete = self.isdelete
        our_recs = [
            r
            for r in recs
            if r.__class__ is cls_
            and r.dependency_processor is dependency_processor
            and r.isdelete is isdelete
        ]
        recs.difference_update(our_recs)
        states = [self.state] + [r.state for r in our_recs]
        if isdelete:
            dependency_processor.process_deletes(uow, states)
        else:
            dependency_processor.process_saves(uow, states)

    def __repr__(self):
        return "%s(%s, %s, delete=%s)" % (
            self.__class__.__name__,
            self.dependency_processor,
            orm_util.state_str(self.state),
            self.isdelete,
        )


class SaveUpdateState(PostSortRec):
    __slots__ = "state", "mapper", "sort_key"

    def __init__(self, uow, state):
        self.state = state
        self.mapper = state.mapper.base_mapper
        self.sort_key = ("ProcessState", self.mapper._sort_key)

    @util.preload_module("sqlalchemy.orm.persistence")
    def execute_aggregate(self, uow, recs):
        persistence = util.preloaded.orm_persistence
        cls_ = self.__class__
        mapper = self.mapper
        our_recs = [
            r for r in recs if r.__class__ is cls_ and r.mapper is mapper
        ]
        recs.difference_update(our_recs)
        persistence.save_obj(
            mapper, [self.state] + [r.state for r in our_recs], uow
        )

    def __repr__(self):
        return "%s(%s)" % (
            self.__class__.__name__,
            orm_util.state_str(self.state),
        )


class DeleteState(PostSortRec):
    __slots__ = "state", "mapper", "sort_key"

    def __init__(self, uow, state):
        self.state = state
        self.mapper = state.mapper.base_mapper
        self.sort_key = ("DeleteState", self.mapper._sort_key)

    @util.preload_module("sqlalchemy.orm.persistence")
    def execute_aggregate(self, uow, recs):
        persistence = util.preloaded.orm_persistence
        cls_ = self.__class__
        mapper = self.mapper
        our_recs = [
            r for r in recs if r.__class__ is cls_ and r.mapper is mapper
        ]
        recs.difference_update(our_recs)
        states = [self.state] + [r.state for r in our_recs]
        persistence.delete_obj(
            mapper, [s for s in states if uow.states[s][0]], uow
        )

    def __repr__(self):
        return "%s(%s)" % (
            self.__class__.__name__,
            orm_util.state_str(self.state),
        )
2403
venv/lib/python3.11/site-packages/sqlalchemy/orm/util.py
Normal file
File diff suppressed because it is too large

678
venv/lib/python3.11/site-packages/sqlalchemy/orm/writeonly.py
Normal file
@ -0,0 +1,678 @@
# orm/writeonly.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

"""Write-only collection API.

This is an alternate mapped attribute style that only supports single-item
collection mutation operations.  To read the collection, a select()
object must be executed each time.

.. versionadded:: 2.0

"""

from __future__ import annotations

from typing import Any
from typing import Collection
from typing import Dict
from typing import Generic
from typing import Iterable
from typing import Iterator
from typing import List
from typing import NoReturn
from typing import Optional
from typing import overload
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar
from typing import Union

from sqlalchemy.sql import bindparam
from . import attributes
from . import interfaces
from . import relationships
from . import strategies
from .base import NEVER_SET
from .base import object_mapper
from .base import PassiveFlag
from .base import RelationshipDirection
from .. import exc
from .. import inspect
from .. import log
from .. import util
from ..sql import delete
from ..sql import insert
from ..sql import select
from ..sql import update
from ..sql.dml import Delete
from ..sql.dml import Insert
from ..sql.dml import Update
from ..util.typing import Literal

if TYPE_CHECKING:
    from . import QueryableAttribute
    from ._typing import _InstanceDict
    from .attributes import AttributeEventToken
    from .base import LoaderCallableStatus
    from .collections import _AdaptedCollectionProtocol
    from .collections import CollectionAdapter
    from .mapper import Mapper
    from .relationships import _RelationshipOrderByArg
    from .state import InstanceState
    from .util import AliasedClass
    from ..event import _Dispatch
    from ..sql.selectable import FromClause
    from ..sql.selectable import Select

_T = TypeVar("_T", bound=Any)

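# Hedged configuration sketch (assumed models, not part of this module)
# of the mapping style this module implements: annotating with
# WriteOnlyMapped selects the lazy="write_only" strategy, and the mapped
# attribute then yields the WriteOnlyCollection defined below.  The
# ``note`` column is an assumption reused by later sketches.
def _example_write_only_mapping():  # illustrative only
    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        mapped_column,
        relationship,
        WriteOnlyMapped,
    )

    class Base(DeclarativeBase):
        pass

    class Account(Base):
        __tablename__ = "account"
        id: Mapped[int] = mapped_column(primary_key=True)
        entries: WriteOnlyMapped["Entry"] = relationship()

    class Entry(Base):
        __tablename__ = "entry"
        id: Mapped[int] = mapped_column(primary_key=True)
        account_id: Mapped[int] = mapped_column(ForeignKey("account.id"))
        note: Mapped[str] = mapped_column(default="")

    return Account, Entry
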
class WriteOnlyHistory(Generic[_T]):
    """Overrides AttributeHistory to receive append/remove events directly."""

    unchanged_items: util.OrderedIdentitySet
    added_items: util.OrderedIdentitySet
    deleted_items: util.OrderedIdentitySet
    _reconcile_collection: bool

    def __init__(
        self,
        attr: WriteOnlyAttributeImpl,
        state: InstanceState[_T],
        passive: PassiveFlag,
        apply_to: Optional[WriteOnlyHistory[_T]] = None,
    ) -> None:
        if apply_to:
            if passive & PassiveFlag.SQL_OK:
                raise exc.InvalidRequestError(
                    f"Attribute {attr} can't load the existing state from the "
                    "database for this operation; full iteration is not "
                    "permitted.  If this is a delete operation, configure "
                    f"passive_deletes=True on the {attr} relationship in "
                    "order to resolve this error."
                )

            self.unchanged_items = apply_to.unchanged_items
            self.added_items = apply_to.added_items
            self.deleted_items = apply_to.deleted_items
            self._reconcile_collection = apply_to._reconcile_collection
        else:
            self.deleted_items = util.OrderedIdentitySet()
            self.added_items = util.OrderedIdentitySet()
            self.unchanged_items = util.OrderedIdentitySet()
            self._reconcile_collection = False

    @property
    def added_plus_unchanged(self) -> List[_T]:
        return list(self.added_items.union(self.unchanged_items))

    @property
    def all_items(self) -> List[_T]:
        return list(
            self.added_items.union(self.unchanged_items).union(
                self.deleted_items
            )
        )

    def as_history(self) -> attributes.History:
        if self._reconcile_collection:
            added = self.added_items.difference(self.unchanged_items)
            deleted = self.deleted_items.intersection(self.unchanged_items)
            unchanged = self.unchanged_items.difference(deleted)
        else:
            added, unchanged, deleted = (
                self.added_items,
                self.unchanged_items,
                self.deleted_items,
            )
        return attributes.History(list(added), list(unchanged), list(deleted))

    def indexed(self, index: Union[int, slice]) -> Union[List[_T], _T]:
        return list(self.added_items)[index]

    def add_added(self, value: _T) -> None:
        self.added_items.add(value)

    def add_removed(self, value: _T) -> None:
        if value in self.added_items:
            self.added_items.remove(value)
        else:
            self.deleted_items.add(value)

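# Hedged configuration sketch for the InvalidRequestError raised above: a
# write-only collection can't be iterated to cascade a parent delete, so
# the relationship is typically configured with passive_deletes=True and
# the removal of child rows is deferred to the database.  All names below
# are assumptions carried over from the earlier sketch.
def _example_passive_deletes():  # illustrative only
    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        mapped_column,
        relationship,
        WriteOnlyMapped,
    )

    class Base(DeclarativeBase):
        pass

    class Account(Base):
        __tablename__ = "account"
        id: Mapped[int] = mapped_column(primary_key=True)
        # defer child-row removal to the database instead of iterating
        entries: WriteOnlyMapped["Entry"] = relationship(
            passive_deletes=True
        )

    class Entry(Base):
        __tablename__ = "entry"
        id: Mapped[int] = mapped_column(primary_key=True)
        account_id: Mapped[int] = mapped_column(
            ForeignKey("account.id", ondelete="CASCADE")
        )
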
class WriteOnlyAttributeImpl(
    attributes.HasCollectionAdapter, attributes.AttributeImpl
):
    uses_objects: bool = True
    default_accepts_scalar_loader: bool = False
    supports_population: bool = False
    _supports_dynamic_iteration: bool = False
    collection: bool = False
    dynamic: bool = True
    order_by: _RelationshipOrderByArg = ()
    collection_history_cls: Type[WriteOnlyHistory[Any]] = WriteOnlyHistory

    query_class: Type[WriteOnlyCollection[Any]]

    def __init__(
        self,
        class_: Union[Type[Any], AliasedClass[Any]],
        key: str,
        dispatch: _Dispatch[QueryableAttribute[Any]],
        target_mapper: Mapper[_T],
        order_by: _RelationshipOrderByArg,
        **kw: Any,
    ):
        super().__init__(class_, key, None, dispatch, **kw)
        self.target_mapper = target_mapper
        self.query_class = WriteOnlyCollection
        if order_by:
            self.order_by = tuple(order_by)

    def get(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
    ) -> Union[util.OrderedIdentitySet, WriteOnlyCollection[Any]]:
        if not passive & PassiveFlag.SQL_OK:
            return self._get_collection_history(
                state, PassiveFlag.PASSIVE_NO_INITIALIZE
            ).added_items
        else:
            return self.query_class(self, state)

    @overload
    def get_collection(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        user_data: Literal[None] = ...,
        passive: Literal[PassiveFlag.PASSIVE_OFF] = ...,
    ) -> CollectionAdapter: ...

    @overload
    def get_collection(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        user_data: _AdaptedCollectionProtocol = ...,
        passive: PassiveFlag = ...,
    ) -> CollectionAdapter: ...

    @overload
    def get_collection(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        user_data: Optional[_AdaptedCollectionProtocol] = ...,
        passive: PassiveFlag = ...,
    ) -> Union[
        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
    ]: ...

    def get_collection(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        user_data: Optional[_AdaptedCollectionProtocol] = None,
        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
    ) -> Union[
        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
    ]:
        data: Collection[Any]
        if not passive & PassiveFlag.SQL_OK:
            data = self._get_collection_history(state, passive).added_items
        else:
            history = self._get_collection_history(state, passive)
            data = history.added_plus_unchanged
        return DynamicCollectionAdapter(data)  # type: ignore[return-value]

    @util.memoized_property
    def _append_token(  # type:ignore[override]
        self,
    ) -> attributes.AttributeEventToken:
        return attributes.AttributeEventToken(self, attributes.OP_APPEND)

    @util.memoized_property
    def _remove_token(  # type:ignore[override]
        self,
    ) -> attributes.AttributeEventToken:
        return attributes.AttributeEventToken(self, attributes.OP_REMOVE)

    def fire_append_event(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        value: Any,
        initiator: Optional[AttributeEventToken],
        collection_history: Optional[WriteOnlyHistory[Any]] = None,
    ) -> None:
        if collection_history is None:
            collection_history = self._modified_event(state, dict_)

        collection_history.add_added(value)

        for fn in self.dispatch.append:
            value = fn(state, value, initiator or self._append_token)

        if self.trackparent and value is not None:
            self.sethasparent(attributes.instance_state(value), state, True)

    def fire_remove_event(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        value: Any,
        initiator: Optional[AttributeEventToken],
        collection_history: Optional[WriteOnlyHistory[Any]] = None,
    ) -> None:
        if collection_history is None:
            collection_history = self._modified_event(state, dict_)

        collection_history.add_removed(value)

        if self.trackparent and value is not None:
            self.sethasparent(attributes.instance_state(value), state, False)

        for fn in self.dispatch.remove:
            fn(state, value, initiator or self._remove_token)

    def _modified_event(
        self, state: InstanceState[Any], dict_: _InstanceDict
    ) -> WriteOnlyHistory[Any]:
        if self.key not in state.committed_state:
            state.committed_state[self.key] = self.collection_history_cls(
                self, state, PassiveFlag.PASSIVE_NO_FETCH
            )

        state._modified_event(dict_, self, NEVER_SET)

        # this is a hack to allow the entities.ComparableEntity fixture
        # to work
        dict_[self.key] = True
        return state.committed_state[self.key]  # type: ignore[no-any-return]

    def set(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        value: Any,
        initiator: Optional[AttributeEventToken] = None,
        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
        check_old: Any = None,
        pop: bool = False,
        _adapt: bool = True,
    ) -> None:
        if initiator and initiator.parent_token is self.parent_token:
            return

        if pop and value is None:
            return

        iterable = value
        new_values = list(iterable)
        if state.has_identity:
            if not self._supports_dynamic_iteration:
                raise exc.InvalidRequestError(
                    f'Collection "{self}" does not support implicit '
                    "iteration; collection replacement operations "
                    "can't be used"
                )
            old_collection = util.IdentitySet(
                self.get(state, dict_, passive=passive)
            )

        collection_history = self._modified_event(state, dict_)
        if not state.has_identity:
            old_collection = collection_history.added_items
        else:
            old_collection = old_collection.union(
                collection_history.added_items
            )

        constants = old_collection.intersection(new_values)
        additions = util.IdentitySet(new_values).difference(constants)
        removals = old_collection.difference(constants)

        for member in new_values:
            if member in additions:
                self.fire_append_event(
                    state,
                    dict_,
                    member,
                    None,
                    collection_history=collection_history,
                )

        for member in removals:
            self.fire_remove_event(
                state,
                dict_,
                member,
                None,
                collection_history=collection_history,
            )

    def delete(self, *args: Any, **kwargs: Any) -> NoReturn:
        raise NotImplementedError()

    def set_committed_value(
        self, state: InstanceState[Any], dict_: _InstanceDict, value: Any
    ) -> NoReturn:
        raise NotImplementedError(
            "Dynamic attributes don't support collection population."
        )

    def get_history(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH,
    ) -> attributes.History:
        c = self._get_collection_history(state, passive)
        return c.as_history()

    def get_all_pending(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        passive: PassiveFlag = PassiveFlag.PASSIVE_NO_INITIALIZE,
    ) -> List[Tuple[InstanceState[Any], Any]]:
        c = self._get_collection_history(state, passive)
        return [(attributes.instance_state(x), x) for x in c.all_items]

    def _get_collection_history(
        self, state: InstanceState[Any], passive: PassiveFlag
    ) -> WriteOnlyHistory[Any]:
        c: WriteOnlyHistory[Any]
        if self.key in state.committed_state:
            c = state.committed_state[self.key]
        else:
            c = self.collection_history_cls(
                self, state, PassiveFlag.PASSIVE_NO_FETCH
            )

        if state.has_identity and (passive & PassiveFlag.INIT_OK):
            return self.collection_history_cls(
                self, state, passive, apply_to=c
            )
        else:
            return c

    def append(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        value: Any,
        initiator: Optional[AttributeEventToken],
        passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH,
    ) -> None:
        if initiator is not self:
            self.fire_append_event(state, dict_, value, initiator)

    def remove(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        value: Any,
        initiator: Optional[AttributeEventToken],
        passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH,
    ) -> None:
        if initiator is not self:
            self.fire_remove_event(state, dict_, value, initiator)

    def pop(
        self,
        state: InstanceState[Any],
        dict_: _InstanceDict,
        value: Any,
        initiator: Optional[AttributeEventToken],
        passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH,
    ) -> None:
        self.remove(state, dict_, value, initiator, passive=passive)

@log.class_logger
@relationships.RelationshipProperty.strategy_for(lazy="write_only")
class WriteOnlyLoader(strategies.AbstractRelationshipLoader, log.Identified):
    impl_class = WriteOnlyAttributeImpl

    def init_class_attribute(self, mapper: Mapper[Any]) -> None:
        self.is_class_level = True
        if not self.uselist or self.parent_property.direction not in (
            interfaces.ONETOMANY,
            interfaces.MANYTOMANY,
        ):
            raise exc.InvalidRequestError(
                "On relationship %s, 'dynamic' loaders cannot be used with "
                "many-to-one/one-to-one relationships and/or "
                "uselist=False." % self.parent_property
            )

        strategies._register_attribute(  # type: ignore[no-untyped-call]
            self.parent_property,
            mapper,
            useobject=True,
            impl_class=self.impl_class,
            target_mapper=self.parent_property.mapper,
            order_by=self.parent_property.order_by,
            query_class=self.parent_property.query_class,
        )

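# Hedged note on the constraint enforced above: like lazy="dynamic",
# lazy="write_only" is only valid for one-to-many or many-to-many
# collections.  A hypothetical many-to-one configuration such as
#
#     account: WriteOnlyMapped["Account"] = relationship()  # on Entry
#
# fails at mapper configuration time with the InvalidRequestError shown
# in init_class_attribute() above.
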
class DynamicCollectionAdapter:
    """simplified CollectionAdapter for internal API consistency"""

    data: Collection[Any]

    def __init__(self, data: Collection[Any]):
        self.data = data

    def __iter__(self) -> Iterator[Any]:
        return iter(self.data)

    def _reset_empty(self) -> None:
        pass

    def __len__(self) -> int:
        return len(self.data)

    def __bool__(self) -> bool:
        return True

class AbstractCollectionWriter(Generic[_T]):
    """Virtual collection which includes append/remove methods that
    synchronize into the attribute event system.

    """

    if not TYPE_CHECKING:
        __slots__ = ()

    instance: _T
    _from_obj: Tuple[FromClause, ...]

    def __init__(self, attr: WriteOnlyAttributeImpl, state: InstanceState[_T]):
        instance = state.obj()
        if TYPE_CHECKING:
            assert instance
        self.instance = instance
        self.attr = attr

        mapper = object_mapper(instance)
        prop = mapper._props[self.attr.key]

        if prop.secondary is not None:
            # this is a hack right now.  The Query only knows how to
            # make subsequent joins() without a given left-hand side
            # from self._from_obj[0].  We need to ensure prop.secondary
            # is in the FROM.  So we purposely put the mapper selectable
            # in _from_obj[0] to ensure a user-defined join() later on
            # doesn't fail, and secondary is then in _from_obj[1].

            # note also, we are using the official ORM-annotated selectable
            # from __clause_element__(), see #7868
            self._from_obj = (prop.mapper.__clause_element__(), prop.secondary)
        else:
            self._from_obj = ()

        self._where_criteria = (
            prop._with_parent(instance, alias_secondary=False),
        )

        if self.attr.order_by:
            self._order_by_clauses = self.attr.order_by
        else:
            self._order_by_clauses = ()

    def _add_all_impl(self, iterator: Iterable[_T]) -> None:
        for item in iterator:
            self.attr.append(
                attributes.instance_state(self.instance),
                attributes.instance_dict(self.instance),
                item,
                None,
            )

    def _remove_impl(self, item: _T) -> None:
        self.attr.remove(
            attributes.instance_state(self.instance),
            attributes.instance_dict(self.instance),
            item,
            None,
        )

class WriteOnlyCollection(AbstractCollectionWriter[_T]):
    """Write-only collection which can synchronize changes into the
    attribute event system.

    The :class:`.WriteOnlyCollection` is used in a mapping by
    using the ``"write_only"`` lazy loading strategy with
    :func:`_orm.relationship`.  For background on this configuration,
    see :ref:`write_only_relationship`.

    .. versionadded:: 2.0

    .. seealso::

        :ref:`write_only_relationship`

    """

    __slots__ = (
        "instance",
        "attr",
        "_where_criteria",
        "_from_obj",
        "_order_by_clauses",
    )

    def __iter__(self) -> NoReturn:
        raise TypeError(
            "WriteOnly collections don't support iteration in-place; "
            "to query for collection items, use the select() method to "
            "produce a SQL statement and execute it with session.scalars()."
        )

    def select(self) -> Select[Tuple[_T]]:
        """Produce a :class:`_sql.Select` construct that represents the
        rows within this instance-local :class:`_orm.WriteOnlyCollection`.

        (A usage sketch for this and the other statement methods follows
        this class.)

        """
        stmt = select(self.attr.target_mapper).where(*self._where_criteria)
        if self._from_obj:
            stmt = stmt.select_from(*self._from_obj)
        if self._order_by_clauses:
            stmt = stmt.order_by(*self._order_by_clauses)
        return stmt

    def insert(self) -> Insert:
        """For one-to-many collections, produce a :class:`_dml.Insert` which
        will insert new rows in terms of this instance-local
        :class:`_orm.WriteOnlyCollection`.

        This construct is only supported for a :class:`_orm.Relationship`
        that does **not** include the :paramref:`_orm.relationship.secondary`
        parameter.  For relationships that refer to a many-to-many table,
        use ordinary bulk insert techniques to produce new objects, then
        use :meth:`_orm.AbstractCollectionWriter.add_all` to associate them
        with the collection.

        """

        state = inspect(self.instance)
        mapper = state.mapper
        prop = mapper._props[self.attr.key]

        if prop.direction is not RelationshipDirection.ONETOMANY:
            raise exc.InvalidRequestError(
                "Write only bulk INSERT only supported for one-to-many "
                "collections; for many-to-many, use a separate bulk "
                "INSERT along with add_all()."
            )

        dict_: Dict[str, Any] = {}

        for l, r in prop.synchronize_pairs:
            fn = prop._get_attr_w_warn_on_none(
                mapper,
                state,
                state.dict,
                l,
            )

            dict_[r.key] = bindparam(None, callable_=fn)

        return insert(self.attr.target_mapper).values(**dict_)

    def update(self) -> Update:
        """Produce a :class:`_dml.Update` which will refer to rows in terms
        of this instance-local :class:`_orm.WriteOnlyCollection`.

        """
        return update(self.attr.target_mapper).where(*self._where_criteria)

    def delete(self) -> Delete:
        """Produce a :class:`_dml.Delete` which will refer to rows in terms
        of this instance-local :class:`_orm.WriteOnlyCollection`.

        """
        return delete(self.attr.target_mapper).where(*self._where_criteria)

    def add_all(self, iterator: Iterable[_T]) -> None:
        """Add an iterable of items to this :class:`_orm.WriteOnlyCollection`.

        The given items will be persisted to the database in terms of
        the parent instance's collection on the next flush.

        """
        self._add_all_impl(iterator)

    def add(self, item: _T) -> None:
        """Add an item to this :class:`_orm.WriteOnlyCollection`.

        The given item will be persisted to the database in terms of
        the parent instance's collection on the next flush.

        """
        self._add_all_impl([item])

    def remove(self, item: _T) -> None:
        """Remove an item from this :class:`_orm.WriteOnlyCollection`.

        The given item will be removed from the parent instance's
        collection on the next flush.

        """
        self._remove_impl(item)
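
# Hedged end-to-end usage sketch for WriteOnlyCollection, continuing the
# assumed Account/Entry mapping (including the assumed ``note`` column)
# from the earlier sketches; session handling is likewise assumed.
def _example_write_only_usage(session, account, Entry):
    # reading is always an explicit SELECT
    entries = session.scalars(account.entries.select()).all()

    # add()/add_all() queue pending INSERTs for the next flush
    account.entries.add(Entry(note="first"))
    account.entries.add_all([Entry(note="second"), Entry(note="third")])

    # insert() yields a bulk INSERT pre-populated with the parent's
    # foreign key value; remaining columns come from the parameter sets
    session.execute(
        account.entries.insert(),
        [{"note": "fourth"}, {"note": "fifth"}],
    )

    # update()/delete() are pre-qualified to this parent's rows
    session.execute(account.entries.update().values(note="archived"))
    session.execute(account.entries.delete())
    session.commit()
    return entries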