Update 2025-04-13_16:25:39

commit 4c711360d3
root, 2025-04-13 16:25:41 +02:00
2979 changed files with 666585 additions and 0 deletions


@@ -0,0 +1,143 @@
__version__ = "0.0.24"
# Re-export from SQLAlchemy
from sqlalchemy.engine import create_engine as create_engine
from sqlalchemy.engine import create_mock_engine as create_mock_engine
from sqlalchemy.engine import engine_from_config as engine_from_config
from sqlalchemy.inspection import inspect as inspect
from sqlalchemy.pool import QueuePool as QueuePool
from sqlalchemy.pool import StaticPool as StaticPool
from sqlalchemy.schema import BLANK_SCHEMA as BLANK_SCHEMA
from sqlalchemy.schema import DDL as DDL
from sqlalchemy.schema import CheckConstraint as CheckConstraint
from sqlalchemy.schema import Column as Column
from sqlalchemy.schema import ColumnDefault as ColumnDefault
from sqlalchemy.schema import Computed as Computed
from sqlalchemy.schema import Constraint as Constraint
from sqlalchemy.schema import DefaultClause as DefaultClause
from sqlalchemy.schema import FetchedValue as FetchedValue
from sqlalchemy.schema import ForeignKey as ForeignKey
from sqlalchemy.schema import ForeignKeyConstraint as ForeignKeyConstraint
from sqlalchemy.schema import Identity as Identity
from sqlalchemy.schema import Index as Index
from sqlalchemy.schema import MetaData as MetaData
from sqlalchemy.schema import PrimaryKeyConstraint as PrimaryKeyConstraint
from sqlalchemy.schema import Sequence as Sequence
from sqlalchemy.schema import Table as Table
from sqlalchemy.schema import UniqueConstraint as UniqueConstraint
from sqlalchemy.sql import LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT
from sqlalchemy.sql import (
LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY,
)
from sqlalchemy.sql import LABEL_STYLE_NONE as LABEL_STYLE_NONE
from sqlalchemy.sql import (
LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL,
)
from sqlalchemy.sql import alias as alias
from sqlalchemy.sql import bindparam as bindparam
from sqlalchemy.sql import column as column
from sqlalchemy.sql import delete as delete
from sqlalchemy.sql import except_ as except_
from sqlalchemy.sql import except_all as except_all
from sqlalchemy.sql import exists as exists
from sqlalchemy.sql import false as false
from sqlalchemy.sql import func as func
from sqlalchemy.sql import insert as insert
from sqlalchemy.sql import intersect as intersect
from sqlalchemy.sql import intersect_all as intersect_all
from sqlalchemy.sql import join as join
from sqlalchemy.sql import lambda_stmt as lambda_stmt
from sqlalchemy.sql import lateral as lateral
from sqlalchemy.sql import literal as literal
from sqlalchemy.sql import literal_column as literal_column
from sqlalchemy.sql import modifier as modifier
from sqlalchemy.sql import null as null
from sqlalchemy.sql import nullsfirst as nullsfirst
from sqlalchemy.sql import nullslast as nullslast
from sqlalchemy.sql import outerjoin as outerjoin
from sqlalchemy.sql import outparam as outparam
from sqlalchemy.sql import table as table
from sqlalchemy.sql import tablesample as tablesample
from sqlalchemy.sql import text as text
from sqlalchemy.sql import true as true
from sqlalchemy.sql import union as union
from sqlalchemy.sql import union_all as union_all
from sqlalchemy.sql import update as update
from sqlalchemy.sql import values as values
from sqlalchemy.types import ARRAY as ARRAY
from sqlalchemy.types import BIGINT as BIGINT
from sqlalchemy.types import BINARY as BINARY
from sqlalchemy.types import BLOB as BLOB
from sqlalchemy.types import BOOLEAN as BOOLEAN
from sqlalchemy.types import CHAR as CHAR
from sqlalchemy.types import CLOB as CLOB
from sqlalchemy.types import DATE as DATE
from sqlalchemy.types import DATETIME as DATETIME
from sqlalchemy.types import DECIMAL as DECIMAL
from sqlalchemy.types import DOUBLE as DOUBLE
from sqlalchemy.types import DOUBLE_PRECISION as DOUBLE_PRECISION
from sqlalchemy.types import FLOAT as FLOAT
from sqlalchemy.types import INT as INT
from sqlalchemy.types import INTEGER as INTEGER
from sqlalchemy.types import JSON as JSON
from sqlalchemy.types import NCHAR as NCHAR
from sqlalchemy.types import NUMERIC as NUMERIC
from sqlalchemy.types import NVARCHAR as NVARCHAR
from sqlalchemy.types import REAL as REAL
from sqlalchemy.types import SMALLINT as SMALLINT
from sqlalchemy.types import TEXT as TEXT
from sqlalchemy.types import TIME as TIME
from sqlalchemy.types import TIMESTAMP as TIMESTAMP
from sqlalchemy.types import UUID as UUID
from sqlalchemy.types import VARBINARY as VARBINARY
from sqlalchemy.types import VARCHAR as VARCHAR
from sqlalchemy.types import BigInteger as BigInteger
from sqlalchemy.types import Boolean as Boolean
from sqlalchemy.types import Date as Date
from sqlalchemy.types import DateTime as DateTime
from sqlalchemy.types import Double as Double
from sqlalchemy.types import Enum as Enum
from sqlalchemy.types import Float as Float
from sqlalchemy.types import Integer as Integer
from sqlalchemy.types import Interval as Interval
from sqlalchemy.types import LargeBinary as LargeBinary
from sqlalchemy.types import Numeric as Numeric
from sqlalchemy.types import PickleType as PickleType
from sqlalchemy.types import SmallInteger as SmallInteger
from sqlalchemy.types import String as String
from sqlalchemy.types import Text as Text
from sqlalchemy.types import Time as Time
from sqlalchemy.types import TupleType as TupleType
from sqlalchemy.types import TypeDecorator as TypeDecorator
from sqlalchemy.types import Unicode as Unicode
from sqlalchemy.types import UnicodeText as UnicodeText
from sqlalchemy.types import Uuid as Uuid
# From SQLModel, modifications of SQLAlchemy or equivalents of Pydantic
from .main import Field as Field
from .main import Relationship as Relationship
from .main import SQLModel as SQLModel
from .orm.session import Session as Session
from .sql.expression import all_ as all_
from .sql.expression import and_ as and_
from .sql.expression import any_ as any_
from .sql.expression import asc as asc
from .sql.expression import between as between
from .sql.expression import case as case
from .sql.expression import cast as cast
from .sql.expression import col as col
from .sql.expression import collate as collate
from .sql.expression import desc as desc
from .sql.expression import distinct as distinct
from .sql.expression import extract as extract
from .sql.expression import funcfilter as funcfilter
from .sql.expression import not_ as not_
from .sql.expression import nulls_first as nulls_first
from .sql.expression import nulls_last as nulls_last
from .sql.expression import or_ as or_
from .sql.expression import over as over
from .sql.expression import select as select
from .sql.expression import tuple_ as tuple_
from .sql.expression import type_coerce as type_coerce
from .sql.expression import within_group as within_group
from .sql.sqltypes import AutoString as AutoString
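All of the names above are re-exported so that application code can import both the SQLAlchemy pieces and the SQLModel pieces from the single `sqlmodel` namespace. A minimal usage sketch, with a hypothetical `Hero` model that is not part of this commit:

```Python
from typing import Optional

from sqlmodel import Field, Session, SQLModel, create_engine, select


class Hero(SQLModel, table=True):  # hypothetical example model
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str


engine = create_engine("sqlite://")  # in-memory SQLite, just for illustration
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Hero(name="Deadpond"))
    session.commit()
    heroes = session.exec(select(Hero)).all()
```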


@@ -0,0 +1,576 @@
import types
from contextlib import contextmanager
from contextvars import ContextVar
from dataclasses import dataclass
from typing import (
TYPE_CHECKING,
AbstractSet,
Any,
Callable,
Dict,
ForwardRef,
Generator,
Mapping,
Optional,
Set,
Type,
TypeVar,
Union,
)
from pydantic import VERSION as P_VERSION
from pydantic import BaseModel
from pydantic.fields import FieldInfo
from typing_extensions import Annotated, get_args, get_origin
# Reassign variable to make it reexported for mypy
PYDANTIC_VERSION = P_VERSION
PYDANTIC_MINOR_VERSION = tuple(int(i) for i in P_VERSION.split(".")[:2])
IS_PYDANTIC_V2 = PYDANTIC_MINOR_VERSION[0] == 2
if TYPE_CHECKING:
from .main import RelationshipInfo, SQLModel
UnionType = getattr(types, "UnionType", Union)
NoneType = type(None)
T = TypeVar("T")
InstanceOrType = Union[T, Type[T]]
_TSQLModel = TypeVar("_TSQLModel", bound="SQLModel")
class FakeMetadata:
max_length: Optional[int] = None
max_digits: Optional[int] = None
decimal_places: Optional[int] = None
@dataclass
class ObjectWithUpdateWrapper:
obj: Any
update: Dict[str, Any]
def __getattribute__(self, __name: str) -> Any:
update = super().__getattribute__("update")
obj = super().__getattribute__("obj")
if __name in update:
return update[__name]
return getattr(obj, __name)
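# Illustration only (not part of the module): attribute access on the wrapper
# prefers keys present in `update` and falls back to the wrapped object for
# everything else, which is how `sqlmodel_validate()` applies an `update` dict
# on top of a source object without mutating it:
#
#     class _Demo:
#         name = "Spider-Boy"
#         secret_name = "Pedro Parqueador"
#
#     wrapped = ObjectWithUpdateWrapper(obj=_Demo(), update={"name": "Spider-Youngster"})
#     wrapped.name         # "Spider-Youngster", taken from the update dict
#     wrapped.secret_name  # "Pedro Parqueador", read from the wrapped object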
def _is_union_type(t: Any) -> bool:
return t is UnionType or t is Union
finish_init: ContextVar[bool] = ContextVar("finish_init", default=True)
@contextmanager
def partial_init() -> Generator[None, None, None]:
token = finish_init.set(False)
yield
finish_init.reset(token)
if IS_PYDANTIC_V2:
from annotated_types import MaxLen
from pydantic import ConfigDict as BaseConfig
from pydantic._internal._fields import PydanticMetadata
from pydantic._internal._model_construction import ModelMetaclass
from pydantic._internal._repr import Representation as Representation
from pydantic_core import PydanticUndefined as Undefined
from pydantic_core import PydanticUndefinedType as UndefinedType
# Dummy for types, to make it importable
class ModelField:
pass
class SQLModelConfig(BaseConfig, total=False):
table: Optional[bool]
registry: Optional[Any]
def get_config_value(
*, model: InstanceOrType["SQLModel"], parameter: str, default: Any = None
) -> Any:
return model.model_config.get(parameter, default)
def set_config_value(
*,
model: InstanceOrType["SQLModel"],
parameter: str,
value: Any,
) -> None:
model.model_config[parameter] = value # type: ignore[literal-required]
def get_model_fields(model: InstanceOrType[BaseModel]) -> Dict[str, "FieldInfo"]:
return model.model_fields
def get_fields_set(
object: InstanceOrType["SQLModel"],
) -> Union[Set[str], Callable[[BaseModel], Set[str]]]:
return object.model_fields_set
def init_pydantic_private_attrs(new_object: InstanceOrType["SQLModel"]) -> None:
object.__setattr__(new_object, "__pydantic_fields_set__", set())
object.__setattr__(new_object, "__pydantic_extra__", None)
object.__setattr__(new_object, "__pydantic_private__", None)
def get_annotations(class_dict: Dict[str, Any]) -> Dict[str, Any]:
return class_dict.get("__annotations__", {})
def is_table_model_class(cls: Type[Any]) -> bool:
config = getattr(cls, "model_config", {})
if config:
return config.get("table", False) or False
return False
def get_relationship_to(
name: str,
rel_info: "RelationshipInfo",
annotation: Any,
) -> Any:
origin = get_origin(annotation)
use_annotation = annotation
# Direct relationships (e.g. 'Team' or Team) have None as an origin
if origin is None:
if isinstance(use_annotation, ForwardRef):
use_annotation = use_annotation.__forward_arg__
else:
return use_annotation
# If Union (e.g. Optional), get the real field
elif _is_union_type(origin):
use_annotation = get_args(annotation)
if len(use_annotation) > 2:
raise ValueError(
"Cannot have a (non-optional) union as a SQLAlchemy field"
)
arg1, arg2 = use_annotation
if arg1 is NoneType and arg2 is not NoneType:
use_annotation = arg2
elif arg2 is NoneType and arg1 is not NoneType:
use_annotation = arg1
else:
raise ValueError(
"Cannot have a Union of None and None as a SQLAlchemy field"
)
# If a list, then also get the real field
elif origin is list:
use_annotation = get_args(annotation)[0]
return get_relationship_to(
name=name, rel_info=rel_info, annotation=use_annotation
)
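# Illustration only: how a relationship annotation resolves to its target.
# `rel_info` is not used by this Pydantic v2 code path, so it is passed as
# None here purely for the example:
#
#     get_relationship_to(name="team", rel_info=None, annotation=Optional["Team"])
#     # -> "Team"  (the ForwardRef inside the Optional is unwrapped to its string)
#     get_relationship_to(name="heroes", rel_info=None, annotation=List["Hero"])
#     # -> "Hero"  (the element type of the list is used)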
def is_field_noneable(field: "FieldInfo") -> bool:
if getattr(field, "nullable", Undefined) is not Undefined:
return field.nullable # type: ignore
origin = get_origin(field.annotation)
if origin is not None and _is_union_type(origin):
args = get_args(field.annotation)
if any(arg is NoneType for arg in args):
return True
if not field.is_required():
if field.default is Undefined:
return False
if field.annotation is None or field.annotation is NoneType: # type: ignore[comparison-overlap]
return True
return False
return False
def get_sa_type_from_type_annotation(annotation: Any) -> Any:
# Resolve Optional fields
if annotation is None:
raise ValueError("Missing field type")
origin = get_origin(annotation)
if origin is None:
return annotation
elif origin is Annotated:
return get_sa_type_from_type_annotation(get_args(annotation)[0])
if _is_union_type(origin):
bases = get_args(annotation)
if len(bases) > 2:
raise ValueError(
"Cannot have a (non-optional) union as a SQLAlchemy field"
)
# Non optional unions are not allowed
if bases[0] is not NoneType and bases[1] is not NoneType:
raise ValueError(
"Cannot have a (non-optional) union as a SQLAlchemy field"
)
# Optional unions are allowed
use_type = bases[0] if bases[0] is not NoneType else bases[1]
return get_sa_type_from_type_annotation(use_type)
return origin
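# Illustration only: the resolved SQLAlchemy-facing type is the innermost
# concrete type of the annotation:
#
#     get_sa_type_from_type_annotation(Optional[int])            # -> int
#     get_sa_type_from_type_annotation(Annotated[str, "meta"])    # -> str
#     get_sa_type_from_type_annotation(Dict[str, Any])            # -> dict (the origin)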
def get_sa_type_from_field(field: Any) -> Any:
type_: Any = field.annotation
return get_sa_type_from_type_annotation(type_)
def get_field_metadata(field: Any) -> Any:
for meta in field.metadata:
if isinstance(meta, (PydanticMetadata, MaxLen)):
return meta
return FakeMetadata()
def post_init_field_info(field_info: FieldInfo) -> None:
return None
# Dummy to make it importable
def _calculate_keys(
self: "SQLModel",
include: Optional[Mapping[Union[int, str], Any]],
exclude: Optional[Mapping[Union[int, str], Any]],
exclude_unset: bool,
update: Optional[Dict[str, Any]] = None,
) -> Optional[AbstractSet[str]]: # pragma: no cover
return None
def sqlmodel_table_construct(
*,
self_instance: _TSQLModel,
values: Dict[str, Any],
_fields_set: Union[Set[str], None] = None,
) -> _TSQLModel:
# Copy from Pydantic's BaseModel.construct()
# Ref: https://github.com/pydantic/pydantic/blob/v2.5.2/pydantic/main.py#L198
# Modified to not include everything, only the model fields, and to
# set relationships
# SQLModel override to get class SQLAlchemy __dict__ attributes and
# set them back in after creating the object
# new_obj = cls.__new__(cls)
cls = type(self_instance)
old_dict = self_instance.__dict__.copy()
# End SQLModel override
fields_values: Dict[str, Any] = {}
defaults: Dict[
str, Any
] = {} # keeping this separate from `fields_values` helps us compute `_fields_set`
for name, field in cls.model_fields.items():
if field.alias and field.alias in values:
fields_values[name] = values.pop(field.alias)
elif name in values:
fields_values[name] = values.pop(name)
elif not field.is_required():
defaults[name] = field.get_default(call_default_factory=True)
if _fields_set is None:
_fields_set = set(fields_values.keys())
fields_values.update(defaults)
_extra: Union[Dict[str, Any], None] = None
if cls.model_config.get("extra") == "allow":
_extra = {}
for k, v in values.items():
_extra[k] = v
# SQLModel override, do not include everything, only the model fields
# else:
# fields_values.update(values)
# End SQLModel override
# SQLModel override
# Do not set __dict__, instead use setattr to trigger SQLAlchemy
# object.__setattr__(new_obj, "__dict__", fields_values)
# instrumentation
for key, value in {**old_dict, **fields_values}.items():
setattr(self_instance, key, value)
# End SQLModel override
object.__setattr__(self_instance, "__pydantic_fields_set__", _fields_set)
if not cls.__pydantic_root_model__:
object.__setattr__(self_instance, "__pydantic_extra__", _extra)
if cls.__pydantic_post_init__:
self_instance.model_post_init(None)
elif not cls.__pydantic_root_model__:
# Note: if there are any private attributes, cls.__pydantic_post_init__ would exist
# Since it doesn't, that means that `__pydantic_private__` should be set to None
object.__setattr__(self_instance, "__pydantic_private__", None)
# SQLModel override, set relationships
# Get and set any relationship objects
for key in self_instance.__sqlmodel_relationships__:
value = values.get(key, Undefined)
if value is not Undefined:
setattr(self_instance, key, value)
# End SQLModel override
return self_instance
def sqlmodel_validate(
cls: Type[_TSQLModel],
obj: Any,
*,
strict: Union[bool, None] = None,
from_attributes: Union[bool, None] = None,
context: Union[Dict[str, Any], None] = None,
update: Union[Dict[str, Any], None] = None,
) -> _TSQLModel:
if not is_table_model_class(cls):
new_obj: _TSQLModel = cls.__new__(cls)
else:
# If table, create the new instance normally to make SQLAlchemy create
# the _sa_instance_state attribute
# The wrapper of this function should use `with partial_init()`
with partial_init():
new_obj = cls()
# SQLModel Override to get class SQLAlchemy __dict__ attributes and
# set them back in after creating the object
old_dict = new_obj.__dict__.copy()
use_obj = obj
if isinstance(obj, dict) and update:
use_obj = {**obj, **update}
elif update:
use_obj = ObjectWithUpdateWrapper(obj=obj, update=update)
cls.__pydantic_validator__.validate_python(
use_obj,
strict=strict,
from_attributes=from_attributes,
context=context,
self_instance=new_obj,
)
# Capture fields set to restore it later
fields_set = new_obj.__pydantic_fields_set__.copy()
if not is_table_model_class(cls):
# If not table, normal Pydantic code, set __dict__
new_obj.__dict__ = {**old_dict, **new_obj.__dict__}
else:
# Do not set __dict__, instead use setattr to trigger SQLAlchemy
# instrumentation
for key, value in {**old_dict, **new_obj.__dict__}.items():
setattr(new_obj, key, value)
# Restore fields set
object.__setattr__(new_obj, "__pydantic_fields_set__", fields_set)
# Get and set any relationship objects
if is_table_model_class(cls):
for key in new_obj.__sqlmodel_relationships__:
value = getattr(use_obj, key, Undefined)
if value is not Undefined:
setattr(new_obj, key, value)
return new_obj
def sqlmodel_init(*, self: "SQLModel", data: Dict[str, Any]) -> None:
old_dict = self.__dict__.copy()
if not is_table_model_class(self.__class__):
self.__pydantic_validator__.validate_python(
data,
self_instance=self,
)
else:
sqlmodel_table_construct(
self_instance=self,
values=data,
)
object.__setattr__(
self,
"__dict__",
{**old_dict, **self.__dict__},
)
else:
from pydantic import BaseConfig as BaseConfig # type: ignore[assignment]
from pydantic.errors import ConfigError
from pydantic.fields import ( # type: ignore[attr-defined, no-redef]
SHAPE_SINGLETON,
ModelField,
)
from pydantic.fields import ( # type: ignore[attr-defined, no-redef]
Undefined as Undefined, # noqa
)
from pydantic.fields import ( # type: ignore[attr-defined, no-redef]
UndefinedType as UndefinedType,
)
from pydantic.main import ( # type: ignore[no-redef]
ModelMetaclass as ModelMetaclass,
)
from pydantic.main import validate_model
from pydantic.typing import resolve_annotations
from pydantic.utils import ROOT_KEY, ValueItems
from pydantic.utils import ( # type: ignore[no-redef]
Representation as Representation,
)
class SQLModelConfig(BaseConfig): # type: ignore[no-redef]
table: Optional[bool] = None # type: ignore[misc]
registry: Optional[Any] = None # type: ignore[misc]
def get_config_value(
*, model: InstanceOrType["SQLModel"], parameter: str, default: Any = None
) -> Any:
return getattr(model.__config__, parameter, default) # type: ignore[union-attr]
def set_config_value(
*,
model: InstanceOrType["SQLModel"],
parameter: str,
value: Any,
) -> None:
setattr(model.__config__, parameter, value) # type: ignore
def get_model_fields(model: InstanceOrType[BaseModel]) -> Dict[str, "FieldInfo"]:
return model.__fields__ # type: ignore
def get_fields_set(
object: InstanceOrType["SQLModel"],
) -> Union[Set[str], Callable[[BaseModel], Set[str]]]:
return object.__fields_set__
def init_pydantic_private_attrs(new_object: InstanceOrType["SQLModel"]) -> None:
object.__setattr__(new_object, "__fields_set__", set())
def get_annotations(class_dict: Dict[str, Any]) -> Dict[str, Any]:
return resolve_annotations( # type: ignore[no-any-return]
class_dict.get("__annotations__", {}),
class_dict.get("__module__", None),
)
def is_table_model_class(cls: Type[Any]) -> bool:
config = getattr(cls, "__config__", None)
if config:
return getattr(config, "table", False)
return False
def get_relationship_to(
name: str,
rel_info: "RelationshipInfo",
annotation: Any,
) -> Any:
temp_field = ModelField.infer( # type: ignore[attr-defined]
name=name,
value=rel_info,
annotation=annotation,
class_validators=None,
config=SQLModelConfig,
)
relationship_to = temp_field.type_
if isinstance(temp_field.type_, ForwardRef):
relationship_to = temp_field.type_.__forward_arg__
return relationship_to
def is_field_noneable(field: "FieldInfo") -> bool:
if not field.required: # type: ignore[attr-defined]
# Taken from [Pydantic](https://github.com/samuelcolvin/pydantic/blob/v1.8.2/pydantic/fields.py#L946-L947)
return field.allow_none and ( # type: ignore[attr-defined]
field.shape != SHAPE_SINGLETON or not field.sub_fields # type: ignore[attr-defined]
)
return field.allow_none # type: ignore[no-any-return, attr-defined]
def get_sa_type_from_field(field: Any) -> Any:
if isinstance(field.type_, type) and field.shape == SHAPE_SINGLETON:
return field.type_
raise ValueError(f"The field {field.name} has no matching SQLAlchemy type")
def get_field_metadata(field: Any) -> Any:
metadata = FakeMetadata()
metadata.max_length = field.field_info.max_length
metadata.max_digits = getattr(field.type_, "max_digits", None)
metadata.decimal_places = getattr(field.type_, "decimal_places", None)
return metadata
def post_init_field_info(field_info: FieldInfo) -> None:
field_info._validate() # type: ignore[attr-defined]
def _calculate_keys(
self: "SQLModel",
include: Optional[Mapping[Union[int, str], Any]],
exclude: Optional[Mapping[Union[int, str], Any]],
exclude_unset: bool,
update: Optional[Dict[str, Any]] = None,
) -> Optional[AbstractSet[str]]:
if include is None and exclude is None and not exclude_unset:
# Original in Pydantic:
# return None
# Updated to not return SQLAlchemy attributes
# Do not include relationships as that would easily lead to infinite
# recursion, or traversing the whole database
return (
self.__fields__.keys() # noqa
) # | self.__sqlmodel_relationships__.keys()
keys: AbstractSet[str]
if exclude_unset:
keys = self.__fields_set__.copy() # noqa
else:
# Original in Pydantic:
# keys = self.__dict__.keys()
# Updated to not return SQLAlchemy attributes
# Do not include relationships as that would easily lead to infinite
# recursion, or traversing the whole database
keys = (
self.__fields__.keys() # noqa
) # | self.__sqlmodel_relationships__.keys()
if include is not None:
keys &= include.keys()
if update:
keys -= update.keys()
if exclude:
keys -= {k for k, v in exclude.items() if ValueItems.is_true(v)}
return keys
def sqlmodel_validate(
cls: Type[_TSQLModel],
obj: Any,
*,
strict: Union[bool, None] = None,
from_attributes: Union[bool, None] = None,
context: Union[Dict[str, Any], None] = None,
update: Union[Dict[str, Any], None] = None,
) -> _TSQLModel:
# This was SQLModel's original from_orm() for Pydantic v1
# Duplicated from Pydantic
if not cls.__config__.orm_mode: # type: ignore[attr-defined] # noqa
raise ConfigError(
"You must have the config attribute orm_mode=True to use from_orm"
)
if not isinstance(obj, Mapping):
obj = (
{ROOT_KEY: obj}
if cls.__custom_root_type__ # type: ignore[attr-defined] # noqa
else cls._decompose_class(obj) # type: ignore[attr-defined] # noqa
)
# SQLModel, support update dict
if update is not None:
obj = {**obj, **update}
# End SQLModel support dict
if not getattr(cls.__config__, "table", False): # noqa
# If not table, normal Pydantic code
m: _TSQLModel = cls.__new__(cls)
else:
# If table, create the new instance normally to make SQLAlchemy create
# the _sa_instance_state attribute
m = cls()
values, fields_set, validation_error = validate_model(cls, obj)
if validation_error:
raise validation_error
# Updated to trigger SQLAlchemy internal handling
if not getattr(cls.__config__, "table", False): # noqa
object.__setattr__(m, "__dict__", values)
else:
for key, value in values.items():
setattr(m, key, value)
# Continue with standard Pydantic logic
object.__setattr__(m, "__fields_set__", fields_set)
m._init_private_attributes() # type: ignore[attr-defined] # noqa
return m
def sqlmodel_init(*, self: "SQLModel", data: Dict[str, Any]) -> None:
values, fields_set, validation_error = validate_model(self.__class__, data)
# Only raise validation errors if this is not a table model
if (
not is_table_model_class(self.__class__) # noqa
and validation_error
):
raise validation_error
if not is_table_model_class(self.__class__):
object.__setattr__(self, "__dict__", values)
else:
# Do not set values as in Pydantic, pass them through setattr, so
# SQLAlchemy can handle them
for key, value in values.items():
setattr(self, key, value)
object.__setattr__(self, "__fields_set__", fields_set)
non_pydantic_keys = data.keys() - values.keys()
if is_table_model_class(self.__class__):
for key in non_pydantic_keys:
if key in self.__sqlmodel_relationships__:
setattr(self, key, data[key])


@@ -0,0 +1,32 @@
from typing import Any, TypeVar
class _DefaultPlaceholder:
"""
You shouldn't use this class directly.
It's used internally to recognize when a default value has been overwritten, even
if the overridden default value was truthy.
"""
def __init__(self, value: Any):
self.value = value
def __bool__(self) -> bool:
return bool(self.value)
def __eq__(self, o: object) -> bool:
return isinstance(o, _DefaultPlaceholder) and o.value == self.value
_TDefaultType = TypeVar("_TDefaultType")
def Default(value: _TDefaultType) -> _TDefaultType:
"""
You shouldn't use this function directly.
It's used internally to recognize when a default value has been overwritten, even
if the overridden default value was truthy.
"""
return _DefaultPlaceholder(value) # type: ignore
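A short sketch (not from this commit) of how such a sentinel is typically consumed: a parameter defaults to `Default(...)`, and the consumer checks for `_DefaultPlaceholder` to distinguish "never passed" from "explicitly passed a falsy value". The `resolve()` function is a hypothetical consumer:

```Python
def resolve(flag: bool = Default(True)) -> bool:  # hypothetical consumer
    if isinstance(flag, _DefaultPlaceholder):
        # The caller did not override the default, so unwrap the placeholder.
        return flag.value
    return flag


assert resolve() is True        # default placeholder unwrapped
assert resolve(False) is False  # an explicit falsy override is preserved
```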


@@ -0,0 +1,150 @@
from typing import (
Any,
Dict,
Mapping,
Optional,
Sequence,
Type,
TypeVar,
Union,
cast,
overload,
)
from sqlalchemy import util
from sqlalchemy.engine.interfaces import _CoreAnyExecuteParams
from sqlalchemy.engine.result import Result, ScalarResult, TupleResult
from sqlalchemy.ext.asyncio import AsyncSession as _AsyncSession
from sqlalchemy.ext.asyncio.result import _ensure_sync_result
from sqlalchemy.ext.asyncio.session import _EXECUTE_OPTIONS
from sqlalchemy.orm._typing import OrmExecuteOptionsParameter
from sqlalchemy.sql.base import Executable as _Executable
from sqlalchemy.util.concurrency import greenlet_spawn
from typing_extensions import deprecated
from ...orm.session import Session
from ...sql.base import Executable
from ...sql.expression import Select, SelectOfScalar
_TSelectParam = TypeVar("_TSelectParam", bound=Any)
class AsyncSession(_AsyncSession):
sync_session_class: Type[Session] = Session
sync_session: Session
@overload
async def exec(
self,
statement: Select[_TSelectParam],
*,
params: Optional[Union[Mapping[str, Any], Sequence[Mapping[str, Any]]]] = None,
execution_options: Mapping[str, Any] = util.EMPTY_DICT,
bind_arguments: Optional[Dict[str, Any]] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
) -> TupleResult[_TSelectParam]: ...
@overload
async def exec(
self,
statement: SelectOfScalar[_TSelectParam],
*,
params: Optional[Union[Mapping[str, Any], Sequence[Mapping[str, Any]]]] = None,
execution_options: Mapping[str, Any] = util.EMPTY_DICT,
bind_arguments: Optional[Dict[str, Any]] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
) -> ScalarResult[_TSelectParam]: ...
async def exec(
self,
statement: Union[
Select[_TSelectParam],
SelectOfScalar[_TSelectParam],
Executable[_TSelectParam],
],
*,
params: Optional[Union[Mapping[str, Any], Sequence[Mapping[str, Any]]]] = None,
execution_options: Mapping[str, Any] = util.EMPTY_DICT,
bind_arguments: Optional[Dict[str, Any]] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
) -> Union[TupleResult[_TSelectParam], ScalarResult[_TSelectParam]]:
if execution_options:
execution_options = util.immutabledict(execution_options).union(
_EXECUTE_OPTIONS
)
else:
execution_options = _EXECUTE_OPTIONS
result = await greenlet_spawn(
self.sync_session.exec,
statement,
params=params,
execution_options=execution_options,
bind_arguments=bind_arguments,
_parent_execute_state=_parent_execute_state,
_add_event=_add_event,
)
result_value = await _ensure_sync_result(
cast(Result[_TSelectParam], result), self.exec
)
return result_value # type: ignore
@deprecated(
"""
🚨 You probably want to use `session.exec()` instead of `session.execute()`.
This is the original SQLAlchemy `session.execute()` method that returns objects
of type `Row`, on which you have to call `scalars()` to get the model objects.
For example:
```Python
heroes = (await session.execute(select(Hero))).scalars().all()
```
Instead you could use `exec()`:
```Python
heroes = (await session.exec(select(Hero))).all()
```
```
"""
)
async def execute( # type: ignore
self,
statement: _Executable,
params: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[Dict[str, Any]] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
) -> Result[Any]:
"""
🚨 You probably want to use `session.exec()` instead of `session.execute()`.
This is the original SQLAlchemy `session.execute()` method that returns objects
of type `Row`, on which you have to call `scalars()` to get the model objects.
For example:
```Python
heroes = (await session.execute(select(Hero))).scalars().all()
```
Instead you could use `exec()`:
```Python
heroes = (await session.exec(select(Hero))).all()
```
```
"""
return await super().execute(
statement,
params=params,
execution_options=execution_options,
bind_arguments=bind_arguments,
_parent_execute_state=_parent_execute_state,
_add_event=_add_event,
)
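A brief usage sketch, assuming the module is importable as `sqlmodel.ext.asyncio.session`, that the `aiosqlite` driver is installed, and a hypothetical `Hero` model; note that `exec()` has to be awaited before `.all()` is called on its result:

```Python
from typing import Optional

from sqlalchemy.ext.asyncio import create_async_engine
from sqlmodel import Field, SQLModel, select
from sqlmodel.ext.asyncio.session import AsyncSession


class Hero(SQLModel, table=True):  # hypothetical example model
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str


async def get_heroes() -> None:
    engine = create_async_engine("sqlite+aiosqlite://")
    async with engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.create_all)
    async with AsyncSession(engine) as session:
        heroes = (await session.exec(select(Hero))).all()
        print(heroes)
```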

File diff suppressed because it is too large.


@@ -0,0 +1,159 @@
from typing import (
Any,
Dict,
Mapping,
Optional,
Sequence,
TypeVar,
Union,
overload,
)
from sqlalchemy import util
from sqlalchemy.engine.interfaces import _CoreAnyExecuteParams
from sqlalchemy.engine.result import Result, ScalarResult, TupleResult
from sqlalchemy.orm import Query as _Query
from sqlalchemy.orm import Session as _Session
from sqlalchemy.orm._typing import OrmExecuteOptionsParameter
from sqlalchemy.sql._typing import _ColumnsClauseArgument
from sqlalchemy.sql.base import Executable as _Executable
from sqlmodel.sql.base import Executable
from sqlmodel.sql.expression import Select, SelectOfScalar
from typing_extensions import deprecated
_TSelectParam = TypeVar("_TSelectParam", bound=Any)
class Session(_Session):
@overload
def exec(
self,
statement: Select[_TSelectParam],
*,
params: Optional[Union[Mapping[str, Any], Sequence[Mapping[str, Any]]]] = None,
execution_options: Mapping[str, Any] = util.EMPTY_DICT,
bind_arguments: Optional[Dict[str, Any]] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
) -> TupleResult[_TSelectParam]: ...
@overload
def exec(
self,
statement: SelectOfScalar[_TSelectParam],
*,
params: Optional[Union[Mapping[str, Any], Sequence[Mapping[str, Any]]]] = None,
execution_options: Mapping[str, Any] = util.EMPTY_DICT,
bind_arguments: Optional[Dict[str, Any]] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
) -> ScalarResult[_TSelectParam]: ...
def exec(
self,
statement: Union[
Select[_TSelectParam],
SelectOfScalar[_TSelectParam],
Executable[_TSelectParam],
],
*,
params: Optional[Union[Mapping[str, Any], Sequence[Mapping[str, Any]]]] = None,
execution_options: Mapping[str, Any] = util.EMPTY_DICT,
bind_arguments: Optional[Dict[str, Any]] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
) -> Union[TupleResult[_TSelectParam], ScalarResult[_TSelectParam]]:
results = super().execute(
statement,
params=params,
execution_options=execution_options,
bind_arguments=bind_arguments,
_parent_execute_state=_parent_execute_state,
_add_event=_add_event,
)
if isinstance(statement, SelectOfScalar):
return results.scalars()
return results # type: ignore
@deprecated(
"""
🚨 You probably want to use `session.exec()` instead of `session.execute()`.
This is the original SQLAlchemy `session.execute()` method that returns objects
of type `Row`, on which you have to call `scalars()` to get the model objects.
For example:
```Python
heroes = session.execute(select(Hero)).scalars().all()
```
Instead you could use `exec()`:
```Python
heroes = session.exec(select(Hero)).all()
```
""",
category=None,
)
def execute( # type: ignore
self,
statement: _Executable,
params: Optional[_CoreAnyExecuteParams] = None,
*,
execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
bind_arguments: Optional[Dict[str, Any]] = None,
_parent_execute_state: Optional[Any] = None,
_add_event: Optional[Any] = None,
) -> Result[Any]:
"""
🚨 You probably want to use `session.exec()` instead of `session.execute()`.
This is the original SQLAlchemy `session.execute()` method that returns objects
of type `Row`, on which you have to call `scalars()` to get the model objects.
For example:
```Python
heroes = session.execute(select(Hero)).scalars().all()
```
Instead you could use `exec()`:
```Python
heroes = session.exec(select(Hero)).all()
```
"""
return super().execute(
statement,
params=params,
execution_options=execution_options,
bind_arguments=bind_arguments,
_parent_execute_state=_parent_execute_state,
_add_event=_add_event,
)
@deprecated(
"""
🚨 You probably want to use `session.exec()` instead of `session.query()`.
`session.exec()` is SQLModel's own shorter version with improved type
annotations.
Otherwise you might want to use `session.execute()` instead of
`session.query()`.
"""
)
def query( # type: ignore
self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any
) -> _Query[Any]:
"""
🚨 You probably want to use `session.exec()` instead of `session.query()`.
`session.exec()` is SQLModel's own shorter version with improved type
annotations.
Otherwise you might want to use `session.execute()` instead of
`session.query()`.
"""
return super().query(*entities, **kwargs)
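A hedged sketch of what the two `exec()` overloads above mean in practice: selecting a single model class goes through the `SelectOfScalar` branch and yields model instances directly, while selecting individual columns yields rows as tuples. `Hero` is again a hypothetical model:

```Python
from typing import Optional

from sqlmodel import Field, Session, SQLModel, create_engine, select


class Hero(SQLModel, table=True):  # hypothetical example model
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str


engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Hero(name="Rusty-Man"))
    session.commit()
    heroes = session.exec(select(Hero)).all()              # Hero instances (ScalarResult)
    rows = session.exec(select(Hero.id, Hero.name)).all()  # (id, name) tuples (TupleResult)
```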


@@ -0,0 +1 @@
from sqlalchemy.pool import StaticPool as StaticPool # noqa: F401


@@ -0,0 +1,43 @@
from typing import (
Tuple,
TypeVar,
Union,
)
from sqlalchemy.sql._typing import (
_ColumnExpressionArgument,
)
from sqlalchemy.sql.expression import Select as _Select
from typing_extensions import Self
_T = TypeVar("_T")
# Split this class into SelectBase, Select, and SelectOfScalar so that they can share
# where() and having() without type overlap incompatibility in session.exec().
class SelectBase(_Select[Tuple[_T]]):
inherit_cache = True
def where(self, *whereclause: Union[_ColumnExpressionArgument[bool], bool]) -> Self:
"""Return a new `Select` construct with the given expression added to
its `WHERE` clause, joined to the existing clause via `AND`, if any.
"""
return super().where(*whereclause) # type: ignore[arg-type]
def having(self, *having: Union[_ColumnExpressionArgument[bool], bool]) -> Self:
"""Return a new `Select` construct with the given expression added to
its `HAVING` clause, joined to the existing clause via `AND`, if any.
"""
return super().having(*having) # type: ignore[arg-type]
class Select(SelectBase[_T]):
inherit_cache = True
# This is not comparable to sqlalchemy.sql.selectable.ScalarSelect, which has a different
# purpose. This is the same as a normal SQLAlchemy Select class where there's only one
# entity, so the result will be converted to a scalar by default. This way, writing
# for loops over the results feels natural.
class SelectOfScalar(SelectBase[_T]):
inherit_cache = True
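A small sketch (with a hypothetical `Hero` model that has `name` and `age` fields) of why `where()` and `having()` are overridden to return `Self`: chaining keeps the statement typed as `SelectOfScalar[Hero]`, so the type checker still knows that executing it yields `Hero` instances rather than `Row` tuples:

```Python
from sqlmodel import select  # builds on the Select/SelectOfScalar classes above

statement = select(Hero).where(Hero.age > 30).where(Hero.name != "Rusty-Man")
# `statement` is still a SelectOfScalar[Hero] after the chained where() calls.
```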


@@ -0,0 +1,367 @@
# WARNING: do not modify this code, it is generated by _expression_select_gen.py.jinja2
from datetime import datetime
from typing import (
Any,
Mapping,
Sequence,
Tuple,
Type,
TypeVar,
Union,
overload,
)
from uuid import UUID
from sqlalchemy import (
Column,
)
from sqlalchemy.sql.elements import (
SQLCoreOperations,
)
from sqlalchemy.sql.roles import TypedColumnsClauseRole
from ._expression_select_cls import Select, SelectOfScalar
_T = TypeVar("_T")
_TCCA = Union[
TypedColumnsClauseRole[_T],
SQLCoreOperations[_T],
Type[_T],
]
# Generated TypeVars start
_TScalar_0 = TypeVar(
"_TScalar_0",
Column, # type: ignore
Sequence, # type: ignore
Mapping, # type: ignore
UUID,
datetime,
float,
int,
bool,
bytes,
str,
None,
)
_T0 = TypeVar("_T0")
_TScalar_1 = TypeVar(
"_TScalar_1",
Column, # type: ignore
Sequence, # type: ignore
Mapping, # type: ignore
UUID,
datetime,
float,
int,
bool,
bytes,
str,
None,
)
_T1 = TypeVar("_T1")
_TScalar_2 = TypeVar(
"_TScalar_2",
Column, # type: ignore
Sequence, # type: ignore
Mapping, # type: ignore
UUID,
datetime,
float,
int,
bool,
bytes,
str,
None,
)
_T2 = TypeVar("_T2")
_TScalar_3 = TypeVar(
"_TScalar_3",
Column, # type: ignore
Sequence, # type: ignore
Mapping, # type: ignore
UUID,
datetime,
float,
int,
bool,
bytes,
str,
None,
)
_T3 = TypeVar("_T3")
# Generated TypeVars end
@overload
def select(__ent0: _TCCA[_T0]) -> SelectOfScalar[_T0]: ...
@overload
def select(__ent0: _TScalar_0) -> SelectOfScalar[_TScalar_0]: # type: ignore
...
# Generated overloads start
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
__ent1: _TCCA[_T1],
) -> Select[Tuple[_T0, _T1]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
entity_1: _TScalar_1,
) -> Select[Tuple[_T0, _TScalar_1]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
__ent1: _TCCA[_T1],
) -> Select[Tuple[_TScalar_0, _T1]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
entity_1: _TScalar_1,
) -> Select[Tuple[_TScalar_0, _TScalar_1]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
__ent1: _TCCA[_T1],
__ent2: _TCCA[_T2],
) -> Select[Tuple[_T0, _T1, _T2]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
__ent1: _TCCA[_T1],
entity_2: _TScalar_2,
) -> Select[Tuple[_T0, _T1, _TScalar_2]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
entity_1: _TScalar_1,
__ent2: _TCCA[_T2],
) -> Select[Tuple[_T0, _TScalar_1, _T2]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
entity_1: _TScalar_1,
entity_2: _TScalar_2,
) -> Select[Tuple[_T0, _TScalar_1, _TScalar_2]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
__ent1: _TCCA[_T1],
__ent2: _TCCA[_T2],
) -> Select[Tuple[_TScalar_0, _T1, _T2]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
__ent1: _TCCA[_T1],
entity_2: _TScalar_2,
) -> Select[Tuple[_TScalar_0, _T1, _TScalar_2]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
entity_1: _TScalar_1,
__ent2: _TCCA[_T2],
) -> Select[Tuple[_TScalar_0, _TScalar_1, _T2]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
entity_1: _TScalar_1,
entity_2: _TScalar_2,
) -> Select[Tuple[_TScalar_0, _TScalar_1, _TScalar_2]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
__ent1: _TCCA[_T1],
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
) -> Select[Tuple[_T0, _T1, _T2, _T3]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
__ent1: _TCCA[_T1],
__ent2: _TCCA[_T2],
entity_3: _TScalar_3,
) -> Select[Tuple[_T0, _T1, _T2, _TScalar_3]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
__ent1: _TCCA[_T1],
entity_2: _TScalar_2,
__ent3: _TCCA[_T3],
) -> Select[Tuple[_T0, _T1, _TScalar_2, _T3]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
__ent1: _TCCA[_T1],
entity_2: _TScalar_2,
entity_3: _TScalar_3,
) -> Select[Tuple[_T0, _T1, _TScalar_2, _TScalar_3]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
entity_1: _TScalar_1,
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
) -> Select[Tuple[_T0, _TScalar_1, _T2, _T3]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
entity_1: _TScalar_1,
__ent2: _TCCA[_T2],
entity_3: _TScalar_3,
) -> Select[Tuple[_T0, _TScalar_1, _T2, _TScalar_3]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
entity_1: _TScalar_1,
entity_2: _TScalar_2,
__ent3: _TCCA[_T3],
) -> Select[Tuple[_T0, _TScalar_1, _TScalar_2, _T3]]: ...
@overload
def select( # type: ignore
__ent0: _TCCA[_T0],
entity_1: _TScalar_1,
entity_2: _TScalar_2,
entity_3: _TScalar_3,
) -> Select[Tuple[_T0, _TScalar_1, _TScalar_2, _TScalar_3]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
__ent1: _TCCA[_T1],
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
) -> Select[Tuple[_TScalar_0, _T1, _T2, _T3]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
__ent1: _TCCA[_T1],
__ent2: _TCCA[_T2],
entity_3: _TScalar_3,
) -> Select[Tuple[_TScalar_0, _T1, _T2, _TScalar_3]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
__ent1: _TCCA[_T1],
entity_2: _TScalar_2,
__ent3: _TCCA[_T3],
) -> Select[Tuple[_TScalar_0, _T1, _TScalar_2, _T3]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
__ent1: _TCCA[_T1],
entity_2: _TScalar_2,
entity_3: _TScalar_3,
) -> Select[Tuple[_TScalar_0, _T1, _TScalar_2, _TScalar_3]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
entity_1: _TScalar_1,
__ent2: _TCCA[_T2],
__ent3: _TCCA[_T3],
) -> Select[Tuple[_TScalar_0, _TScalar_1, _T2, _T3]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
entity_1: _TScalar_1,
__ent2: _TCCA[_T2],
entity_3: _TScalar_3,
) -> Select[Tuple[_TScalar_0, _TScalar_1, _T2, _TScalar_3]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
entity_1: _TScalar_1,
entity_2: _TScalar_2,
__ent3: _TCCA[_T3],
) -> Select[Tuple[_TScalar_0, _TScalar_1, _TScalar_2, _T3]]: ...
@overload
def select( # type: ignore
entity_0: _TScalar_0,
entity_1: _TScalar_1,
entity_2: _TScalar_2,
entity_3: _TScalar_3,
) -> Select[Tuple[_TScalar_0, _TScalar_1, _TScalar_2, _TScalar_3]]: ...
# Generated overloads end
def select(*entities: Any) -> Union[Select, SelectOfScalar]: # type: ignore
if len(entities) == 1:
return SelectOfScalar(*entities)
return Select(*entities)


@@ -0,0 +1,84 @@
from datetime import datetime
from typing import (
Any,
Mapping,
Sequence,
Tuple,
Type,
TypeVar,
Union,
overload,
)
from uuid import UUID
from sqlalchemy import (
Column,
)
from sqlalchemy.sql.elements import (
SQLCoreOperations,
)
from sqlalchemy.sql.roles import TypedColumnsClauseRole
from ._expression_select_cls import Select, SelectOfScalar
_T = TypeVar("_T")
_TCCA = Union[
TypedColumnsClauseRole[_T],
SQLCoreOperations[_T],
Type[_T],
]
# Generated TypeVars start
{% for i in range(number_of_types) %}
_TScalar_{{ i }} = TypeVar(
"_TScalar_{{ i }}",
Column, # type: ignore
Sequence, # type: ignore
Mapping, # type: ignore
UUID,
datetime,
float,
int,
bool,
bytes,
str,
None,
)
_T{{ i }} = TypeVar("_T{{ i }}")
{% endfor %}
# Generated TypeVars end
@overload
def select(__ent0: _TCCA[_T0]) -> SelectOfScalar[_T0]: ...
@overload
def select(__ent0: _TScalar_0) -> SelectOfScalar[_TScalar_0]: # type: ignore
...
# Generated overloads start
{% for signature in signatures %}
@overload
def select( # type: ignore
{% for arg in signature[0] %}{{ arg.name }}: {{ arg.annotation }}, {% endfor %}
) -> Select[Tuple[{%for ret in signature[1] %}{{ ret }} {% if not loop.last %}, {% endif %}{% endfor %}]]: ...
{% endfor %}
# Generated overloads end
def select(*entities: Any) -> Union[Select, SelectOfScalar]: # type: ignore
if len(entities) == 1:
return SelectOfScalar(*entities)
return Select(*entities)


@@ -0,0 +1,9 @@
from typing import Generic, TypeVar
from sqlalchemy.sql.base import Executable as _Executable
_T = TypeVar("_T")
class Executable(_Executable, Generic[_T]):
pass


@@ -0,0 +1,215 @@
from typing import (
Any,
Iterable,
Mapping,
Optional,
Sequence,
Tuple,
Type,
TypeVar,
Union,
)
import sqlalchemy
from sqlalchemy import (
Column,
ColumnElement,
Extract,
FunctionElement,
FunctionFilter,
Label,
Over,
TypeCoerce,
WithinGroup,
)
from sqlalchemy.orm import InstrumentedAttribute, Mapped
from sqlalchemy.sql._typing import (
_ColumnExpressionArgument,
_ColumnExpressionOrLiteralArgument,
_ColumnExpressionOrStrLabelArgument,
)
from sqlalchemy.sql.elements import (
BinaryExpression,
Case,
Cast,
CollectionAggregate,
ColumnClause,
TryCast,
UnaryExpression,
)
from sqlalchemy.sql.type_api import TypeEngine
from typing_extensions import Literal
from ._expression_select_cls import Select as Select
from ._expression_select_cls import SelectOfScalar as SelectOfScalar
from ._expression_select_gen import select as select
_T = TypeVar("_T")
_TypeEngineArgument = Union[Type[TypeEngine[_T]], TypeEngine[_T]]
# Redefine operators that would only take a column expression to also take the (virtual)
# types of Pydantic models, e.g. str instead of only Mapped[str].
def all_(expr: Union[_ColumnExpressionArgument[_T], _T]) -> CollectionAggregate[bool]:
return sqlalchemy.all_(expr) # type: ignore[arg-type]
def and_(
initial_clause: Union[Literal[True], _ColumnExpressionArgument[bool], bool],
*clauses: Union[_ColumnExpressionArgument[bool], bool],
) -> ColumnElement[bool]:
return sqlalchemy.and_(initial_clause, *clauses) # type: ignore[arg-type]
def any_(expr: Union[_ColumnExpressionArgument[_T], _T]) -> CollectionAggregate[bool]:
return sqlalchemy.any_(expr) # type: ignore[arg-type]
def asc(
column: Union[_ColumnExpressionOrStrLabelArgument[_T], _T],
) -> UnaryExpression[_T]:
return sqlalchemy.asc(column) # type: ignore[arg-type]
def collate(
expression: Union[_ColumnExpressionArgument[str], str], collation: str
) -> BinaryExpression[str]:
return sqlalchemy.collate(expression, collation) # type: ignore[arg-type]
def between(
expr: Union[_ColumnExpressionOrLiteralArgument[_T], _T],
lower_bound: Any,
upper_bound: Any,
symmetric: bool = False,
) -> BinaryExpression[bool]:
return sqlalchemy.between(expr, lower_bound, upper_bound, symmetric=symmetric)
def not_(clause: Union[_ColumnExpressionArgument[_T], _T]) -> ColumnElement[_T]:
return sqlalchemy.not_(clause) # type: ignore[arg-type]
def case(
*whens: Union[
Tuple[Union[_ColumnExpressionArgument[bool], bool], Any], Mapping[Any, Any]
],
value: Optional[Any] = None,
else_: Optional[Any] = None,
) -> Case[Any]:
return sqlalchemy.case(*whens, value=value, else_=else_) # type: ignore[arg-type]
def cast(
expression: Union[_ColumnExpressionOrLiteralArgument[Any], Any],
type_: "_TypeEngineArgument[_T]",
) -> Cast[_T]:
return sqlalchemy.cast(expression, type_)
def try_cast(
expression: Union[_ColumnExpressionOrLiteralArgument[Any], Any],
type_: "_TypeEngineArgument[_T]",
) -> TryCast[_T]:
return sqlalchemy.try_cast(expression, type_)
def desc(
column: Union[_ColumnExpressionOrStrLabelArgument[_T], _T],
) -> UnaryExpression[_T]:
return sqlalchemy.desc(column) # type: ignore[arg-type]
def distinct(expr: Union[_ColumnExpressionArgument[_T], _T]) -> UnaryExpression[_T]:
return sqlalchemy.distinct(expr) # type: ignore[arg-type]
def bitwise_not(expr: Union[_ColumnExpressionArgument[_T], _T]) -> UnaryExpression[_T]:
return sqlalchemy.bitwise_not(expr) # type: ignore[arg-type]
def extract(field: str, expr: Union[_ColumnExpressionArgument[Any], Any]) -> Extract:
return sqlalchemy.extract(field, expr)
def funcfilter(
func: FunctionElement[_T], *criterion: Union[_ColumnExpressionArgument[bool], bool]
) -> FunctionFilter[_T]:
return sqlalchemy.funcfilter(func, *criterion) # type: ignore[arg-type]
def label(
name: str,
element: Union[_ColumnExpressionArgument[_T], _T],
type_: Optional["_TypeEngineArgument[_T]"] = None,
) -> Label[_T]:
return sqlalchemy.label(name, element, type_=type_) # type: ignore[arg-type]
def nulls_first(
column: Union[_ColumnExpressionArgument[_T], _T],
) -> UnaryExpression[_T]:
return sqlalchemy.nulls_first(column) # type: ignore[arg-type]
def nulls_last(column: Union[_ColumnExpressionArgument[_T], _T]) -> UnaryExpression[_T]:
return sqlalchemy.nulls_last(column) # type: ignore[arg-type]
def or_(
initial_clause: Union[Literal[False], _ColumnExpressionArgument[bool], bool],
*clauses: Union[_ColumnExpressionArgument[bool], bool],
) -> ColumnElement[bool]:
return sqlalchemy.or_(initial_clause, *clauses) # type: ignore[arg-type]
def over(
element: FunctionElement[_T],
partition_by: Optional[
Union[
Iterable[Union[_ColumnExpressionArgument[Any], Any]],
_ColumnExpressionArgument[Any],
Any,
]
] = None,
order_by: Optional[
Union[
Iterable[Union[_ColumnExpressionArgument[Any], Any]],
_ColumnExpressionArgument[Any],
Any,
]
] = None,
range_: Optional[Tuple[Optional[int], Optional[int]]] = None,
rows: Optional[Tuple[Optional[int], Optional[int]]] = None,
) -> Over[_T]:
return sqlalchemy.over(
element, partition_by=partition_by, order_by=order_by, range_=range_, rows=rows
)
def tuple_(
*clauses: Union[_ColumnExpressionArgument[Any], Any],
types: Optional[Sequence["_TypeEngineArgument[Any]"]] = None,
) -> Tuple[Any, ...]:
return sqlalchemy.tuple_(*clauses, types=types) # type: ignore[return-value]
def type_coerce(
expression: Union[_ColumnExpressionOrLiteralArgument[Any], Any],
type_: "_TypeEngineArgument[_T]",
) -> TypeCoerce[_T]:
return sqlalchemy.type_coerce(expression, type_)
def within_group(
element: FunctionElement[_T], *order_by: Union[_ColumnExpressionArgument[Any], Any]
) -> WithinGroup[_T]:
return sqlalchemy.within_group(element, *order_by)
def col(column_expression: _T) -> Mapped[_T]:
if not isinstance(column_expression, (ColumnClause, Column, InstrumentedAttribute)):
raise RuntimeError(f"Not a SQLAlchemy column: {column_expression}")
return column_expression # type: ignore
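A brief sketch of what `col()` is for, with a hypothetical `Hero` model: at runtime it returns the attribute unchanged, but it tells the type checker that the attribute is a SQLAlchemy column, so column operators such as `.in_()` do not raise type errors on attributes annotated with plain Python types:

```Python
from sqlmodel import col, select

statement = select(Hero).where(col(Hero.name).in_(["Deadpond", "Rusty-Man"]))
```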


@@ -0,0 +1,16 @@
from typing import Any, cast
from sqlalchemy import types
from sqlalchemy.engine.interfaces import Dialect
class AutoString(types.TypeDecorator): # type: ignore
impl = types.String
cache_ok = True
mysql_default_length = 255
def load_dialect_impl(self, dialect: Dialect) -> "types.TypeEngine[Any]":
impl = cast(types.String, self.impl)
if impl.length is None and dialect.name == "mysql":
return dialect.type_descriptor(types.String(self.mysql_default_length))
return super().load_dialect_impl(dialect)
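A hedged sketch of the effect: with no explicit length, `AutoString` compiles to a plain `VARCHAR` on most backends but falls back to `VARCHAR(255)` on MySQL, where `VARCHAR` needs an explicit length. The dialect instances below are used only to render DDL for illustration:

```Python
from sqlalchemy import Column, MetaData, Table
from sqlalchemy.dialects import mysql, sqlite
from sqlalchemy.schema import CreateTable
from sqlmodel import AutoString  # re-exported in the __init__.py shown earlier

table = Table("hero", MetaData(), Column("name", AutoString()))

print(CreateTable(table).compile(dialect=sqlite.dialect()))  # name VARCHAR
print(CreateTable(table).compile(dialect=mysql.dialect()))   # name VARCHAR(255)
```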