Update 2025-04-13_16:25:39
This commit is contained in:
294
venv/lib/python3.11/site-packages/sqlalchemy/__init__.py
Normal file
294
venv/lib/python3.11/site-packages/sqlalchemy/__init__.py
Normal file
@ -0,0 +1,294 @@
|
||||
# __init__.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from . import util as _util
|
||||
from .engine import AdaptedConnection as AdaptedConnection
|
||||
from .engine import BaseRow as BaseRow
|
||||
from .engine import BindTyping as BindTyping
|
||||
from .engine import ChunkedIteratorResult as ChunkedIteratorResult
|
||||
from .engine import Compiled as Compiled
|
||||
from .engine import Connection as Connection
|
||||
from .engine import create_engine as create_engine
|
||||
from .engine import create_mock_engine as create_mock_engine
|
||||
from .engine import create_pool_from_url as create_pool_from_url
|
||||
from .engine import CreateEnginePlugin as CreateEnginePlugin
|
||||
from .engine import CursorResult as CursorResult
|
||||
from .engine import Dialect as Dialect
|
||||
from .engine import Engine as Engine
|
||||
from .engine import engine_from_config as engine_from_config
|
||||
from .engine import ExceptionContext as ExceptionContext
|
||||
from .engine import ExecutionContext as ExecutionContext
|
||||
from .engine import FrozenResult as FrozenResult
|
||||
from .engine import Inspector as Inspector
|
||||
from .engine import IteratorResult as IteratorResult
|
||||
from .engine import make_url as make_url
|
||||
from .engine import MappingResult as MappingResult
|
||||
from .engine import MergedResult as MergedResult
|
||||
from .engine import NestedTransaction as NestedTransaction
|
||||
from .engine import Result as Result
|
||||
from .engine import result_tuple as result_tuple
|
||||
from .engine import ResultProxy as ResultProxy
|
||||
from .engine import RootTransaction as RootTransaction
|
||||
from .engine import Row as Row
|
||||
from .engine import RowMapping as RowMapping
|
||||
from .engine import ScalarResult as ScalarResult
|
||||
from .engine import Transaction as Transaction
|
||||
from .engine import TwoPhaseTransaction as TwoPhaseTransaction
|
||||
from .engine import TypeCompiler as TypeCompiler
|
||||
from .engine import URL as URL
|
||||
from .inspection import inspect as inspect
|
||||
from .pool import AssertionPool as AssertionPool
|
||||
from .pool import AsyncAdaptedQueuePool as AsyncAdaptedQueuePool
|
||||
from .pool import (
|
||||
FallbackAsyncAdaptedQueuePool as FallbackAsyncAdaptedQueuePool,
|
||||
)
|
||||
from .pool import NullPool as NullPool
|
||||
from .pool import Pool as Pool
|
||||
from .pool import PoolProxiedConnection as PoolProxiedConnection
|
||||
from .pool import PoolResetState as PoolResetState
|
||||
from .pool import QueuePool as QueuePool
|
||||
from .pool import SingletonThreadPool as SingletonThreadPool
|
||||
from .pool import StaticPool as StaticPool
|
||||
from .schema import BaseDDLElement as BaseDDLElement
|
||||
from .schema import BLANK_SCHEMA as BLANK_SCHEMA
|
||||
from .schema import CheckConstraint as CheckConstraint
|
||||
from .schema import Column as Column
|
||||
from .schema import ColumnDefault as ColumnDefault
|
||||
from .schema import Computed as Computed
|
||||
from .schema import Constraint as Constraint
|
||||
from .schema import DDL as DDL
|
||||
from .schema import DDLElement as DDLElement
|
||||
from .schema import DefaultClause as DefaultClause
|
||||
from .schema import ExecutableDDLElement as ExecutableDDLElement
|
||||
from .schema import FetchedValue as FetchedValue
|
||||
from .schema import ForeignKey as ForeignKey
|
||||
from .schema import ForeignKeyConstraint as ForeignKeyConstraint
|
||||
from .schema import Identity as Identity
|
||||
from .schema import Index as Index
|
||||
from .schema import insert_sentinel as insert_sentinel
|
||||
from .schema import MetaData as MetaData
|
||||
from .schema import PrimaryKeyConstraint as PrimaryKeyConstraint
|
||||
from .schema import Sequence as Sequence
|
||||
from .schema import Table as Table
|
||||
from .schema import UniqueConstraint as UniqueConstraint
|
||||
from .sql import ColumnExpressionArgument as ColumnExpressionArgument
|
||||
from .sql import NotNullable as NotNullable
|
||||
from .sql import Nullable as Nullable
|
||||
from .sql import SelectLabelStyle as SelectLabelStyle
|
||||
from .sql.expression import Alias as Alias
|
||||
from .sql.expression import alias as alias
|
||||
from .sql.expression import AliasedReturnsRows as AliasedReturnsRows
|
||||
from .sql.expression import all_ as all_
|
||||
from .sql.expression import and_ as and_
|
||||
from .sql.expression import any_ as any_
|
||||
from .sql.expression import asc as asc
|
||||
from .sql.expression import between as between
|
||||
from .sql.expression import BinaryExpression as BinaryExpression
|
||||
from .sql.expression import bindparam as bindparam
|
||||
from .sql.expression import BindParameter as BindParameter
|
||||
from .sql.expression import bitwise_not as bitwise_not
|
||||
from .sql.expression import BooleanClauseList as BooleanClauseList
|
||||
from .sql.expression import CacheKey as CacheKey
|
||||
from .sql.expression import Case as Case
|
||||
from .sql.expression import case as case
|
||||
from .sql.expression import Cast as Cast
|
||||
from .sql.expression import cast as cast
|
||||
from .sql.expression import ClauseElement as ClauseElement
|
||||
from .sql.expression import ClauseList as ClauseList
|
||||
from .sql.expression import collate as collate
|
||||
from .sql.expression import CollectionAggregate as CollectionAggregate
|
||||
from .sql.expression import column as column
|
||||
from .sql.expression import ColumnClause as ColumnClause
|
||||
from .sql.expression import ColumnCollection as ColumnCollection
|
||||
from .sql.expression import ColumnElement as ColumnElement
|
||||
from .sql.expression import ColumnOperators as ColumnOperators
|
||||
from .sql.expression import CompoundSelect as CompoundSelect
|
||||
from .sql.expression import CTE as CTE
|
||||
from .sql.expression import cte as cte
|
||||
from .sql.expression import custom_op as custom_op
|
||||
from .sql.expression import Delete as Delete
|
||||
from .sql.expression import delete as delete
|
||||
from .sql.expression import desc as desc
|
||||
from .sql.expression import distinct as distinct
|
||||
from .sql.expression import except_ as except_
|
||||
from .sql.expression import except_all as except_all
|
||||
from .sql.expression import Executable as Executable
|
||||
from .sql.expression import Exists as Exists
|
||||
from .sql.expression import exists as exists
|
||||
from .sql.expression import Extract as Extract
|
||||
from .sql.expression import extract as extract
|
||||
from .sql.expression import false as false
|
||||
from .sql.expression import False_ as False_
|
||||
from .sql.expression import FromClause as FromClause
|
||||
from .sql.expression import FromGrouping as FromGrouping
|
||||
from .sql.expression import func as func
|
||||
from .sql.expression import funcfilter as funcfilter
|
||||
from .sql.expression import Function as Function
|
||||
from .sql.expression import FunctionElement as FunctionElement
|
||||
from .sql.expression import FunctionFilter as FunctionFilter
|
||||
from .sql.expression import GenerativeSelect as GenerativeSelect
|
||||
from .sql.expression import Grouping as Grouping
|
||||
from .sql.expression import HasCTE as HasCTE
|
||||
from .sql.expression import HasPrefixes as HasPrefixes
|
||||
from .sql.expression import HasSuffixes as HasSuffixes
|
||||
from .sql.expression import Insert as Insert
|
||||
from .sql.expression import insert as insert
|
||||
from .sql.expression import intersect as intersect
|
||||
from .sql.expression import intersect_all as intersect_all
|
||||
from .sql.expression import Join as Join
|
||||
from .sql.expression import join as join
|
||||
from .sql.expression import Label as Label
|
||||
from .sql.expression import label as label
|
||||
from .sql.expression import LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT
|
||||
from .sql.expression import (
|
||||
LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY,
|
||||
)
|
||||
from .sql.expression import LABEL_STYLE_NONE as LABEL_STYLE_NONE
|
||||
from .sql.expression import (
|
||||
LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL,
|
||||
)
|
||||
from .sql.expression import lambda_stmt as lambda_stmt
|
||||
from .sql.expression import LambdaElement as LambdaElement
|
||||
from .sql.expression import Lateral as Lateral
|
||||
from .sql.expression import lateral as lateral
|
||||
from .sql.expression import literal as literal
|
||||
from .sql.expression import literal_column as literal_column
|
||||
from .sql.expression import modifier as modifier
|
||||
from .sql.expression import not_ as not_
|
||||
from .sql.expression import Null as Null
|
||||
from .sql.expression import null as null
|
||||
from .sql.expression import nulls_first as nulls_first
|
||||
from .sql.expression import nulls_last as nulls_last
|
||||
from .sql.expression import nullsfirst as nullsfirst
|
||||
from .sql.expression import nullslast as nullslast
|
||||
from .sql.expression import Operators as Operators
|
||||
from .sql.expression import or_ as or_
|
||||
from .sql.expression import outerjoin as outerjoin
|
||||
from .sql.expression import outparam as outparam
|
||||
from .sql.expression import Over as Over
|
||||
from .sql.expression import over as over
|
||||
from .sql.expression import quoted_name as quoted_name
|
||||
from .sql.expression import ReleaseSavepointClause as ReleaseSavepointClause
|
||||
from .sql.expression import ReturnsRows as ReturnsRows
|
||||
from .sql.expression import (
|
||||
RollbackToSavepointClause as RollbackToSavepointClause,
|
||||
)
|
||||
from .sql.expression import SavepointClause as SavepointClause
|
||||
from .sql.expression import ScalarSelect as ScalarSelect
|
||||
from .sql.expression import Select as Select
|
||||
from .sql.expression import select as select
|
||||
from .sql.expression import Selectable as Selectable
|
||||
from .sql.expression import SelectBase as SelectBase
|
||||
from .sql.expression import SQLColumnExpression as SQLColumnExpression
|
||||
from .sql.expression import StatementLambdaElement as StatementLambdaElement
|
||||
from .sql.expression import Subquery as Subquery
|
||||
from .sql.expression import table as table
|
||||
from .sql.expression import TableClause as TableClause
|
||||
from .sql.expression import TableSample as TableSample
|
||||
from .sql.expression import tablesample as tablesample
|
||||
from .sql.expression import TableValuedAlias as TableValuedAlias
|
||||
from .sql.expression import text as text
|
||||
from .sql.expression import TextAsFrom as TextAsFrom
|
||||
from .sql.expression import TextClause as TextClause
|
||||
from .sql.expression import TextualSelect as TextualSelect
|
||||
from .sql.expression import true as true
|
||||
from .sql.expression import True_ as True_
|
||||
from .sql.expression import try_cast as try_cast
|
||||
from .sql.expression import TryCast as TryCast
|
||||
from .sql.expression import Tuple as Tuple
|
||||
from .sql.expression import tuple_ as tuple_
|
||||
from .sql.expression import type_coerce as type_coerce
|
||||
from .sql.expression import TypeClause as TypeClause
|
||||
from .sql.expression import TypeCoerce as TypeCoerce
|
||||
from .sql.expression import UnaryExpression as UnaryExpression
|
||||
from .sql.expression import union as union
|
||||
from .sql.expression import union_all as union_all
|
||||
from .sql.expression import Update as Update
|
||||
from .sql.expression import update as update
|
||||
from .sql.expression import UpdateBase as UpdateBase
|
||||
from .sql.expression import Values as Values
|
||||
from .sql.expression import values as values
|
||||
from .sql.expression import ValuesBase as ValuesBase
|
||||
from .sql.expression import Visitable as Visitable
|
||||
from .sql.expression import within_group as within_group
|
||||
from .sql.expression import WithinGroup as WithinGroup
|
||||
from .types import ARRAY as ARRAY
|
||||
from .types import BIGINT as BIGINT
|
||||
from .types import BigInteger as BigInteger
|
||||
from .types import BINARY as BINARY
|
||||
from .types import BLOB as BLOB
|
||||
from .types import BOOLEAN as BOOLEAN
|
||||
from .types import Boolean as Boolean
|
||||
from .types import CHAR as CHAR
|
||||
from .types import CLOB as CLOB
|
||||
from .types import DATE as DATE
|
||||
from .types import Date as Date
|
||||
from .types import DATETIME as DATETIME
|
||||
from .types import DateTime as DateTime
|
||||
from .types import DECIMAL as DECIMAL
|
||||
from .types import DOUBLE as DOUBLE
|
||||
from .types import Double as Double
|
||||
from .types import DOUBLE_PRECISION as DOUBLE_PRECISION
|
||||
from .types import Enum as Enum
|
||||
from .types import FLOAT as FLOAT
|
||||
from .types import Float as Float
|
||||
from .types import INT as INT
|
||||
from .types import INTEGER as INTEGER
|
||||
from .types import Integer as Integer
|
||||
from .types import Interval as Interval
|
||||
from .types import JSON as JSON
|
||||
from .types import LargeBinary as LargeBinary
|
||||
from .types import NCHAR as NCHAR
|
||||
from .types import NUMERIC as NUMERIC
|
||||
from .types import Numeric as Numeric
|
||||
from .types import NVARCHAR as NVARCHAR
|
||||
from .types import PickleType as PickleType
|
||||
from .types import REAL as REAL
|
||||
from .types import SMALLINT as SMALLINT
|
||||
from .types import SmallInteger as SmallInteger
|
||||
from .types import String as String
|
||||
from .types import TEXT as TEXT
|
||||
from .types import Text as Text
|
||||
from .types import TIME as TIME
|
||||
from .types import Time as Time
|
||||
from .types import TIMESTAMP as TIMESTAMP
|
||||
from .types import TupleType as TupleType
|
||||
from .types import TypeDecorator as TypeDecorator
|
||||
from .types import Unicode as Unicode
|
||||
from .types import UnicodeText as UnicodeText
|
||||
from .types import UUID as UUID
|
||||
from .types import Uuid as Uuid
|
||||
from .types import VARBINARY as VARBINARY
|
||||
from .types import VARCHAR as VARCHAR
|
||||
|
||||
__version__ = "2.0.40"


def __go(lcls: Any) -> None:
    """Finish setting up the ``sqlalchemy`` namespace after the imports above.

    Triggers the deferred ("preloaded") imports registered under the
    ``sqlalchemy`` prefix, then stamps :mod:`sqlalchemy.exc` with a short
    version token derived from ``__version__``.
    """
    _util.preloaded.import_prefix("sqlalchemy")

    from . import exc

    # e.g. "2.0.40" -> "20": major + minor digits only, no dots
    exc._version_token = "".join(__version__.split(".")[0:2])


__go(locals())
|
||||
|
||||
|
||||
def __getattr__(name: str) -> Any:
    """Module-level attribute hook (PEP 562).

    Keeps the misspelled name ``SingleonThreadPool`` importable for
    backwards compatibility -- the typo shipped in early 2.0 releases --
    by emitting a deprecation warning and returning the correctly named
    ``SingletonThreadPool``.  All other unknown names raise
    ``AttributeError`` as usual.
    """
    if name == "SingleonThreadPool":
        _util.warn_deprecated(
            "SingleonThreadPool was a typo in the v2 series. "
            "Please use the correct SingletonThreadPool name.",
            "2.0.24",
        )
        return SingletonThreadPool
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,18 @@
|
||||
# connectors/__init__.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
|
||||
from ..engine.interfaces import Dialect
|
||||
|
||||
|
||||
class Connector(Dialect):
    """Base class for dialect mixins, for DBAPIs that work
    across entirely different database backends.

    Currently the only such mixin is pyodbc.

    """

    # intentionally empty: serves purely as a marker base class that
    # connector mixins (e.g. PyODBCConnector) derive from
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,174 @@
|
||||
# connectors/aioodbc.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .asyncio import AsyncAdapt_dbapi_connection
|
||||
from .asyncio import AsyncAdapt_dbapi_cursor
|
||||
from .asyncio import AsyncAdapt_dbapi_ss_cursor
|
||||
from .asyncio import AsyncAdaptFallback_dbapi_connection
|
||||
from .pyodbc import PyODBCConnector
|
||||
from .. import pool
|
||||
from .. import util
|
||||
from ..util.concurrency import await_fallback
|
||||
from ..util.concurrency import await_only
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..engine.interfaces import ConnectArgsType
|
||||
from ..engine.url import URL
|
||||
|
||||
|
||||
class AsyncAdapt_aioodbc_cursor(AsyncAdapt_dbapi_cursor):
    """aioodbc-specific cursor adapter.

    Overrides ``setinputsizes`` to reach through to the wrapped
    synchronous pyodbc cursor, working around an aioodbc limitation.
    """

    __slots__ = ()

    def setinputsizes(self, *inputsizes):
        # aioodbc's async setinputsizes is broken;
        # see https://github.com/aio-libs/aioodbc/issues/451
        # so call the underlying pyodbc cursor's implementation directly
        return self._cursor._impl.setinputsizes(*inputsizes)

        # how it's supposed to work
        # return self.await_(self._cursor.setinputsizes(*inputsizes))
|
||||
|
||||
|
||||
class AsyncAdapt_aioodbc_ss_cursor(
    AsyncAdapt_aioodbc_cursor, AsyncAdapt_dbapi_ss_cursor
):
    """Server-side variant of the aioodbc cursor adapter; combines the
    aioodbc ``setinputsizes`` workaround with unbuffered row fetching."""

    __slots__ = ()
|
||||
|
||||
|
||||
class AsyncAdapt_aioodbc_connection(AsyncAdapt_dbapi_connection):
    """aioodbc-specific connection adapter.

    Adds guards for aioodbc's behavior of setting its internal connection
    to ``None`` once closed, which would otherwise surface as
    ``AttributeError`` rather than a proper DBAPI error / no-op.
    """

    _cursor_cls = AsyncAdapt_aioodbc_cursor
    _ss_cursor_cls = AsyncAdapt_aioodbc_ss_cursor
    __slots__ = ()

    @property
    def autocommit(self):
        return self._connection.autocommit

    @autocommit.setter
    def autocommit(self, value):
        # setting autocommit on the async wrapper is broken;
        # https://github.com/aio-libs/aioodbc/issues/448
        # self._connection.autocommit = value
        # so set it on the underlying synchronous pyodbc connection

        self._connection._conn.autocommit = value

    def cursor(self, server_side=False):
        # aioodbc sets connection=None when closed and just fails with
        # AttributeError here. Here we use the same ProgrammingError +
        # message that pyodbc uses, so it triggers is_disconnect() as well.
        if self._connection.closed:
            raise self.dbapi.ProgrammingError(
                "Attempt to use a closed connection."
            )
        return super().cursor(server_side=server_side)

    def rollback(self):
        # aioodbc sets connection=None when closed and just fails with
        # AttributeError here. should be a no-op
        if not self._connection.closed:
            super().rollback()

    def commit(self):
        # aioodbc sets connection=None when closed and just fails with
        # AttributeError here. should be a no-op
        if not self._connection.closed:
            super().commit()

    def close(self):
        # aioodbc sets connection=None when closed and just fails with
        # AttributeError here. should be a no-op
        if not self._connection.closed:
            super().close()
|
||||
|
||||
|
||||
class AsyncAdaptFallback_aioodbc_connection(
    AsyncAdaptFallback_dbapi_connection, AsyncAdapt_aioodbc_connection
):
    """aioodbc connection adapter using the greenlet-fallback await
    strategy, for use under a plain synchronous engine."""

    __slots__ = ()
|
||||
|
||||
|
||||
class AsyncAdapt_aioodbc_dbapi:
    """Present the ``aioodbc`` + ``pyodbc`` module pair as a single
    pep-249 style DBAPI module.

    Exception classes and type/SQL constants are proxied from ``pyodbc``;
    connections are produced via ``aioodbc`` and wrapped in the adapter
    classes defined above.
    """

    def __init__(self, aioodbc, pyodbc):
        self.aioodbc = aioodbc
        self.pyodbc = pyodbc
        self.paramstyle = pyodbc.paramstyle
        self._init_dbapi_attributes()
        self.Cursor = AsyncAdapt_dbapi_cursor
        self.version = pyodbc.version

    def _init_dbapi_attributes(self):
        # proxy the standard DBAPI exception hierarchy plus pyodbc's
        # type objects / SQL constants onto this facade.
        # (fix: "InterfaceError" was duplicated in the original tuple)
        for name in (
            "Warning",
            "Error",
            "InterfaceError",
            "DataError",
            "DatabaseError",
            "OperationalError",
            "IntegrityError",
            "ProgrammingError",
            "InternalError",
            "NotSupportedError",
            "NUMBER",
            "STRING",
            "DATETIME",
            "BINARY",
            "Binary",
            "BinaryNull",
            "SQL_VARCHAR",
            "SQL_WVARCHAR",
        ):
            setattr(self, name, getattr(self.pyodbc, name))

    def connect(self, *arg, **kw):
        """Create a new adapted connection.

        ``async_fallback=True`` selects the greenlet-fallback variant for
        use under a synchronous engine; ``async_creator_fn`` lets the
        caller supply an alternative coroutine factory in place of
        ``aioodbc.connect``.
        """
        async_fallback = kw.pop("async_fallback", False)
        creator_fn = kw.pop("async_creator_fn", self.aioodbc.connect)

        if util.asbool(async_fallback):
            return AsyncAdaptFallback_aioodbc_connection(
                self,
                await_fallback(creator_fn(*arg, **kw)),
            )
        else:
            return AsyncAdapt_aioodbc_connection(
                self,
                await_only(creator_fn(*arg, **kw)),
            )
|
||||
|
||||
|
||||
class aiodbcConnector(PyODBCConnector):
    """PyODBC connector variant that runs on top of the asyncio
    ``aioodbc`` driver."""

    is_async = True
    supports_statement_cache = True

    supports_server_side_cursors = True

    @classmethod
    def import_dbapi(cls):
        # wrap both modules in the combined DBAPI facade defined above
        return AsyncAdapt_aioodbc_dbapi(
            __import__("aioodbc"), __import__("pyodbc")
        )

    def create_connect_args(self, url: URL) -> ConnectArgsType:
        arg, kw = super().create_connect_args(url)
        if arg and arg[0]:
            # aioodbc takes the ODBC connection string via the "dsn"
            # keyword rather than positionally
            kw["dsn"] = arg[0]

        return (), kw

    @classmethod
    def get_pool_class(cls, url):
        # pool implementation is selected by the async_fallback URL
        # query parameter
        async_fallback = url.query.get("async_fallback", False)

        if util.asbool(async_fallback):
            return pool.FallbackAsyncAdaptedQueuePool
        else:
            return pool.AsyncAdaptedQueuePool

    def get_driver_connection(self, connection):
        # return the raw aioodbc connection from the adapter
        return connection._connection
|
@ -0,0 +1,213 @@
|
||||
# connectors/asyncio.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
"""generic asyncio-adapted versions of DBAPI connection and cursor"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import collections
|
||||
|
||||
from ..engine import AdaptedConnection
|
||||
from ..util.concurrency import asyncio
|
||||
from ..util.concurrency import await_fallback
|
||||
from ..util.concurrency import await_only
|
||||
|
||||
|
||||
class AsyncAdapt_dbapi_cursor:
    """Adapt an asyncio DBAPI cursor to the synchronous pep-249 cursor
    interface.

    ``execute()`` eagerly fetches all rows into a local deque so the
    pep-249 fetch methods below can run without awaiting.
    """

    # overridden to True by the server-side cursor subclass
    server_side = False
    __slots__ = (
        "_adapt_connection",
        "_connection",
        "await_",
        "_cursor",
        "_rows",
    )

    def __init__(self, adapt_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        # await_only or await_fallback, taken from the owning connection
        self.await_ = adapt_connection.await_

        cursor = self._connection.cursor()
        self._cursor = self._aenter_cursor(cursor)

        if not self.server_side:
            # buffer of pre-fetched rows; drained by fetchone()/fetchmany()
            self._rows = collections.deque()

    def _aenter_cursor(self, cursor):
        # the async driver's cursor() returns an async context manager;
        # enter it synchronously through await_
        return self.await_(cursor.__aenter__())

    @property
    def description(self):
        return self._cursor.description

    @property
    def rowcount(self):
        return self._cursor.rowcount

    @property
    def arraysize(self):
        return self._cursor.arraysize

    @arraysize.setter
    def arraysize(self, value):
        self._cursor.arraysize = value

    @property
    def lastrowid(self):
        return self._cursor.lastrowid

    def close(self):
        # note we aren't actually closing the cursor here,
        # we are just letting GC do it. see notes in aiomysql dialect
        self._rows.clear()

    def execute(self, operation, parameters=None):
        return self.await_(self._execute_async(operation, parameters))

    def executemany(self, operation, seq_of_parameters):
        return self.await_(
            self._executemany_async(operation, seq_of_parameters)
        )

    async def _execute_async(self, operation, parameters):
        async with self._adapt_connection._execute_mutex:
            result = await self._cursor.execute(operation, parameters or ())

            if self._cursor.description and not self.server_side:
                # pre-buffer all rows so the sync fetch methods need
                # no further awaits
                self._rows = collections.deque(await self._cursor.fetchall())
            return result

    async def _executemany_async(self, operation, seq_of_parameters):
        async with self._adapt_connection._execute_mutex:
            return await self._cursor.executemany(operation, seq_of_parameters)

    def nextset(self):
        self.await_(self._cursor.nextset())
        if self._cursor.description and not self.server_side:
            self._rows = collections.deque(
                self.await_(self._cursor.fetchall())
            )

    def setinputsizes(self, *inputsizes):
        # NOTE: this is overridden in aioodbc due to
        # see https://github.com/aio-libs/aioodbc/issues/451
        # right now

        return self.await_(self._cursor.setinputsizes(*inputsizes))

    def __iter__(self):
        while self._rows:
            yield self._rows.popleft()

    def fetchone(self):
        if self._rows:
            return self._rows.popleft()
        else:
            return None

    def fetchmany(self, size=None):
        if size is None:
            size = self.arraysize
        rr = self._rows
        return [rr.popleft() for _ in range(min(size, len(rr)))]

    def fetchall(self):
        retval = list(self._rows)
        self._rows.clear()
        return retval
|
||||
|
||||
|
||||
class AsyncAdapt_dbapi_ss_cursor(AsyncAdapt_dbapi_cursor):
    """Server-side (unbuffered) cursor variant: rows are fetched from
    the driver on demand rather than pre-buffered on execute()."""

    __slots__ = ()
    server_side = True

    def __init__(self, adapt_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        self.await_ = adapt_connection.await_

        cursor = self._connection.cursor()

        # no _rows buffer is created for the server-side case
        self._cursor = self.await_(cursor.__aenter__())

    def close(self):
        # unlike the buffered cursor, actually close the driver cursor
        if self._cursor is not None:
            self.await_(self._cursor.close())
            self._cursor = None

    def fetchone(self):
        return self.await_(self._cursor.fetchone())

    def fetchmany(self, size=None):
        return self.await_(self._cursor.fetchmany(size=size))

    def fetchall(self):
        return self.await_(self._cursor.fetchall())

    def __iter__(self):
        # drive the driver's async iterator synchronously
        iterator = self._cursor.__aiter__()
        while True:
            try:
                yield self.await_(iterator.__anext__())
            except StopAsyncIteration:
                break
|
||||
|
||||
|
||||
class AsyncAdapt_dbapi_connection(AdaptedConnection):
    """Adapt an asyncio DBAPI connection to the synchronous pep-249
    connection interface."""

    _cursor_cls = AsyncAdapt_dbapi_cursor
    _ss_cursor_cls = AsyncAdapt_dbapi_ss_cursor

    # strategy used to run coroutines synchronously; the fallback
    # subclass swaps this for await_fallback
    await_ = staticmethod(await_only)
    __slots__ = ("dbapi", "_execute_mutex")

    def __init__(self, dbapi, connection):
        self.dbapi = dbapi
        self._connection = connection
        # serializes execute()/executemany() across all cursors of this
        # connection
        self._execute_mutex = asyncio.Lock()

    def ping(self, reconnect):
        return self.await_(self._connection.ping(reconnect))

    def add_output_converter(self, *arg, **kw):
        self._connection.add_output_converter(*arg, **kw)

    def character_set_name(self):
        return self._connection.character_set_name()

    @property
    def autocommit(self):
        return self._connection.autocommit

    @autocommit.setter
    def autocommit(self, value):
        # setting autocommit on the async wrapper is broken;
        # https://github.com/aio-libs/aioodbc/issues/448
        # self._connection.autocommit = value
        # so set it on the wrapped synchronous connection instead

        self._connection._conn.autocommit = value

    def cursor(self, server_side=False):
        if server_side:
            return self._ss_cursor_cls(self)
        else:
            return self._cursor_cls(self)

    def rollback(self):
        self.await_(self._connection.rollback())

    def commit(self):
        self.await_(self._connection.commit())

    def close(self):
        self.await_(self._connection.close())
|
||||
|
||||
|
||||
class AsyncAdaptFallback_dbapi_connection(AsyncAdapt_dbapi_connection):
    """Connection adapter using the greenlet-fallback await strategy,
    allowing use of the async driver under a synchronous engine."""

    __slots__ = ()

    await_ = staticmethod(await_fallback)
|
@ -0,0 +1,247 @@
|
||||
# connectors/pyodbc.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from types import ModuleType
|
||||
import typing
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
from typing import Union
|
||||
from urllib.parse import unquote_plus
|
||||
|
||||
from . import Connector
|
||||
from .. import ExecutionContext
|
||||
from .. import pool
|
||||
from .. import util
|
||||
from ..engine import ConnectArgsType
|
||||
from ..engine import Connection
|
||||
from ..engine import interfaces
|
||||
from ..engine import URL
|
||||
from ..sql.type_api import TypeEngine
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from ..engine.interfaces import IsolationLevel
|
||||
|
||||
|
||||
class PyODBCConnector(Connector):
|
||||
driver = "pyodbc"
|
||||
|
||||
# this is no longer False for pyodbc in general
|
||||
supports_sane_rowcount_returning = True
|
||||
supports_sane_multi_rowcount = False
|
||||
|
||||
supports_native_decimal = True
|
||||
default_paramstyle = "named"
|
||||
|
||||
fast_executemany = False
|
||||
|
||||
# for non-DSN connections, this *may* be used to
|
||||
# hold the desired driver name
|
||||
pyodbc_driver_name: Optional[str] = None
|
||||
|
||||
dbapi: ModuleType
|
||||
|
||||
    def __init__(self, use_setinputsizes: bool = False, **kw: Any):
        """Construct the connector.

        :param use_setinputsizes: when True, switch bind typing to
         SETINPUTSIZES so pyodbc's ``setinputsizes`` is invoked for
         statement parameters.
        """
        super().__init__(**kw)
        if use_setinputsizes:
            self.bind_typing = interfaces.BindTyping.SETINPUTSIZES
|
||||
|
||||
    @classmethod
    def import_dbapi(cls) -> ModuleType:
        """Import and return the ``pyodbc`` module as this dialect's DBAPI."""
        return __import__("pyodbc")
|
||||
|
||||
    def create_connect_args(self, url: URL) -> ConnectArgsType:
        """Build pyodbc connect arguments from a SQLAlchemy URL.

        Returns ``((connection_string,), connect_args)`` where the
        connection string is either taken verbatim from an
        ``odbc_connect`` query parameter, or assembled key-by-key from
        the URL's components (DSN or full DSN-less form).
        """
        opts = url.translate_connect_args(username="user")
        opts.update(url.query)

        keys = opts

        query = url.query

        connect_args: Dict[str, Any] = {}
        connectors: List[str]

        # these three are passed as keyword args to pyodbc.connect()
        # rather than embedded in the connection string
        for param in ("ansi", "unicode_results", "autocommit"):
            if param in keys:
                connect_args[param] = util.asbool(keys.pop(param))

        if "odbc_connect" in keys:
            # a fully-formed, URL-encoded ODBC connection string was
            # supplied; use it as-is
            connectors = [unquote_plus(keys.pop("odbc_connect"))]
        else:

            def check_quote(token: str) -> str:
                # ODBC values containing ";" or starting with "{" must be
                # brace-quoted, with "}" escaped as "}}"
                if ";" in str(token) or str(token).startswith("{"):
                    token = "{%s}" % token.replace("}", "}}")
                return token

            keys = {k: check_quote(v) for k, v in keys.items()}

            # a "host" with no "database" is treated as a DSN name
            dsn_connection = "dsn" in keys or (
                "host" in keys and "database" not in keys
            )
            if dsn_connection:
                connectors = [
                    "dsn=%s" % (keys.pop("host", "") or keys.pop("dsn", ""))
                ]
            else:
                port = ""
                # the port is appended to the Server= token as ",<port>"
                if "port" in keys and "port" not in query:
                    port = ",%d" % int(keys.pop("port"))

                connectors = []
                driver = keys.pop("driver", self.pyodbc_driver_name)
                if driver is None and keys:
                    # note if keys is empty, this is a totally blank URL
                    util.warn(
                        "No driver name specified; "
                        "this is expected by PyODBC when using "
                        "DSN-less connections"
                    )
                else:
                    connectors.append("DRIVER={%s}" % driver)

                connectors.extend(
                    [
                        "Server=%s%s" % (keys.pop("host", ""), port),
                        "Database=%s" % keys.pop("database", ""),
                    ]
                )

            user = keys.pop("user", None)
            if user:
                connectors.append("UID=%s" % user)
                pwd = keys.pop("password", "")
                if pwd:
                    connectors.append("PWD=%s" % pwd)
            else:
                # no user: fall back to an explicit authentication
                # scheme, or Windows integrated auth
                authentication = keys.pop("authentication", None)
                if authentication:
                    connectors.append("Authentication=%s" % authentication)
                else:
                    connectors.append("Trusted_Connection=Yes")

            # if set to 'Yes', the ODBC layer will try to automagically
            # convert textual data from your database encoding to your
            # client encoding. This should obviously be set to 'No' if
            # you query a cp1253 encoded database from a latin1 client...
            if "odbc_autotranslate" in keys:
                connectors.append(
                    "AutoTranslate=%s" % keys.pop("odbc_autotranslate")
                )

            # pass any remaining URL query items through verbatim
            connectors.extend(["%s=%s" % (k, v) for k, v in keys.items()])

        return ((";".join(connectors),), connect_args)
|
||||
|
||||
def is_disconnect(
    self,
    e: Exception,
    connection: Optional[
        Union[pool.PoolProxiedConnection, interfaces.DBAPIConnection]
    ],
    cursor: Optional[interfaces.DBAPICursor],
) -> bool:
    """Return True if *e* indicates the pyodbc connection was dropped.

    pyodbc reports a lost connection as a ProgrammingError whose message
    contains one of two known marker strings; any other exception type
    or message is treated as a regular error.
    """
    if not isinstance(e, self.dbapi.ProgrammingError):
        return False
    message = str(e)
    return (
        "The cursor's connection has been closed." in message
        or "Attempt to use a closed connection." in message
    )
|
||||
|
||||
def _dbapi_version(self) -> interfaces.VersionInfoType:
|
||||
if not self.dbapi:
|
||||
return ()
|
||||
return self._parse_dbapi_version(self.dbapi.version)
|
||||
|
||||
def _parse_dbapi_version(self, vers: str) -> interfaces.VersionInfoType:
|
||||
m = re.match(r"(?:py.*-)?([\d\.]+)(?:-(\w+))?", vers)
|
||||
if not m:
|
||||
return ()
|
||||
vers_tuple: interfaces.VersionInfoType = tuple(
|
||||
[int(x) for x in m.group(1).split(".")]
|
||||
)
|
||||
if m.group(2):
|
||||
vers_tuple += (m.group(2),)
|
||||
return vers_tuple
|
||||
|
||||
def _get_server_version_info(
|
||||
self, connection: Connection
|
||||
) -> interfaces.VersionInfoType:
|
||||
# NOTE: this function is not reliable, particularly when
|
||||
# freetds is in use. Implement database-specific server version
|
||||
# queries.
|
||||
dbapi_con = connection.connection.dbapi_connection
|
||||
version: Tuple[Union[int, str], ...] = ()
|
||||
r = re.compile(r"[.\-]")
|
||||
for n in r.split(dbapi_con.getinfo(self.dbapi.SQL_DBMS_VER)): # type: ignore[union-attr] # noqa: E501
|
||||
try:
|
||||
version += (int(n),)
|
||||
except ValueError:
|
||||
pass
|
||||
return tuple(version)
|
||||
|
||||
def do_set_input_sizes(
    self,
    cursor: interfaces.DBAPICursor,
    list_of_tuples: List[Tuple[str, Any, TypeEngine[Any]]],
    context: ExecutionContext,
) -> None:
    """Forward collected DBAPI type hints to ``cursor.setinputsizes()``.

    pyodbc accepts either a bare type or a ``(type, size, scale)`` tuple
    per parameter; bare types are normalized to ``(type, None, None)``
    because pyodbc otherwise assumes "0" for the missing fields, which
    fails for types such as pyodbc.SQL_WLONGVARCHAR (the datatype that
    ticket #5649 targets).

    As of #6058 this is only called when types were actually specified;
    as of #8177 (2.0) use_setinputsizes is assumed on, and the only case
    skipped is ``executemany()`` combined with ``fast_executemany=True``.
    """
    is_executemany = (
        context.execute_style is interfaces.ExecuteStyle.EXECUTEMANY
    )
    if is_executemany and self.fast_executemany:
        return

    cursor.setinputsizes(
        [
            dbtype if isinstance(dbtype, tuple) else (dbtype, None, None)
            for _key, dbtype, _sqltype in list_of_tuples
        ]
    )
|
||||
|
||||
def get_isolation_level_values(
    self, dbapi_conn: interfaces.DBAPIConnection
) -> List[IsolationLevel]:
    """Extend the base dialect's isolation levels with AUTOCOMMIT, which
    pyodbc implements via the connection's autocommit attribute."""
    levels = list(super().get_isolation_level_values(dbapi_conn))
    levels.append("AUTOCOMMIT")
    return levels
|
||||
|
||||
def set_isolation_level(
    self,
    dbapi_connection: interfaces.DBAPIConnection,
    level: IsolationLevel,
) -> None:
    """Apply *level*; AUTOCOMMIT toggles pyodbc's autocommit attribute.

    Attribute assignment (e.g. ``connection.autocommit = True``) is used
    rather than a method call so that a pool ConnectionFairy proxy, when
    present, forwards the set to the real connection.
    """
    if level == "AUTOCOMMIT":
        dbapi_connection.autocommit = True
        return
    dbapi_connection.autocommit = False
    super().set_isolation_level(dbapi_connection, level)
|
@ -0,0 +1,6 @@
|
||||
# cyextension/__init__.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,409 @@
|
||||
# cyextension/collections.pyx
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
cimport cython
|
||||
from cpython.long cimport PyLong_FromLongLong
|
||||
from cpython.set cimport PySet_Add
|
||||
|
||||
from collections.abc import Collection
|
||||
from itertools import filterfalse
|
||||
|
||||
# Record *item*'s hash (computed via the user-supplied *hashfunc*) in
# *seen*.  Returns True when the hash was not yet present, i.e. the
# caller should keep the item; False marks a duplicate.
cdef bint add_not_present(set seen, object item, hashfunc):
    hash_value = hashfunc(item)
    if hash_value not in seen:
        PySet_Add(seen, hash_value)
        return True
    else:
        return False
|
||||
|
||||
# De-duplicate *seq* into a list, preserving first-seen order.
# The no-hashfunc branch exploits PySet_Add returning 0 on success, so
# "x not in seen and not PySet_Add(seen, x)" both tests membership and
# records the element in a single comprehension pass.
cdef list cunique_list(seq, hashfunc=None):
    cdef set seen = set()
    if not hashfunc:
        return [x for x in seq if x not in seen and not PySet_Add(seen, x)]
    else:
        return [x for x in seq if add_not_present(seen, x, hashfunc)]
|
||||
|
||||
def unique_list(seq, hashfunc=None):
    """Return the elements of *seq* de-duplicated, preserving first-seen
    order; Python-visible wrapper around :func:`cunique_list`."""
    return cunique_list(seq, hashfunc)
|
||||
|
||||
cdef class OrderedSet(set):
    """A set that additionally maintains insertion order.

    Order is kept in a parallel ``_list``; the inherited ``set`` storage
    provides O(1) membership tests.  Every mutator must keep the two
    structures in sync.
    """

    # elements in insertion order, parallel to the set contents
    cdef list _list

    @classmethod
    def __class_getitem__(cls, key):
        # allow OrderedSet[T] in annotations; the subscript is ignored
        return cls

    def __init__(self, d=None):
        set.__init__(self)
        if d is not None:
            # de-duplicate the input while preserving its order
            self._list = cunique_list(d)
            set.update(self, self._list)
        else:
            self._list = []

    cpdef OrderedSet copy(self):
        """Return a shallow copy preserving insertion order."""
        cdef OrderedSet cp = OrderedSet.__new__(OrderedSet)
        cp._list = list(self._list)
        set.update(cp, cp._list)
        return cp

    @cython.final
    cdef OrderedSet _from_list(self, list new_list):
        # build a new OrderedSet directly from an already-unique list
        cdef OrderedSet new = OrderedSet.__new__(OrderedSet)
        new._list = new_list
        set.update(new, new_list)
        return new

    def add(self, element):
        if element not in self:
            self._list.append(element)
            PySet_Add(self, element)

    def remove(self, element):
        # set.remove will raise if element is not in self
        set.remove(self, element)
        self._list.remove(element)

    def pop(self):
        """Remove and return the most recently inserted element."""
        try:
            value = self._list.pop()
        except IndexError:
            raise KeyError("pop from an empty set") from None
        set.remove(self, value)
        return value

    def insert(self, Py_ssize_t pos, element):
        """Insert *element* at list position *pos* unless already present."""
        if element not in self:
            self._list.insert(pos, element)
            PySet_Add(self, element)

    def discard(self, element):
        if element in self:
            set.remove(self, element)
            self._list.remove(element)

    def clear(self):
        set.clear(self)
        self._list = []

    def __getitem__(self, key):
        # positional access by insertion order
        return self._list[key]

    def __iter__(self):
        # iterate in insertion order, not set-hash order
        return iter(self._list)

    def __add__(self, other):
        return self.union(other)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._list)

    __str__ = __repr__

    def update(self, *iterables):
        for iterable in iterables:
            for e in iterable:
                if e not in self:
                    self._list.append(e)
                    set.add(self, e)

    def __ior__(self, iterable):
        self.update(iterable)
        return self

    def union(self, *other):
        result = self.copy()
        result.update(*other)
        return result

    def __or__(self, other):
        return self.union(other)

    def intersection(self, *other):
        # keep self's order, filtered by a plain-set intersection
        cdef set other_set = set.intersection(self, *other)
        return self._from_list([a for a in self._list if a in other_set])

    def __and__(self, other):
        return self.intersection(other)

    def symmetric_difference(self, other):
        """Elements in exactly one of self/other; self's elements first."""
        cdef set other_set
        if isinstance(other, set):
            other_set = <set> other
            collection = other_set
        elif isinstance(other, Collection):
            collection = other
            other_set = set(other)
        else:
            # arbitrary iterable: materialize once so it can be
            # iterated a second time below
            collection = list(other)
            other_set = set(collection)
        result = self._from_list([a for a in self._list if a not in other_set])
        result.update(a for a in collection if a not in self)
        return result

    def __xor__(self, other):
        return self.symmetric_difference(other)

    def difference(self, *other):
        cdef set other_set = set.difference(self, *other)
        return self._from_list([a for a in self._list if a in other_set])

    def __sub__(self, other):
        return self.difference(other)

    def intersection_update(self, *other):
        set.intersection_update(self, *other)
        # resync the order list against the updated set contents
        self._list = [a for a in self._list if a in self]

    def __iand__(self, other):
        self.intersection_update(other)
        return self

    cpdef symmetric_difference_update(self, other):
        collection = other if isinstance(other, Collection) else list(other)
        set.symmetric_difference_update(self, collection)
        # retained originals first, then elements newly added from other
        self._list = [a for a in self._list if a in self]
        self._list += [a for a in collection if a in self]

    def __ixor__(self, other):
        self.symmetric_difference_update(other)
        return self

    def difference_update(self, *other):
        set.difference_update(self, *other)
        self._list = [a for a in self._list if a in self]

    def __isub__(self, other):
        self.difference_update(other)
        return self
|
||||
|
||||
# Return id(item) as a Python int, computed directly from the object's
# pointer; used as the identity key in IdentitySet's backing dict.
cdef object cy_id(object item):
    return PyLong_FromLongLong(<long long> (<void *>item))
|
||||
|
||||
# NOTE: cython 0.x will call __add__, __sub__, etc with the parameter swapped
|
||||
# instead of the __rmeth__, so they need to check that also self is of the
|
||||
# correct type. This is fixed in cython 3.x. See:
|
||||
# https://docs.cython.org/en/latest/src/userguide/special_methods.html#arithmetic-methods
|
||||
cdef class IdentitySet:
    """A set that considers only object id() for uniqueness.

    This strategy has edge cases for builtin types- it's possible to have
    two 'foo' strings in one of these sets, for example. Use sparingly.

    """

    # id(member) -> member; a dict so insertion order is retained
    cdef dict _members

    def __init__(self, iterable=None):
        self._members = {}
        if iterable:
            self.update(iterable)

    def add(self, value):
        self._members[cy_id(value)] = value

    def __contains__(self, value):
        return cy_id(value) in self._members

    cpdef remove(self, value):
        # raises KeyError when value is not present
        del self._members[cy_id(value)]

    def discard(self, value):
        try:
            self.remove(value)
        except KeyError:
            pass

    def pop(self):
        """Remove and return an arbitrary (last-inserted) member."""
        cdef tuple pair
        try:
            pair = self._members.popitem()
            return pair[1]
        except KeyError:
            raise KeyError("pop from an empty set")

    def clear(self):
        self._members.clear()

    def __eq__(self, other):
        cdef IdentitySet other_
        if isinstance(other, IdentitySet):
            other_ = other
            return self._members == other_._members
        else:
            return False

    def __ne__(self, other):
        cdef IdentitySet other_
        if isinstance(other, IdentitySet):
            other_ = other
            return self._members != other_._members
        else:
            return True

    cpdef issubset(self, iterable):
        """True when every member of self is in *iterable* (by identity)."""
        cdef IdentitySet other
        if isinstance(iterable, self.__class__):
            other = iterable
        else:
            other = self.__class__(iterable)

        if len(self) > len(other):
            return False
        for m in filterfalse(other._members.__contains__, self._members):
            return False
        return True

    def __le__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return self.issubset(other)

    def __lt__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return len(self) < len(other) and self.issubset(other)

    cpdef issuperset(self, iterable):
        """True when every member of *iterable* is in self (by identity)."""
        cdef IdentitySet other
        if isinstance(iterable, self.__class__):
            other = iterable
        else:
            other = self.__class__(iterable)

        if len(self) < len(other):
            return False
        for m in filterfalse(self._members.__contains__, other._members):
            return False
        return True

    def __ge__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return self.issuperset(other)

    def __gt__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        return len(self) > len(other) and self.issuperset(other)

    cpdef IdentitySet union(self, iterable):
        cdef IdentitySet result = self.__class__()
        result._members.update(self._members)
        result.update(iterable)
        return result

    def __or__(self, other):
        # both operands are checked because cython 0.x may call __or__
        # with swapped operands instead of __ror__
        if not isinstance(other, IdentitySet) or not isinstance(self, IdentitySet):
            return NotImplemented
        return self.union(other)

    cpdef update(self, iterable):
        for obj in iterable:
            self._members[cy_id(obj)] = obj

    def __ior__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        self.update(other)
        return self

    cpdef IdentitySet difference(self, iterable):
        cdef IdentitySet result = self.__new__(self.__class__)
        if isinstance(iterable, self.__class__):
            other = (<IdentitySet>iterable)._members
        else:
            other = {cy_id(obj) for obj in iterable}
        result._members = {k:v for k, v in self._members.items() if k not in other}
        return result

    def __sub__(self, other):
        if not isinstance(other, IdentitySet) or not isinstance(self, IdentitySet):
            return NotImplemented
        return self.difference(other)

    cpdef difference_update(self, iterable):
        cdef IdentitySet other = self.difference(iterable)
        self._members = other._members

    def __isub__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        self.difference_update(other)
        return self

    cpdef IdentitySet intersection(self, iterable):
        cdef IdentitySet result = self.__new__(self.__class__)
        if isinstance(iterable, self.__class__):
            other = (<IdentitySet>iterable)._members
        else:
            other = {cy_id(obj) for obj in iterable}
        result._members = {k: v for k, v in self._members.items() if k in other}
        return result

    def __and__(self, other):
        if not isinstance(other, IdentitySet) or not isinstance(self, IdentitySet):
            return NotImplemented
        return self.intersection(other)

    cpdef intersection_update(self, iterable):
        cdef IdentitySet other = self.intersection(iterable)
        self._members = other._members

    def __iand__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        self.intersection_update(other)
        return self

    cpdef IdentitySet symmetric_difference(self, iterable):
        cdef IdentitySet result = self.__new__(self.__class__)
        cdef dict other
        if isinstance(iterable, self.__class__):
            other = (<IdentitySet>iterable)._members
        else:
            other = {cy_id(obj): obj for obj in iterable}
        result._members = {k: v for k, v in self._members.items() if k not in other}
        result._members.update(
            [(k, v) for k, v in other.items() if k not in self._members]
        )
        return result

    def __xor__(self, other):
        if not isinstance(other, IdentitySet) or not isinstance(self, IdentitySet):
            return NotImplemented
        return self.symmetric_difference(other)

    cpdef symmetric_difference_update(self, iterable):
        cdef IdentitySet other = self.symmetric_difference(iterable)
        self._members = other._members

    def __ixor__(self, other):
        if not isinstance(other, IdentitySet):
            return NotImplemented
        # BUGFIX: previously called symmetric_difference() and discarded
        # its return value, making ``s ^= t`` a silent no-op; the
        # in-place variant must be used so self._members is replaced.
        self.symmetric_difference_update(other)
        return self

    cpdef IdentitySet copy(self):
        cdef IdentitySet cp = self.__new__(self.__class__)
        cp._members = self._members.copy()
        return cp

    def __copy__(self):
        return self.copy()

    def __len__(self):
        return len(self._members)

    def __iter__(self):
        return iter(self._members.values())

    def __hash__(self):
        raise TypeError("set objects are unhashable")

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, list(self._members.values()))
|
Binary file not shown.
@ -0,0 +1,8 @@
|
||||
# cyextension/immutabledict.pxd
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# Declaration-only stub so other modules can ``cimport`` immutabledict;
# the implementation lives in immutabledict.pyx.
cdef class immutabledict(dict):
    pass
|
@ -0,0 +1,133 @@
|
||||
# cyextension/immutabledict.pyx
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
from cpython.dict cimport PyDict_New, PyDict_Update, PyDict_Size
|
||||
|
||||
|
||||
def _readonly_fn(obj):
|
||||
raise TypeError(
|
||||
"%s object is immutable and/or readonly" % obj.__class__.__name__)
|
||||
|
||||
|
||||
def _immutable_fn(obj):
|
||||
raise TypeError(
|
||||
"%s object is immutable" % obj.__class__.__name__)
|
||||
|
||||
|
||||
class ReadOnlyContainer:
    """Mixin that blocks item and attribute mutation on subclasses."""

    __slots__ = ()

    def _readonly(self, *a,**kw):
        _readonly_fn(self)

    # every mutation hook funnels into the raising helper
    __delitem__ = __setitem__ = __setattr__ = _readonly
|
||||
|
||||
|
||||
class ImmutableDictBase(dict):
    """dict subclass whose entire mutating API raises TypeError;
    Python-level counterpart of the cdef ``immutabledict`` below."""

    def _immutable(self, *a,**kw):
        _immutable_fn(self)

    @classmethod
    def __class_getitem__(cls, key):
        # allow ImmutableDictBase[K, V] in annotations
        return cls

    # block both item/attribute assignment and the mutating dict methods
    __delitem__ = __setitem__ = __setattr__ = _immutable
    clear = pop = popitem = setdefault = update = _immutable
|
||||
|
||||
|
||||
cdef class immutabledict(dict):
    """dict subclass whose mutating API raises TypeError; combining
    operations (union, merge_with, ``|``) return new instances."""

    def __repr__(self):
        return f"immutabledict({dict.__repr__(self)})"

    @classmethod
    def __class_getitem__(cls, key):
        # allow immutabledict[K, V] in annotations
        return cls

    def union(self, *args, **kw):
        """Return a new immutabledict merging self with the given dict /
        dict-constructor arguments; returns self when nothing merges."""
        cdef dict to_merge = None
        cdef immutabledict result
        cdef Py_ssize_t args_len = len(args)
        if args_len > 1:
            raise TypeError(
                f'union expected at most 1 argument, got {args_len}'
            )
        if args_len == 1:
            attribute = args[0]
            if isinstance(attribute, dict):
                to_merge = <dict> attribute
        if to_merge is None:
            # non-dict positional and/or keyword args: let dict() build it
            to_merge = dict(*args, **kw)

        if PyDict_Size(to_merge) == 0:
            return self

        # new + update is faster than immutabledict(self)
        result = immutabledict()
        PyDict_Update(result, self)
        PyDict_Update(result, to_merge)
        return result

    def merge_with(self, *other):
        """Merge any number of dicts / pair-iterables into a new
        immutabledict, skipping empty or None entries; returns self
        unchanged when nothing was merged."""
        cdef immutabledict result = None
        cdef object d
        cdef bint update = False
        if not other:
            return self
        for d in other:
            if d:
                if update == False:
                    update = True
                    # new + update is faster than immutabledict(self)
                    result = immutabledict()
                    PyDict_Update(result, self)
                PyDict_Update(
                    result, <dict>(d if isinstance(d, dict) else dict(d))
                )

        return self if update == False else result

    def copy(self):
        # immutable, so handing out self is a valid copy
        return self

    def __reduce__(self):
        # pickle via a plain-dict payload
        return immutabledict, (dict(self), )

    def __delitem__(self, k):
        _immutable_fn(self)

    def __setitem__(self, k, v):
        _immutable_fn(self)

    def __setattr__(self, k, v):
        _immutable_fn(self)

    def clear(self, *args, **kw):
        _immutable_fn(self)

    def pop(self, *args, **kw):
        _immutable_fn(self)

    def popitem(self, *args, **kw):
        _immutable_fn(self)

    def setdefault(self, *args, **kw):
        _immutable_fn(self)

    def update(self, *args, **kw):
        _immutable_fn(self)

    # PEP 584
    def __ior__(self, other):
        _immutable_fn(self)

    def __or__(self, other):
        return immutabledict(dict.__or__(self, other))

    def __ror__(self, other):
        # NOTE: this is used only in cython 3.x;
        # version 0.x will call __or__ with args inversed
        return immutabledict(dict.__ror__(self, other))
|
Binary file not shown.
@ -0,0 +1,68 @@
|
||||
# cyextension/processors.pyx
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
import datetime
|
||||
from datetime import datetime as datetime_cls
|
||||
from datetime import time as time_cls
|
||||
from datetime import date as date_cls
|
||||
import re
|
||||
|
||||
from cpython.object cimport PyObject_Str
|
||||
from cpython.unicode cimport PyUnicode_AsASCIIString, PyUnicode_Check, PyUnicode_Decode
|
||||
from libc.stdio cimport sscanf
|
||||
|
||||
|
||||
def int_to_boolean(value):
    """Coerce a database integer to bool; None passes through."""
    return None if value is None else bool(value)
|
||||
|
||||
def to_str(value):
    """Stringify *value* via str(); None passes through."""
    if value is None:
        return None
    return PyObject_Str(value)
|
||||
|
||||
def to_float(value):
    """Coerce *value* to float; None passes through."""
    if value is None:
        return None
    return float(value)
|
||||
|
||||
# Encode *value* to ASCII bytes for C-level parsing; raises ValueError
# (chained from the underlying codec/type error) when the value is not
# an ASCII-encodable string.  *type_name* labels the error message.
cdef inline bytes to_bytes(object value, str type_name):
    try:
        return PyUnicode_AsASCIIString(value)
    except Exception as e:
        raise ValueError(
            f"Couldn't parse {type_name} string '{value!r}' "
            "- value is not a string."
        ) from e
|
||||
|
||||
def str_to_datetime(value):
    """Parse an ISO-8601 datetime string; None passes through."""
    return None if value is None else datetime_cls.fromisoformat(value)
|
||||
|
||||
def str_to_time(value):
    """Parse an ISO-8601 time string; None passes through."""
    return None if value is None else time_cls.fromisoformat(value)
|
||||
|
||||
|
||||
def str_to_date(value):
    """Parse an ISO-8601 date string; None passes through."""
    return None if value is None else date_cls.fromisoformat(value)
|
||||
|
||||
|
||||
|
||||
cdef class DecimalResultProcessor:
    """Callable-style processor converting numeric DB values to *type_*
    by first rendering them through the printf-style *format_* string
    (so e.g. floats are formatted before conversion)."""

    # target numeric constructor, e.g. decimal.Decimal -- confirm at call site
    cdef object type_
    # printf-style format applied to the raw value before conversion
    cdef str format_

    def __cinit__(self, type_, format_):
        self.type_ = type_
        self.format_ = format_

    def process(self, object value):
        """Convert *value*; None passes through unchanged."""
        if value is None:
            return None
        else:
            return self.type_(self.format_ % value)
|
Binary file not shown.
@ -0,0 +1,102 @@
|
||||
# cyextension/resultproxy.pyx
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
import operator
|
||||
|
||||
cdef class BaseRow:
    """C implementation of the Row base: an immutable, sequence-like
    result row with key/attribute access resolved through _key_to_index."""

    # result metadata object shared by all rows of one result
    cdef readonly object _parent
    # key (string / other) -> position within _data
    cdef readonly dict _key_to_index
    # the row's values, with processors already applied
    cdef readonly tuple _data

    def __init__(self, object parent, object processors, dict key_to_index, object data):
        """Row objects are constructed by CursorResult objects."""

        self._parent = parent

        self._key_to_index = key_to_index

        if processors:
            self._data = _apply_processors(processors, data)
        else:
            self._data = tuple(data)

    def __reduce__(self):
        return (
            rowproxy_reconstructor,
            (self.__class__, self.__getstate__()),
        )

    def __getstate__(self):
        # _key_to_index is not pickled; __setstate__ re-derives it from
        # the unpickled parent
        return {"_parent": self._parent, "_data": self._data}

    def __setstate__(self, dict state):
        parent = state["_parent"]
        self._parent = parent
        self._data = state["_data"]
        self._key_to_index = parent._key_to_index

    def _values_impl(self):
        return list(self)

    def __iter__(self):
        return iter(self._data)

    def __len__(self):
        return len(self._data)

    def __hash__(self):
        return hash(self._data)

    def __getitem__(self, index):
        return self._data[index]

    def _get_by_key_impl_mapping(self, key):
        return self._get_by_key_impl(key, 0)

    cdef _get_by_key_impl(self, object key, int attr_err):
        # attr_err != 0 tells the parent the miss came from attribute
        # access rather than mapping access
        index = self._key_to_index.get(key)
        if index is not None:
            return self._data[<int>index]
        self._parent._key_not_found(key, attr_err != 0)

    def __getattr__(self, name):
        return self._get_by_key_impl(name, 1)

    def _to_tuple_instance(self):
        return self._data
|
||||
|
||||
|
||||
cdef tuple _apply_processors(proc, data):
    # Apply each processor in *proc* to the element of *data* at the
    # same position; a None processor leaves the value unchanged.
    out = []
    for idx, p in enumerate(proc):
        out.append(data[idx] if p is None else p(data[idx]))
    return tuple(out)
|
||||
|
||||
|
||||
def rowproxy_reconstructor(cls, state):
    """Unpickle helper: allocate *cls* bypassing __init__ and restore
    *state* via __setstate__."""
    instance = cls.__new__(cls)
    instance.__setstate__(state)
    return instance
|
||||
|
||||
|
||||
cdef int is_contiguous(tuple indexes):
    # 1 when *indexes* form an unbroken ascending run (e.g. (2, 3, 4)),
    # 0 otherwise
    cdef int i
    for i in range(1, len(indexes)):
        if indexes[i] != indexes[i - 1] + 1:
            return 0
    return 1
|
||||
|
||||
|
||||
def tuplegetter(*indexes):
    """Return a callable extracting the given positions from a sequence.

    A single index or a contiguous ascending run uses one slice-based
    itemgetter (faster, though it returns a list for list input);
    otherwise a multi-index itemgetter is used.
    """
    if len(indexes) == 1 or is_contiguous(indexes) != 0:
        start, stop = indexes[0], indexes[-1] + 1
        return operator.itemgetter(slice(start, stop))
    return operator.itemgetter(*indexes)
|
Binary file not shown.
@ -0,0 +1,91 @@
|
||||
# cyextension/util.pyx
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
from collections.abc import Mapping
|
||||
|
||||
from sqlalchemy import exc
|
||||
|
||||
cdef tuple _Empty_Tuple = ()
|
||||
|
||||
cdef inline bint _mapping_or_tuple(object value):
    # fast exact-type checks first, then the generic Mapping ABC
    return isinstance(value, (dict, tuple)) or isinstance(value, Mapping)
|
||||
|
||||
# Validate a positional-parameter list by inspecting only its first
# element; raises ArgumentError when that element is not a mapping or
# tuple.  Declared ``except 0`` so the raised exception propagates
# correctly through the bint return value.
cdef inline bint _check_item(object params) except 0:
    cdef object item
    cdef bint ret = 1
    if params:
        item = params[0]
        if not _mapping_or_tuple(item):
            ret = 0
            raise exc.ArgumentError(
                "List argument must consist only of tuples or dictionaries"
            )
    return ret
|
||||
|
||||
def _distill_params_20(object params):
    """Normalize 2.0-style execute() parameters.

    None -> empty tuple; list/tuple -> returned as-is after validating
    the first item; mapping -> wrapped in a single-element list;
    anything else raises ArgumentError.
    """
    if params is None:
        return _Empty_Tuple
    if isinstance(params, (list, tuple)):
        _check_item(params)
        return params
    if isinstance(params, (dict, Mapping)):
        return [params]
    raise exc.ArgumentError("mapping or list expected for parameters")
|
||||
|
||||
|
||||
def _distill_raw_params(object params):
    """Like _distill_params_20, but only a list counts as multiple
    parameter sets; a bare tuple is treated as one parameter set."""
    if params is None:
        return _Empty_Tuple
    if isinstance(params, list):
        _check_item(params)
        return params
    if _mapping_or_tuple(params):
        return [params]
    raise exc.ArgumentError("mapping or sequence expected for parameters")
|
||||
|
||||
cdef class prefix_anon_map(dict):
    """dict that generates anonymized identifiers on first lookup.

    Keys are of the form "<prefix> <ident>"; the first miss for a given
    ident yields "<ident>_<n>" using a per-ident running counter, and
    the result is cached under the full key.
    """

    def __missing__(self, str key):
        cdef str derived
        cdef int anonymous_counter
        cdef dict self_dict = self

        # strip the leading space-delimited prefix; the rest is the ident
        derived = key.split(" ", 1)[1]

        anonymous_counter = self_dict.get(derived, 1)
        # bump the per-ident counter for the next anonymous name
        self_dict[derived] = anonymous_counter + 1
        value = f"{derived}_{anonymous_counter}"
        self_dict[key] = value
        return value
|
||||
|
||||
|
||||
cdef class cache_anon_map(dict):
    """dict handing out short, stable string tokens for arbitrary keys,
    allocated from a per-map sequential counter."""

    # next token value to hand out
    cdef int _index

    def __init__(self):
        self._index = 0

    def get_anon(self, obj):
        """Return (token, was_present) for id(obj), allocating on miss."""
        cdef long long idself
        cdef str id_
        cdef dict self_dict = self

        idself = id(obj)
        if idself in self_dict:
            return self_dict[idself], True
        else:
            id_ = self.__missing__(idself)
            return id_, False

    def __missing__(self, key):
        cdef str val
        cdef dict self_dict = self

        # allocate the next sequential token and cache it under key
        self_dict[key] = val = str(self._index)
        self._index += 1
        return val
|
||||
|
@ -0,0 +1,61 @@
|
||||
# dialects/__init__.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Callable
|
||||
from typing import Optional
|
||||
from typing import Type
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .. import util
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..engine.interfaces import Dialect
|
||||
|
||||
__all__ = ("mssql", "mysql", "oracle", "postgresql", "sqlite")
|
||||
|
||||
|
||||
def _auto_fn(name: str) -> Optional[Callable[[], Type[Dialect]]]:
    """default dialect importer.

    plugs into the :class:`.PluginLoader`
    as a first-hit system.

    """
    if "." in name:
        dialect, driver = name.split(".")
    else:
        dialect, driver = name, "base"

    try:
        if dialect == "mariadb":
            # it's "OK" to hardcode here since _auto_fn is already
            # hardcoded; third-party dialects would instead publish all
            # their entrypoints, which would actually look much nicer.
            module = __import__(
                "sqlalchemy.dialects.mysql.mariadb"
            ).dialects.mysql.mariadb
            return module.loader(driver)  # type: ignore
        module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects
        module = getattr(module, dialect)
    except ImportError:
        return None

    if not hasattr(module, driver):
        return None
    module = getattr(module, driver)
    return lambda: module.dialect
|
||||
|
||||
|
||||
# dialect lookup: _auto_fn handles the built-in names first; setuptools
# entrypoints under the "sqlalchemy.dialects" group are consulted next
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)

# plugin lookup via the "sqlalchemy.plugins" entrypoint group
plugins = util.PluginLoader("sqlalchemy.plugins")
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,30 @@
|
||||
# dialects/_typing.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
from typing import Iterable
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Union
|
||||
|
||||
from ..sql import roles
|
||||
from ..sql.base import ColumnCollection
|
||||
from ..sql.schema import Column
|
||||
from ..sql.schema import ColumnCollectionConstraint
|
||||
from ..sql.schema import Index
|
||||
|
||||
|
||||
# Shorthand argument types for dialect-specific INSERT ... ON CONFLICT
# style constructs.

# conflict target: constraint name, constraint object, or Index
_OnConflictConstraintT = Union[str, ColumnCollectionConstraint, Index, None]
# explicit list of index columns forming the conflict target
_OnConflictIndexElementsT = Optional[
    Iterable[Union[Column[Any], str, roles.DDLConstraintColumnRole]]
]
# partial-index WHERE criterion qualifying the conflict target
_OnConflictIndexWhereT = Optional[roles.WhereHavingRole]
# SET clause payload for the update branch
_OnConflictSetT = Optional[
    Union[Mapping[Any, Any], ColumnCollection[Any, Any]]
]
# WHERE criterion applied to the update branch
_OnConflictWhereT = Optional[roles.WhereHavingRole]
|
@ -0,0 +1,88 @@
|
||||
# dialects/mssql/__init__.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from . import aioodbc # noqa
|
||||
from . import base # noqa
|
||||
from . import pymssql # noqa
|
||||
from . import pyodbc # noqa
|
||||
from .base import BIGINT
|
||||
from .base import BINARY
|
||||
from .base import BIT
|
||||
from .base import CHAR
|
||||
from .base import DATE
|
||||
from .base import DATETIME
|
||||
from .base import DATETIME2
|
||||
from .base import DATETIMEOFFSET
|
||||
from .base import DECIMAL
|
||||
from .base import DOUBLE_PRECISION
|
||||
from .base import FLOAT
|
||||
from .base import IMAGE
|
||||
from .base import INTEGER
|
||||
from .base import JSON
|
||||
from .base import MONEY
|
||||
from .base import NCHAR
|
||||
from .base import NTEXT
|
||||
from .base import NUMERIC
|
||||
from .base import NVARCHAR
|
||||
from .base import REAL
|
||||
from .base import ROWVERSION
|
||||
from .base import SMALLDATETIME
|
||||
from .base import SMALLINT
|
||||
from .base import SMALLMONEY
|
||||
from .base import SQL_VARIANT
|
||||
from .base import TEXT
|
||||
from .base import TIME
|
||||
from .base import TIMESTAMP
|
||||
from .base import TINYINT
|
||||
from .base import UNIQUEIDENTIFIER
|
||||
from .base import VARBINARY
|
||||
from .base import VARCHAR
|
||||
from .base import XML
|
||||
from ...sql import try_cast
|
||||
|
||||
|
||||
base.dialect = dialect = pyodbc.dialect
|
||||
|
||||
|
||||
__all__ = (
|
||||
"JSON",
|
||||
"INTEGER",
|
||||
"BIGINT",
|
||||
"SMALLINT",
|
||||
"TINYINT",
|
||||
"VARCHAR",
|
||||
"NVARCHAR",
|
||||
"CHAR",
|
||||
"NCHAR",
|
||||
"TEXT",
|
||||
"NTEXT",
|
||||
"DECIMAL",
|
||||
"NUMERIC",
|
||||
"FLOAT",
|
||||
"DATETIME",
|
||||
"DATETIME2",
|
||||
"DATETIMEOFFSET",
|
||||
"DATE",
|
||||
"DOUBLE_PRECISION",
|
||||
"TIME",
|
||||
"SMALLDATETIME",
|
||||
"BINARY",
|
||||
"VARBINARY",
|
||||
"BIT",
|
||||
"REAL",
|
||||
"IMAGE",
|
||||
"TIMESTAMP",
|
||||
"ROWVERSION",
|
||||
"MONEY",
|
||||
"SMALLMONEY",
|
||||
"UNIQUEIDENTIFIER",
|
||||
"SQL_VARIANT",
|
||||
"XML",
|
||||
"dialect",
|
||||
"try_cast",
|
||||
)
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,63 @@
|
||||
# dialects/mssql/aioodbc.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
r"""
|
||||
.. dialect:: mssql+aioodbc
|
||||
:name: aioodbc
|
||||
:dbapi: aioodbc
|
||||
:connectstring: mssql+aioodbc://<username>:<password>@<dsnname>
|
||||
:url: https://pypi.org/project/aioodbc/
|
||||
|
||||
|
||||
Support for the SQL Server database in asyncio style, using the aioodbc
|
||||
driver which itself is a thread-wrapper around pyodbc.
|
||||
|
||||
.. versionadded:: 2.0.23 Added the mssql+aioodbc dialect which builds
|
||||
on top of the pyodbc and general aio* dialect architecture.
|
||||
|
||||
Using a special asyncio mediation layer, the aioodbc dialect is usable
|
||||
as the backend for the :ref:`SQLAlchemy asyncio <asyncio_toplevel>`
|
||||
extension package.
|
||||
|
||||
Most behaviors and caveats for this driver are the same as that of the
|
||||
pyodbc dialect used on SQL Server; see :ref:`mssql_pyodbc` for general
|
||||
background.
|
||||
|
||||
This dialect should normally be used only with the
|
||||
:func:`_asyncio.create_async_engine` engine creation function; connection
|
||||
styles are otherwise equivalent to those documented in the pyodbc section::
|
||||
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
|
||||
engine = create_async_engine(
|
||||
"mssql+aioodbc://scott:tiger@mssql2017:1433/test?"
|
||||
"driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes"
|
||||
)
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .pyodbc import MSDialect_pyodbc
|
||||
from .pyodbc import MSExecutionContext_pyodbc
|
||||
from ...connectors.aioodbc import aiodbcConnector
|
||||
|
||||
|
||||
class MSExecutionContext_aioodbc(MSExecutionContext_pyodbc):
|
||||
def create_server_side_cursor(self):
|
||||
return self._dbapi_connection.cursor(server_side=True)
|
||||
|
||||
|
||||
class MSDialectAsync_aioodbc(aiodbcConnector, MSDialect_pyodbc):
|
||||
driver = "aioodbc"
|
||||
|
||||
supports_statement_cache = True
|
||||
|
||||
execution_ctx_cls = MSExecutionContext_aioodbc
|
||||
|
||||
|
||||
dialect = MSDialectAsync_aioodbc
|
4058
venv/lib/python3.11/site-packages/sqlalchemy/dialects/mssql/base.py
Normal file
4058
venv/lib/python3.11/site-packages/sqlalchemy/dialects/mssql/base.py
Normal file
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,254 @@
|
||||
# dialects/mssql/information_schema.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from ... import cast
|
||||
from ... import Column
|
||||
from ... import MetaData
|
||||
from ... import Table
|
||||
from ...ext.compiler import compiles
|
||||
from ...sql import expression
|
||||
from ...types import Boolean
|
||||
from ...types import Integer
|
||||
from ...types import Numeric
|
||||
from ...types import NVARCHAR
|
||||
from ...types import String
|
||||
from ...types import TypeDecorator
|
||||
from ...types import Unicode
|
||||
|
||||
|
||||
ischema = MetaData()
|
||||
|
||||
|
||||
class CoerceUnicode(TypeDecorator):
|
||||
impl = Unicode
|
||||
cache_ok = True
|
||||
|
||||
def bind_expression(self, bindvalue):
|
||||
return _cast_on_2005(bindvalue)
|
||||
|
||||
|
||||
class _cast_on_2005(expression.ColumnElement):
|
||||
def __init__(self, bindvalue):
|
||||
self.bindvalue = bindvalue
|
||||
|
||||
|
||||
@compiles(_cast_on_2005)
|
||||
def _compile(element, compiler, **kw):
|
||||
from . import base
|
||||
|
||||
if (
|
||||
compiler.dialect.server_version_info is None
|
||||
or compiler.dialect.server_version_info < base.MS_2005_VERSION
|
||||
):
|
||||
return compiler.process(element.bindvalue, **kw)
|
||||
else:
|
||||
return compiler.process(cast(element.bindvalue, Unicode), **kw)
|
||||
|
||||
|
||||
schemata = Table(
|
||||
"SCHEMATA",
|
||||
ischema,
|
||||
Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"),
|
||||
Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"),
|
||||
Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"),
|
||||
schema="INFORMATION_SCHEMA",
|
||||
)
|
||||
|
||||
tables = Table(
|
||||
"TABLES",
|
||||
ischema,
|
||||
Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
|
||||
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
|
||||
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
|
||||
Column("TABLE_TYPE", CoerceUnicode, key="table_type"),
|
||||
schema="INFORMATION_SCHEMA",
|
||||
)
|
||||
|
||||
columns = Table(
|
||||
"COLUMNS",
|
||||
ischema,
|
||||
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
|
||||
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
|
||||
Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
|
||||
Column("IS_NULLABLE", Integer, key="is_nullable"),
|
||||
Column("DATA_TYPE", String, key="data_type"),
|
||||
Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
|
||||
Column(
|
||||
"CHARACTER_MAXIMUM_LENGTH", Integer, key="character_maximum_length"
|
||||
),
|
||||
Column("NUMERIC_PRECISION", Integer, key="numeric_precision"),
|
||||
Column("NUMERIC_SCALE", Integer, key="numeric_scale"),
|
||||
Column("COLUMN_DEFAULT", Integer, key="column_default"),
|
||||
Column("COLLATION_NAME", String, key="collation_name"),
|
||||
schema="INFORMATION_SCHEMA",
|
||||
)
|
||||
|
||||
mssql_temp_table_columns = Table(
|
||||
"COLUMNS",
|
||||
ischema,
|
||||
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
|
||||
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
|
||||
Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
|
||||
Column("IS_NULLABLE", Integer, key="is_nullable"),
|
||||
Column("DATA_TYPE", String, key="data_type"),
|
||||
Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
|
||||
Column(
|
||||
"CHARACTER_MAXIMUM_LENGTH", Integer, key="character_maximum_length"
|
||||
),
|
||||
Column("NUMERIC_PRECISION", Integer, key="numeric_precision"),
|
||||
Column("NUMERIC_SCALE", Integer, key="numeric_scale"),
|
||||
Column("COLUMN_DEFAULT", Integer, key="column_default"),
|
||||
Column("COLLATION_NAME", String, key="collation_name"),
|
||||
schema="tempdb.INFORMATION_SCHEMA",
|
||||
)
|
||||
|
||||
constraints = Table(
|
||||
"TABLE_CONSTRAINTS",
|
||||
ischema,
|
||||
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
|
||||
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
|
||||
Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
|
||||
Column("CONSTRAINT_TYPE", CoerceUnicode, key="constraint_type"),
|
||||
schema="INFORMATION_SCHEMA",
|
||||
)
|
||||
|
||||
column_constraints = Table(
|
||||
"CONSTRAINT_COLUMN_USAGE",
|
||||
ischema,
|
||||
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
|
||||
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
|
||||
Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
|
||||
Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
|
||||
schema="INFORMATION_SCHEMA",
|
||||
)
|
||||
|
||||
key_constraints = Table(
|
||||
"KEY_COLUMN_USAGE",
|
||||
ischema,
|
||||
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
|
||||
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
|
||||
Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
|
||||
Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
|
||||
Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="constraint_schema"),
|
||||
Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
|
||||
schema="INFORMATION_SCHEMA",
|
||||
)
|
||||
|
||||
ref_constraints = Table(
|
||||
"REFERENTIAL_CONSTRAINTS",
|
||||
ischema,
|
||||
Column("CONSTRAINT_CATALOG", CoerceUnicode, key="constraint_catalog"),
|
||||
Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="constraint_schema"),
|
||||
Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
|
||||
# TODO: is CATLOG misspelled ?
|
||||
Column(
|
||||
"UNIQUE_CONSTRAINT_CATLOG",
|
||||
CoerceUnicode,
|
||||
key="unique_constraint_catalog",
|
||||
),
|
||||
Column(
|
||||
"UNIQUE_CONSTRAINT_SCHEMA",
|
||||
CoerceUnicode,
|
||||
key="unique_constraint_schema",
|
||||
),
|
||||
Column(
|
||||
"UNIQUE_CONSTRAINT_NAME", CoerceUnicode, key="unique_constraint_name"
|
||||
),
|
||||
Column("MATCH_OPTION", String, key="match_option"),
|
||||
Column("UPDATE_RULE", String, key="update_rule"),
|
||||
Column("DELETE_RULE", String, key="delete_rule"),
|
||||
schema="INFORMATION_SCHEMA",
|
||||
)
|
||||
|
||||
views = Table(
|
||||
"VIEWS",
|
||||
ischema,
|
||||
Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
|
||||
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
|
||||
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
|
||||
Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"),
|
||||
Column("CHECK_OPTION", String, key="check_option"),
|
||||
Column("IS_UPDATABLE", String, key="is_updatable"),
|
||||
schema="INFORMATION_SCHEMA",
|
||||
)
|
||||
|
||||
computed_columns = Table(
|
||||
"computed_columns",
|
||||
ischema,
|
||||
Column("object_id", Integer),
|
||||
Column("name", CoerceUnicode),
|
||||
Column("is_computed", Boolean),
|
||||
Column("is_persisted", Boolean),
|
||||
Column("definition", CoerceUnicode),
|
||||
schema="sys",
|
||||
)
|
||||
|
||||
sequences = Table(
|
||||
"SEQUENCES",
|
||||
ischema,
|
||||
Column("SEQUENCE_CATALOG", CoerceUnicode, key="sequence_catalog"),
|
||||
Column("SEQUENCE_SCHEMA", CoerceUnicode, key="sequence_schema"),
|
||||
Column("SEQUENCE_NAME", CoerceUnicode, key="sequence_name"),
|
||||
schema="INFORMATION_SCHEMA",
|
||||
)
|
||||
|
||||
|
||||
class NumericSqlVariant(TypeDecorator):
|
||||
r"""This type casts sql_variant columns in the identity_columns view
|
||||
to numeric. This is required because:
|
||||
|
||||
* pyodbc does not support sql_variant
|
||||
* pymssql under python 2 return the byte representation of the number,
|
||||
int 1 is returned as "\x01\x00\x00\x00". On python 3 it returns the
|
||||
correct value as string.
|
||||
"""
|
||||
|
||||
impl = Unicode
|
||||
cache_ok = True
|
||||
|
||||
def column_expression(self, colexpr):
|
||||
return cast(colexpr, Numeric(38, 0))
|
||||
|
||||
|
||||
identity_columns = Table(
|
||||
"identity_columns",
|
||||
ischema,
|
||||
Column("object_id", Integer),
|
||||
Column("name", CoerceUnicode),
|
||||
Column("is_identity", Boolean),
|
||||
Column("seed_value", NumericSqlVariant),
|
||||
Column("increment_value", NumericSqlVariant),
|
||||
Column("last_value", NumericSqlVariant),
|
||||
Column("is_not_for_replication", Boolean),
|
||||
schema="sys",
|
||||
)
|
||||
|
||||
|
||||
class NVarcharSqlVariant(TypeDecorator):
|
||||
"""This type casts sql_variant columns in the extended_properties view
|
||||
to nvarchar. This is required because pyodbc does not support sql_variant
|
||||
"""
|
||||
|
||||
impl = Unicode
|
||||
cache_ok = True
|
||||
|
||||
def column_expression(self, colexpr):
|
||||
return cast(colexpr, NVARCHAR)
|
||||
|
||||
|
||||
extended_properties = Table(
|
||||
"extended_properties",
|
||||
ischema,
|
||||
Column("class", Integer), # TINYINT
|
||||
Column("class_desc", CoerceUnicode),
|
||||
Column("major_id", Integer),
|
||||
Column("minor_id", Integer),
|
||||
Column("name", CoerceUnicode),
|
||||
Column("value", NVarcharSqlVariant),
|
||||
schema="sys",
|
||||
)
|
@ -0,0 +1,129 @@
|
||||
# dialects/mssql/json.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from ... import types as sqltypes
|
||||
|
||||
# technically, all the dialect-specific datatypes that don't have any special
|
||||
# behaviors would be private with names like _MSJson. However, we haven't been
|
||||
# doing this for mysql.JSON or sqlite.JSON which both have JSON / JSONIndexType
|
||||
# / JSONPathType in their json.py files, so keep consistent with that
|
||||
# sub-convention for now. A future change can update them all to be
|
||||
# package-private at once.
|
||||
|
||||
|
||||
class JSON(sqltypes.JSON):
|
||||
"""MSSQL JSON type.
|
||||
|
||||
MSSQL supports JSON-formatted data as of SQL Server 2016.
|
||||
|
||||
The :class:`_mssql.JSON` datatype at the DDL level will represent the
|
||||
datatype as ``NVARCHAR(max)``, but provides for JSON-level comparison
|
||||
functions as well as Python coercion behavior.
|
||||
|
||||
:class:`_mssql.JSON` is used automatically whenever the base
|
||||
:class:`_types.JSON` datatype is used against a SQL Server backend.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:class:`_types.JSON` - main documentation for the generic
|
||||
cross-platform JSON datatype.
|
||||
|
||||
The :class:`_mssql.JSON` type supports persistence of JSON values
|
||||
as well as the core index operations provided by :class:`_types.JSON`
|
||||
datatype, by adapting the operations to render the ``JSON_VALUE``
|
||||
or ``JSON_QUERY`` functions at the database level.
|
||||
|
||||
The SQL Server :class:`_mssql.JSON` type necessarily makes use of the
|
||||
``JSON_QUERY`` and ``JSON_VALUE`` functions when querying for elements
|
||||
of a JSON object. These two functions have a major restriction in that
|
||||
they are **mutually exclusive** based on the type of object to be returned.
|
||||
The ``JSON_QUERY`` function **only** returns a JSON dictionary or list,
|
||||
but not an individual string, numeric, or boolean element; the
|
||||
``JSON_VALUE`` function **only** returns an individual string, numeric,
|
||||
or boolean element. **both functions either return NULL or raise
|
||||
an error if they are not used against the correct expected value**.
|
||||
|
||||
To handle this awkward requirement, indexed access rules are as follows:
|
||||
|
||||
1. When extracting a sub element from a JSON that is itself a JSON
|
||||
dictionary or list, the :meth:`_types.JSON.Comparator.as_json` accessor
|
||||
should be used::
|
||||
|
||||
stmt = select(data_table.c.data["some key"].as_json()).where(
|
||||
data_table.c.data["some key"].as_json() == {"sub": "structure"}
|
||||
)
|
||||
|
||||
2. When extracting a sub element from a JSON that is a plain boolean,
|
||||
string, integer, or float, use the appropriate method among
|
||||
:meth:`_types.JSON.Comparator.as_boolean`,
|
||||
:meth:`_types.JSON.Comparator.as_string`,
|
||||
:meth:`_types.JSON.Comparator.as_integer`,
|
||||
:meth:`_types.JSON.Comparator.as_float`::
|
||||
|
||||
stmt = select(data_table.c.data["some key"].as_string()).where(
|
||||
data_table.c.data["some key"].as_string() == "some string"
|
||||
)
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
|
||||
"""
|
||||
|
||||
# note there was a result processor here that was looking for "number",
|
||||
# but none of the tests seem to exercise it.
|
||||
|
||||
|
||||
# Note: these objects currently match exactly those of MySQL, however since
|
||||
# these are not generalizable to all JSON implementations, remain separately
|
||||
# implemented for each dialect.
|
||||
class _FormatTypeMixin:
|
||||
def _format_value(self, value):
|
||||
raise NotImplementedError()
|
||||
|
||||
def bind_processor(self, dialect):
|
||||
super_proc = self.string_bind_processor(dialect)
|
||||
|
||||
def process(value):
|
||||
value = self._format_value(value)
|
||||
if super_proc:
|
||||
value = super_proc(value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
def literal_processor(self, dialect):
|
||||
super_proc = self.string_literal_processor(dialect)
|
||||
|
||||
def process(value):
|
||||
value = self._format_value(value)
|
||||
if super_proc:
|
||||
value = super_proc(value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
|
||||
class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType):
|
||||
def _format_value(self, value):
|
||||
if isinstance(value, int):
|
||||
value = "$[%s]" % value
|
||||
else:
|
||||
value = '$."%s"' % value
|
||||
return value
|
||||
|
||||
|
||||
class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType):
|
||||
def _format_value(self, value):
|
||||
return "$%s" % (
|
||||
"".join(
|
||||
[
|
||||
"[%s]" % elem if isinstance(elem, int) else '."%s"' % elem
|
||||
for elem in value
|
||||
]
|
||||
)
|
||||
)
|
@ -0,0 +1,162 @@
|
||||
# dialects/mssql/provision.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from sqlalchemy import inspect
|
||||
from sqlalchemy import Integer
|
||||
from ... import create_engine
|
||||
from ... import exc
|
||||
from ...schema import Column
|
||||
from ...schema import DropConstraint
|
||||
from ...schema import ForeignKeyConstraint
|
||||
from ...schema import MetaData
|
||||
from ...schema import Table
|
||||
from ...testing.provision import create_db
|
||||
from ...testing.provision import drop_all_schema_objects_pre_tables
|
||||
from ...testing.provision import drop_db
|
||||
from ...testing.provision import generate_driver_url
|
||||
from ...testing.provision import get_temp_table_name
|
||||
from ...testing.provision import log
|
||||
from ...testing.provision import normalize_sequence
|
||||
from ...testing.provision import post_configure_engine
|
||||
from ...testing.provision import run_reap_dbs
|
||||
from ...testing.provision import temp_table_keyword_args
|
||||
|
||||
|
||||
@post_configure_engine.for_db("mssql")
|
||||
def post_configure_engine(url, engine, follower_ident):
|
||||
if engine.driver == "pyodbc":
|
||||
engine.dialect.dbapi.pooling = False
|
||||
|
||||
|
||||
@generate_driver_url.for_db("mssql")
|
||||
def generate_driver_url(url, driver, query_str):
|
||||
backend = url.get_backend_name()
|
||||
|
||||
new_url = url.set(drivername="%s+%s" % (backend, driver))
|
||||
|
||||
if driver not in ("pyodbc", "aioodbc"):
|
||||
new_url = new_url.set(query="")
|
||||
|
||||
if driver == "aioodbc":
|
||||
new_url = new_url.update_query_dict({"MARS_Connection": "Yes"})
|
||||
|
||||
if query_str:
|
||||
new_url = new_url.update_query_string(query_str)
|
||||
|
||||
try:
|
||||
new_url.get_dialect()
|
||||
except exc.NoSuchModuleError:
|
||||
return None
|
||||
else:
|
||||
return new_url
|
||||
|
||||
|
||||
@create_db.for_db("mssql")
|
||||
def _mssql_create_db(cfg, eng, ident):
|
||||
with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
|
||||
conn.exec_driver_sql("create database %s" % ident)
|
||||
conn.exec_driver_sql(
|
||||
"ALTER DATABASE %s SET ALLOW_SNAPSHOT_ISOLATION ON" % ident
|
||||
)
|
||||
conn.exec_driver_sql(
|
||||
"ALTER DATABASE %s SET READ_COMMITTED_SNAPSHOT ON" % ident
|
||||
)
|
||||
conn.exec_driver_sql("use %s" % ident)
|
||||
conn.exec_driver_sql("create schema test_schema")
|
||||
conn.exec_driver_sql("create schema test_schema_2")
|
||||
|
||||
|
||||
@drop_db.for_db("mssql")
|
||||
def _mssql_drop_db(cfg, eng, ident):
|
||||
with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
|
||||
_mssql_drop_ignore(conn, ident)
|
||||
|
||||
|
||||
def _mssql_drop_ignore(conn, ident):
|
||||
try:
|
||||
# typically when this happens, we can't KILL the session anyway,
|
||||
# so let the cleanup process drop the DBs
|
||||
# for row in conn.exec_driver_sql(
|
||||
# "select session_id from sys.dm_exec_sessions "
|
||||
# "where database_id=db_id('%s')" % ident):
|
||||
# log.info("killing SQL server session %s", row['session_id'])
|
||||
# conn.exec_driver_sql("kill %s" % row['session_id'])
|
||||
conn.exec_driver_sql("drop database %s" % ident)
|
||||
log.info("Reaped db: %s", ident)
|
||||
return True
|
||||
except exc.DatabaseError as err:
|
||||
log.warning("couldn't drop db: %s", err)
|
||||
return False
|
||||
|
||||
|
||||
@run_reap_dbs.for_db("mssql")
|
||||
def _reap_mssql_dbs(url, idents):
|
||||
log.info("db reaper connecting to %r", url)
|
||||
eng = create_engine(url)
|
||||
with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
|
||||
log.info("identifiers in file: %s", ", ".join(idents))
|
||||
|
||||
to_reap = conn.exec_driver_sql(
|
||||
"select d.name from sys.databases as d where name "
|
||||
"like 'TEST_%' and not exists (select session_id "
|
||||
"from sys.dm_exec_sessions "
|
||||
"where database_id=d.database_id)"
|
||||
)
|
||||
all_names = {dbname.lower() for (dbname,) in to_reap}
|
||||
to_drop = set()
|
||||
for name in all_names:
|
||||
if name in idents:
|
||||
to_drop.add(name)
|
||||
|
||||
dropped = total = 0
|
||||
for total, dbname in enumerate(to_drop, 1):
|
||||
if _mssql_drop_ignore(conn, dbname):
|
||||
dropped += 1
|
||||
log.info(
|
||||
"Dropped %d out of %d stale databases detected", dropped, total
|
||||
)
|
||||
|
||||
|
||||
@temp_table_keyword_args.for_db("mssql")
|
||||
def _mssql_temp_table_keyword_args(cfg, eng):
|
||||
return {}
|
||||
|
||||
|
||||
@get_temp_table_name.for_db("mssql")
|
||||
def _mssql_get_temp_table_name(cfg, eng, base_name):
|
||||
return "##" + base_name
|
||||
|
||||
|
||||
@drop_all_schema_objects_pre_tables.for_db("mssql")
|
||||
def drop_all_schema_objects_pre_tables(cfg, eng):
|
||||
with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
|
||||
inspector = inspect(conn)
|
||||
for schema in (None, "dbo", cfg.test_schema, cfg.test_schema_2):
|
||||
for tname in inspector.get_table_names(schema=schema):
|
||||
tb = Table(
|
||||
tname,
|
||||
MetaData(),
|
||||
Column("x", Integer),
|
||||
Column("y", Integer),
|
||||
schema=schema,
|
||||
)
|
||||
for fk in inspect(conn).get_foreign_keys(tname, schema=schema):
|
||||
conn.execute(
|
||||
DropConstraint(
|
||||
ForeignKeyConstraint(
|
||||
[tb.c.x], [tb.c.y], name=fk["name"]
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@normalize_sequence.for_db("mssql")
|
||||
def normalize_sequence(cfg, sequence):
|
||||
if sequence.start is None:
|
||||
sequence.start = 1
|
||||
return sequence
|
@ -0,0 +1,126 @@
|
||||
# dialects/mssql/pymssql.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
"""
|
||||
.. dialect:: mssql+pymssql
|
||||
:name: pymssql
|
||||
:dbapi: pymssql
|
||||
:connectstring: mssql+pymssql://<username>:<password>@<freetds_name>/?charset=utf8
|
||||
|
||||
pymssql is a Python module that provides a Python DBAPI interface around
|
||||
`FreeTDS <https://www.freetds.org/>`_.
|
||||
|
||||
.. versionchanged:: 2.0.5
|
||||
|
||||
pymssql was restored to SQLAlchemy's continuous integration testing
|
||||
|
||||
|
||||
""" # noqa
|
||||
import re
|
||||
|
||||
from .base import MSDialect
|
||||
from .base import MSIdentifierPreparer
|
||||
from ... import types as sqltypes
|
||||
from ... import util
|
||||
from ...engine import processors
|
||||
|
||||
|
||||
class _MSNumeric_pymssql(sqltypes.Numeric):
|
||||
def result_processor(self, dialect, type_):
|
||||
if not self.asdecimal:
|
||||
return processors.to_float
|
||||
else:
|
||||
return sqltypes.Numeric.result_processor(self, dialect, type_)
|
||||
|
||||
|
||||
class MSIdentifierPreparer_pymssql(MSIdentifierPreparer):
|
||||
def __init__(self, dialect):
|
||||
super().__init__(dialect)
|
||||
# pymssql has the very unusual behavior that it uses pyformat
|
||||
# yet does not require that percent signs be doubled
|
||||
self._double_percents = False
|
||||
|
||||
|
||||
class MSDialect_pymssql(MSDialect):
|
||||
supports_statement_cache = True
|
||||
supports_native_decimal = True
|
||||
supports_native_uuid = True
|
||||
driver = "pymssql"
|
||||
|
||||
preparer = MSIdentifierPreparer_pymssql
|
||||
|
||||
colspecs = util.update_copy(
|
||||
MSDialect.colspecs,
|
||||
{sqltypes.Numeric: _MSNumeric_pymssql, sqltypes.Float: sqltypes.Float},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def import_dbapi(cls):
|
||||
module = __import__("pymssql")
|
||||
# pymmsql < 2.1.1 doesn't have a Binary method. we use string
|
||||
client_ver = tuple(int(x) for x in module.__version__.split("."))
|
||||
if client_ver < (2, 1, 1):
|
||||
# TODO: monkeypatching here is less than ideal
|
||||
module.Binary = lambda x: x if hasattr(x, "decode") else str(x)
|
||||
|
||||
if client_ver < (1,):
|
||||
util.warn(
|
||||
"The pymssql dialect expects at least "
|
||||
"the 1.0 series of the pymssql DBAPI."
|
||||
)
|
||||
return module
|
||||
|
||||
def _get_server_version_info(self, connection):
|
||||
vers = connection.exec_driver_sql("select @@version").scalar()
|
||||
m = re.match(r"Microsoft .*? - (\d+)\.(\d+)\.(\d+)\.(\d+)", vers)
|
||||
if m:
|
||||
return tuple(int(x) for x in m.group(1, 2, 3, 4))
|
||||
else:
|
||||
return None
|
||||
|
||||
def create_connect_args(self, url):
|
||||
opts = url.translate_connect_args(username="user")
|
||||
opts.update(url.query)
|
||||
port = opts.pop("port", None)
|
||||
if port and "host" in opts:
|
||||
opts["host"] = "%s:%s" % (opts["host"], port)
|
||||
return ([], opts)
|
||||
|
||||
def is_disconnect(self, e, connection, cursor):
|
||||
for msg in (
|
||||
"Adaptive Server connection timed out",
|
||||
"Net-Lib error during Connection reset by peer",
|
||||
"message 20003", # connection timeout
|
||||
"Error 10054",
|
||||
"Not connected to any MS SQL server",
|
||||
"Connection is closed",
|
||||
"message 20006", # Write to the server failed
|
||||
"message 20017", # Unexpected EOF from the server
|
||||
"message 20047", # DBPROCESS is dead or not enabled
|
||||
"The server failed to resume the transaction",
|
||||
):
|
||||
if msg in str(e):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def get_isolation_level_values(self, dbapi_connection):
|
||||
return super().get_isolation_level_values(dbapi_connection) + [
|
||||
"AUTOCOMMIT"
|
||||
]
|
||||
|
||||
def set_isolation_level(self, dbapi_connection, level):
|
||||
if level == "AUTOCOMMIT":
|
||||
dbapi_connection.autocommit(True)
|
||||
else:
|
||||
dbapi_connection.autocommit(False)
|
||||
super().set_isolation_level(dbapi_connection, level)
|
||||
|
||||
|
||||
dialect = MSDialect_pymssql
|
@ -0,0 +1,760 @@
|
||||
# dialects/mssql/pyodbc.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
r"""
|
||||
.. dialect:: mssql+pyodbc
|
||||
:name: PyODBC
|
||||
:dbapi: pyodbc
|
||||
:connectstring: mssql+pyodbc://<username>:<password>@<dsnname>
|
||||
:url: https://pypi.org/project/pyodbc/
|
||||
|
||||
Connecting to PyODBC
|
||||
--------------------
|
||||
|
||||
The URL here is to be translated to PyODBC connection strings, as
|
||||
detailed in `ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_.
|
||||
|
||||
DSN Connections
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
A DSN connection in ODBC means that a pre-existing ODBC datasource is
|
||||
configured on the client machine. The application then specifies the name
|
||||
of this datasource, which encompasses details such as the specific ODBC driver
|
||||
in use as well as the network address of the database. Assuming a datasource
|
||||
is configured on the client, a basic DSN-based connection looks like::
|
||||
|
||||
engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn")
|
||||
|
||||
Which above, will pass the following connection string to PyODBC:
|
||||
|
||||
.. sourcecode:: text
|
||||
|
||||
DSN=some_dsn;UID=scott;PWD=tiger
|
||||
|
||||
If the username and password are omitted, the DSN form will also add
|
||||
the ``Trusted_Connection=yes`` directive to the ODBC string.
|
||||
|
||||
Hostname Connections
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Hostname-based connections are also supported by pyodbc. These are often
|
||||
easier to use than a DSN and have the additional advantage that the specific
|
||||
database name to connect towards may be specified locally in the URL, rather
|
||||
than it being fixed as part of a datasource configuration.
|
||||
|
||||
When using a hostname connection, the driver name must also be specified in the
|
||||
query parameters of the URL. As these names usually have spaces in them, the
|
||||
name must be URL encoded which means using plus signs for spaces::
|
||||
|
||||
engine = create_engine(
|
||||
"mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=ODBC+Driver+17+for+SQL+Server"
|
||||
)
|
||||
|
||||
The ``driver`` keyword is significant to the pyodbc dialect and must be
|
||||
specified in lowercase.
|
||||
|
||||
Any other names passed in the query string are passed through in the pyodbc
|
||||
connect string, such as ``authentication``, ``TrustServerCertificate``, etc.
|
||||
Multiple keyword arguments must be separated by an ampersand (``&``); these
|
||||
will be translated to semicolons when the pyodbc connect string is generated
|
||||
internally::
|
||||
|
||||
e = create_engine(
|
||||
"mssql+pyodbc://scott:tiger@mssql2017:1433/test?"
|
||||
"driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes"
|
||||
"&authentication=ActiveDirectoryIntegrated"
|
||||
)
|
||||
|
||||
The equivalent URL can be constructed using :class:`_sa.engine.URL`::
|
||||
|
||||
from sqlalchemy.engine import URL
|
||||
|
||||
connection_url = URL.create(
|
||||
"mssql+pyodbc",
|
||||
username="scott",
|
||||
password="tiger",
|
||||
host="mssql2017",
|
||||
port=1433,
|
||||
database="test",
|
||||
query={
|
||||
"driver": "ODBC Driver 18 for SQL Server",
|
||||
"TrustServerCertificate": "yes",
|
||||
"authentication": "ActiveDirectoryIntegrated",
|
||||
},
|
||||
)
|
||||
|
||||
Pass through exact Pyodbc string
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
A PyODBC connection string can also be sent in pyodbc's format directly, as
|
||||
specified in `the PyODBC documentation
|
||||
<https://github.com/mkleehammer/pyodbc/wiki/Connecting-to-databases>`_,
|
||||
using the parameter ``odbc_connect``. A :class:`_sa.engine.URL` object
|
||||
can help make this easier::
|
||||
|
||||
from sqlalchemy.engine import URL
|
||||
|
||||
connection_string = "DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password"
|
||||
connection_url = URL.create(
|
||||
"mssql+pyodbc", query={"odbc_connect": connection_string}
|
||||
)
|
||||
|
||||
engine = create_engine(connection_url)
|
||||
|
||||
.. _mssql_pyodbc_access_tokens:
|
||||
|
||||
Connecting to databases with access tokens
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Some database servers are set up to only accept access tokens for login. For
|
||||
example, SQL Server allows the use of Azure Active Directory tokens to connect
|
||||
to databases. This requires creating a credential object using the
|
||||
``azure-identity`` library. More information about the authentication step can be
|
||||
found in `Microsoft's documentation
|
||||
<https://docs.microsoft.com/en-us/azure/developer/python/azure-sdk-authenticate?tabs=bash>`_.
|
||||
|
||||
After getting an engine, the credentials need to be sent to ``pyodbc.connect``
|
||||
each time a connection is requested. One way to do this is to set up an event
|
||||
listener on the engine that adds the credential token to the dialect's connect
|
||||
call. This is discussed more generally in :ref:`engines_dynamic_tokens`. For
|
||||
SQL Server in particular, this is passed as an ODBC connection attribute with
|
||||
a data structure `described by Microsoft
|
||||
<https://docs.microsoft.com/en-us/sql/connect/odbc/using-azure-active-directory#authenticating-with-an-access-token>`_.
|
||||
|
||||
The following code snippet will create an engine that connects to an Azure SQL
|
||||
database using Azure credentials::
|
||||
|
||||
import struct
|
||||
from sqlalchemy import create_engine, event
|
||||
from sqlalchemy.engine.url import URL
|
||||
from azure import identity
|
||||
|
||||
# Connection option for access tokens, as defined in msodbcsql.h
|
||||
SQL_COPT_SS_ACCESS_TOKEN = 1256
|
||||
TOKEN_URL = "https://database.windows.net/" # The token URL for any Azure SQL database
|
||||
|
||||
connection_string = "mssql+pyodbc://@my-server.database.windows.net/myDb?driver=ODBC+Driver+17+for+SQL+Server"
|
||||
|
||||
engine = create_engine(connection_string)
|
||||
|
||||
azure_credentials = identity.DefaultAzureCredential()
|
||||
|
||||
|
||||
@event.listens_for(engine, "do_connect")
|
||||
def provide_token(dialect, conn_rec, cargs, cparams):
|
||||
# remove the "Trusted_Connection" parameter that SQLAlchemy adds
|
||||
cargs[0] = cargs[0].replace(";Trusted_Connection=Yes", "")
|
||||
|
||||
# create token credential
|
||||
raw_token = azure_credentials.get_token(TOKEN_URL).token.encode(
|
||||
"utf-16-le"
|
||||
)
|
||||
token_struct = struct.pack(
|
||||
f"<I{len(raw_token)}s", len(raw_token), raw_token
|
||||
)
|
||||
|
||||
# apply it to keyword arguments
|
||||
cparams["attrs_before"] = {SQL_COPT_SS_ACCESS_TOKEN: token_struct}
|
||||
|
||||
.. tip::
|
||||
|
||||
The ``Trusted_Connection`` token is currently added by the SQLAlchemy
|
||||
pyodbc dialect when no username or password is present. This needs
|
||||
to be removed per Microsoft's
|
||||
`documentation for Azure access tokens
|
||||
<https://docs.microsoft.com/en-us/sql/connect/odbc/using-azure-active-directory#authenticating-with-an-access-token>`_,
|
||||
stating that a connection string when using an access token must not contain
|
||||
``UID``, ``PWD``, ``Authentication`` or ``Trusted_Connection`` parameters.
|
||||
|
||||
.. _azure_synapse_ignore_no_transaction_on_rollback:
|
||||
|
||||
Avoiding transaction-related exceptions on Azure Synapse Analytics
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Azure Synapse Analytics has a significant difference in its transaction
|
||||
handling compared to plain SQL Server; in some cases an error within a Synapse
|
||||
transaction can cause it to be arbitrarily terminated on the server side, which
|
||||
then causes the DBAPI ``.rollback()`` method (as well as ``.commit()``) to
|
||||
fail. The issue prevents the usual DBAPI contract of allowing ``.rollback()``
|
||||
to pass silently if no transaction is present as the driver does not expect
|
||||
this condition. The symptom of this failure is an exception with a message
|
||||
resembling 'No corresponding transaction found. (111214)' when attempting to
|
||||
emit a ``.rollback()`` after an operation had a failure of some kind.
|
||||
|
||||
This specific case can be handled by passing ``ignore_no_transaction_on_rollback=True`` to
|
||||
the SQL Server dialect via the :func:`_sa.create_engine` function as follows::
|
||||
|
||||
engine = create_engine(
|
||||
connection_url, ignore_no_transaction_on_rollback=True
|
||||
)
|
||||
|
||||
Using the above parameter, the dialect will catch ``ProgrammingError``
|
||||
exceptions raised during ``connection.rollback()`` and emit a warning
|
||||
if the error message contains code ``111214``, however will not raise
|
||||
an exception.
|
||||
|
||||
.. versionadded:: 1.4.40 Added the
|
||||
``ignore_no_transaction_on_rollback=True`` parameter.
|
||||
|
||||
Enable autocommit for Azure SQL Data Warehouse (DW) connections
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Azure SQL Data Warehouse does not support transactions,
|
||||
and that can cause problems with SQLAlchemy's "autobegin" (and implicit
|
||||
commit/rollback) behavior. We can avoid these problems by enabling autocommit
|
||||
at both the pyodbc and engine levels::
|
||||
|
||||
connection_url = sa.engine.URL.create(
|
||||
"mssql+pyodbc",
|
||||
username="scott",
|
||||
password="tiger",
|
||||
host="dw.azure.example.com",
|
||||
database="mydb",
|
||||
query={
|
||||
"driver": "ODBC Driver 17 for SQL Server",
|
||||
"autocommit": "True",
|
||||
},
|
||||
)
|
||||
|
||||
engine = create_engine(connection_url).execution_options(
|
||||
isolation_level="AUTOCOMMIT"
|
||||
)
|
||||
|
||||
Avoiding sending large string parameters as TEXT/NTEXT
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
By default, for historical reasons, Microsoft's ODBC drivers for SQL Server
|
||||
send long string parameters (greater than 4000 SBCS characters or 2000 Unicode
|
||||
characters) as TEXT/NTEXT values. TEXT and NTEXT have been deprecated for many
|
||||
years and are starting to cause compatibility issues with newer versions of
|
||||
SQL Server/Azure. For example, see `this
|
||||
issue <https://github.com/mkleehammer/pyodbc/issues/835>`_.
|
||||
|
||||
Starting with ODBC Driver 18 for SQL Server we can override the legacy
|
||||
behavior and pass long strings as varchar(max)/nvarchar(max) using the
|
||||
``LongAsMax=Yes`` connection string parameter::
|
||||
|
||||
connection_url = sa.engine.URL.create(
|
||||
"mssql+pyodbc",
|
||||
username="scott",
|
||||
password="tiger",
|
||||
host="mssqlserver.example.com",
|
||||
database="mydb",
|
||||
query={
|
||||
"driver": "ODBC Driver 18 for SQL Server",
|
||||
"LongAsMax": "Yes",
|
||||
},
|
||||
)
|
||||
|
||||
Pyodbc Pooling / connection close behavior
|
||||
------------------------------------------
|
||||
|
||||
PyODBC uses internal `pooling
|
||||
<https://github.com/mkleehammer/pyodbc/wiki/The-pyodbc-Module#pooling>`_ by
|
||||
default, which means connections will be longer lived than they are within
|
||||
SQLAlchemy itself. As SQLAlchemy has its own pooling behavior, it is often
|
||||
preferable to disable this behavior. This behavior can only be disabled
|
||||
globally at the PyODBC module level, **before** any connections are made::
|
||||
|
||||
import pyodbc
|
||||
|
||||
pyodbc.pooling = False
|
||||
|
||||
# don't use the engine before pooling is set to False
|
||||
engine = create_engine("mssql+pyodbc://user:pass@dsn")
|
||||
|
||||
If this variable is left at its default value of ``True``, **the application
|
||||
will continue to maintain active database connections**, even when the
|
||||
SQLAlchemy engine itself fully discards a connection or if the engine is
|
||||
disposed.
|
||||
|
||||
.. seealso::
|
||||
|
||||
`pooling <https://github.com/mkleehammer/pyodbc/wiki/The-pyodbc-Module#pooling>`_ -
|
||||
in the PyODBC documentation.
|
||||
|
||||
Driver / Unicode Support
|
||||
-------------------------
|
||||
|
||||
PyODBC works best with Microsoft ODBC drivers, particularly in the area
|
||||
of Unicode support on both Python 2 and Python 3.
|
||||
|
||||
Using the FreeTDS ODBC drivers on Linux or OSX with PyODBC is **not**
|
||||
recommended; there have been historically many Unicode-related issues
|
||||
in this area, including before Microsoft offered ODBC drivers for Linux
|
||||
and OSX. Now that Microsoft offers drivers for all platforms, for
|
||||
PyODBC support these are recommended. FreeTDS remains relevant for
|
||||
non-ODBC drivers such as pymssql where it works very well.
|
||||
|
||||
|
||||
Rowcount Support
|
||||
----------------
|
||||
|
||||
Previous limitations with the SQLAlchemy ORM's "versioned rows" feature with
|
||||
Pyodbc have been resolved as of SQLAlchemy 2.0.5. See the notes at
|
||||
:ref:`mssql_rowcount_versioning`.
|
||||
|
||||
.. _mssql_pyodbc_fastexecutemany:
|
||||
|
||||
Fast Executemany Mode
|
||||
---------------------
|
||||
|
||||
The PyODBC driver includes support for a "fast executemany" mode of execution
|
||||
which greatly reduces round trips for a DBAPI ``executemany()`` call when using
|
||||
Microsoft ODBC drivers, for **limited size batches that fit in memory**. The
|
||||
feature is enabled by setting the attribute ``.fast_executemany`` on the DBAPI
|
||||
cursor when an executemany call is to be used. The SQLAlchemy PyODBC SQL
|
||||
Server dialect supports this parameter by passing the
|
||||
``fast_executemany`` parameter to
|
||||
:func:`_sa.create_engine` , when using the **Microsoft ODBC driver only**::
|
||||
|
||||
engine = create_engine(
|
||||
"mssql+pyodbc://scott:tiger@mssql2017:1433/test?driver=ODBC+Driver+17+for+SQL+Server",
|
||||
fast_executemany=True,
|
||||
)
|
||||
|
||||
.. versionchanged:: 2.0.9 - the ``fast_executemany`` parameter now has its
|
||||
intended effect of this PyODBC feature taking effect for all INSERT
|
||||
statements that are executed with multiple parameter sets, which don't
|
||||
include RETURNING. Previously, SQLAlchemy 2.0's :term:`insertmanyvalues`
|
||||
feature would cause ``fast_executemany`` to not be used in most cases
|
||||
even if specified.
|
||||
|
||||
.. versionadded:: 1.3
|
||||
|
||||
.. seealso::
|
||||
|
||||
`fast executemany <https://github.com/mkleehammer/pyodbc/wiki/Features-beyond-the-DB-API#fast_executemany>`_
|
||||
- on github
|
||||
|
||||
.. _mssql_pyodbc_setinputsizes:
|
||||
|
||||
Setinputsizes Support
|
||||
-----------------------
|
||||
|
||||
As of version 2.0, the pyodbc ``cursor.setinputsizes()`` method is used for
|
||||
all statement executions, except for ``cursor.executemany()`` calls when
|
||||
fast_executemany=True where it is not supported (assuming
|
||||
:ref:`insertmanyvalues <engine_insertmanyvalues>` is kept enabled,
|
||||
"fastexecutemany" will not take place for INSERT statements in any case).
|
||||
|
||||
The use of ``cursor.setinputsizes()`` can be disabled by passing
|
||||
``use_setinputsizes=False`` to :func:`_sa.create_engine`.
|
||||
|
||||
When ``use_setinputsizes`` is left at its default of ``True``, the
|
||||
specific per-type symbols passed to ``cursor.setinputsizes()`` can be
|
||||
programmatically customized using the :meth:`.DialectEvents.do_setinputsizes`
|
||||
hook. See that method for usage examples.
|
||||
|
||||
.. versionchanged:: 2.0 The mssql+pyodbc dialect now defaults to using
|
||||
``use_setinputsizes=True`` for all statement executions with the exception of
|
||||
cursor.executemany() calls when fast_executemany=True. The behavior can
|
||||
be turned off by passing ``use_setinputsizes=False`` to
|
||||
:func:`_sa.create_engine`.
|
||||
|
||||
""" # noqa
|
||||
|
||||
|
||||
import datetime
|
||||
import decimal
|
||||
import re
|
||||
import struct
|
||||
|
||||
from .base import _MSDateTime
|
||||
from .base import _MSUnicode
|
||||
from .base import _MSUnicodeText
|
||||
from .base import BINARY
|
||||
from .base import DATETIMEOFFSET
|
||||
from .base import MSDialect
|
||||
from .base import MSExecutionContext
|
||||
from .base import VARBINARY
|
||||
from .json import JSON as _MSJson
|
||||
from .json import JSONIndexType as _MSJsonIndexType
|
||||
from .json import JSONPathType as _MSJsonPathType
|
||||
from ... import exc
|
||||
from ... import types as sqltypes
|
||||
from ... import util
|
||||
from ...connectors.pyodbc import PyODBCConnector
|
||||
from ...engine import cursor as _cursor
|
||||
|
||||
|
||||
class _ms_numeric_pyodbc:
    """Turns Decimals with adjusted() < 0 or > 7 into strings.

    The routines here are needed for older pyodbc versions
    as well as current mxODBC versions.

    """

    def bind_processor(self, dialect):
        # chain to the underlying Numeric/Float bind processor, if any
        super_process = super().bind_processor(dialect)

        # the string workaround is only needed for pyodbc < 2.1.8
        # (see dialect._need_decimal_fix set in MSDialect_pyodbc.__init__)
        if not dialect._need_decimal_fix:
            return super_process

        def process(value):
            if self.asdecimal and isinstance(value, decimal.Decimal):
                adjusted = value.adjusted()
                if adjusted < 0:
                    # very small magnitude, e.g. Decimal("0.0000123")
                    return self._small_dec_to_string(value)
                elif adjusted > 7:
                    # very large magnitude, e.g. Decimal("1.23E+10")
                    return self._large_dec_to_string(value)

            if super_process:
                return super_process(value)
            else:
                return value

        return process

    # these routines needed for older versions of pyodbc.
    # as of 2.1.8 this logic is integrated.

    def _small_dec_to_string(self, value):
        # render a Decimal with adjusted() < 0 as "0.00...digits" with an
        # explicit sign, avoiding scientific notation
        return "%s0.%s%s" % (
            (value < 0 and "-" or ""),
            "0" * (abs(value.adjusted()) - 1),
            "".join([str(nint) for nint in value.as_tuple()[1]]),
        )

    def _large_dec_to_string(self, value):
        # render a Decimal with adjusted() > 7 without scientific notation
        _int = value.as_tuple()[1]
        if "E" in str(value):
            # value prints in exponent form; expand digits and pad with
            # trailing zeros up to the full magnitude
            result = "%s%s%s" % (
                (value < 0 and "-" or ""),
                "".join([str(s) for s in _int]),
                "0" * (value.adjusted() - (len(_int) - 1)),
            )
        else:
            if (len(_int) - 1) > value.adjusted():
                # digits extend past the decimal point; split into integral
                # and fractional parts around adjusted()
                result = "%s%s.%s" % (
                    (value < 0 and "-" or ""),
                    "".join([str(s) for s in _int][0 : value.adjusted() + 1]),
                    "".join([str(s) for s in _int][value.adjusted() + 1 :]),
                )
            else:
                # purely integral value
                result = "%s%s" % (
                    (value < 0 and "-" or ""),
                    "".join([str(s) for s in _int][0 : value.adjusted() + 1]),
                )
        return result
|
||||
|
||||
|
||||
class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric):
    """Numeric type with the pyodbc decimal-to-string workaround applied."""

    pass
|
||||
|
||||
|
||||
class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
    """Float type with the pyodbc decimal-to-string workaround applied."""

    pass
|
||||
|
||||
|
||||
class _ms_binary_pyodbc:
|
||||
"""Wraps binary values in dialect-specific Binary wrapper.
|
||||
If the value is null, return a pyodbc-specific BinaryNull
|
||||
object to prevent pyODBC [and FreeTDS] from defaulting binary
|
||||
NULL types to SQLWCHAR and causing implicit conversion errors.
|
||||
"""
|
||||
|
||||
def bind_processor(self, dialect):
|
||||
if dialect.dbapi is None:
|
||||
return None
|
||||
|
||||
DBAPIBinary = dialect.dbapi.Binary
|
||||
|
||||
def process(value):
|
||||
if value is not None:
|
||||
return DBAPIBinary(value)
|
||||
else:
|
||||
# pyodbc-specific
|
||||
return dialect.dbapi.BinaryNull
|
||||
|
||||
return process
|
||||
|
||||
|
||||
class _ODBCDateTimeBindProcessor:
|
||||
"""Add bind processors to handle datetimeoffset behaviors"""
|
||||
|
||||
has_tz = False
|
||||
|
||||
def bind_processor(self, dialect):
|
||||
def process(value):
|
||||
if value is None:
|
||||
return None
|
||||
elif isinstance(value, str):
|
||||
# if a string was passed directly, allow it through
|
||||
return value
|
||||
elif not value.tzinfo or (not self.timezone and not self.has_tz):
|
||||
# for DateTime(timezone=False)
|
||||
return value
|
||||
else:
|
||||
# for DATETIMEOFFSET or DateTime(timezone=True)
|
||||
#
|
||||
# Convert to string format required by T-SQL
|
||||
dto_string = value.strftime("%Y-%m-%d %H:%M:%S.%f %z")
|
||||
# offset needs a colon, e.g., -0700 -> -07:00
|
||||
# "UTC offset in the form (+-)HHMM[SS[.ffffff]]"
|
||||
# backend currently rejects seconds / fractional seconds
|
||||
dto_string = re.sub(
|
||||
r"([\+\-]\d{2})([\d\.]+)$", r"\1:\2", dto_string
|
||||
)
|
||||
return dto_string
|
||||
|
||||
return process
|
||||
|
||||
|
||||
class _ODBCDateTime(_ODBCDateTimeBindProcessor, _MSDateTime):
    """DateTime type combined with the pyodbc datetimeoffset bind processor."""

    pass
|
||||
|
||||
|
||||
class _ODBCDATETIMEOFFSET(_ODBCDateTimeBindProcessor, DATETIMEOFFSET):
    """DATETIMEOFFSET type; tz-aware values are always rendered as strings."""

    has_tz = True
|
||||
|
||||
|
||||
class _VARBINARY_pyodbc(_ms_binary_pyodbc, VARBINARY):
    """VARBINARY type using the pyodbc Binary / BinaryNull wrapper."""

    pass
|
||||
|
||||
|
||||
class _BINARY_pyodbc(_ms_binary_pyodbc, BINARY):
    """BINARY type using the pyodbc Binary / BinaryNull wrapper."""

    pass
|
||||
|
||||
|
||||
class _String_pyodbc(sqltypes.String):
    """String type supplying setinputsizes() information to pyodbc."""

    def get_dbapi_type(self, dbapi):
        length = self.length
        if length not in (None, "max") and length < 2000:
            return dbapi.SQL_VARCHAR
        # unbounded / "max" / >= 2000: use the (type, size, precision)
        # tuple form
        return (dbapi.SQL_VARCHAR, 0, 0)
|
||||
|
||||
|
||||
class _Unicode_pyodbc(_MSUnicode):
    """Unicode type supplying setinputsizes() information to pyodbc."""

    def get_dbapi_type(self, dbapi):
        length = self.length
        if length not in (None, "max") and length < 2000:
            return dbapi.SQL_WVARCHAR
        # unbounded / "max" / >= 2000: use the (type, size, precision)
        # tuple form
        return (dbapi.SQL_WVARCHAR, 0, 0)
|
||||
|
||||
|
||||
class _UnicodeText_pyodbc(_MSUnicodeText):
    """UnicodeText type supplying setinputsizes() information to pyodbc."""

    def get_dbapi_type(self, dbapi):
        length = self.length
        if length not in (None, "max") and length < 2000:
            return dbapi.SQL_WVARCHAR
        # unbounded / "max" / >= 2000: use the (type, size, precision)
        # tuple form
        return (dbapi.SQL_WVARCHAR, 0, 0)
|
||||
|
||||
|
||||
class _JSON_pyodbc(_MSJson):
    """JSON type reported to pyodbc setinputsizes() as wide varchar."""

    def get_dbapi_type(self, dbapi):
        # (type, size, precision) tuple form, matching the unbounded
        # string case in _String_pyodbc / _Unicode_pyodbc
        return (dbapi.SQL_WVARCHAR, 0, 0)
|
||||
|
||||
|
||||
class _JSONIndexType_pyodbc(_MSJsonIndexType):
    """JSON index type bound as SQL_WVARCHAR for pyodbc setinputsizes()."""

    def get_dbapi_type(self, dbapi):
        return dbapi.SQL_WVARCHAR
|
||||
|
||||
|
||||
class _JSONPathType_pyodbc(_MSJsonPathType):
    """JSON path type bound as SQL_WVARCHAR for pyodbc setinputsizes()."""

    def get_dbapi_type(self, dbapi):
        return dbapi.SQL_WVARCHAR
|
||||
|
||||
|
||||
class MSExecutionContext_pyodbc(MSExecutionContext):
    """Execution context that embeds scope_identity() into INSERTs."""

    # True when "; select scope_identity()" was appended to the statement
    _embedded_scope_identity = False

    def pre_exec(self):
        """where appropriate, issue "select scope_identity()" in the same
        statement.

        Background on why "scope_identity()" is preferable to "@@identity":
        https://msdn.microsoft.com/en-us/library/ms190315.aspx

        Background on why we attempt to embed "scope_identity()" into the same
        statement as the INSERT:
        https://code.google.com/p/pyodbc/wiki/FAQs#How_do_I_retrieve_autogenerated/identity_values?

        """

        super().pre_exec()

        # don't embed the scope_identity select into an
        # "INSERT .. DEFAULT VALUES"
        if (
            self._select_lastrowid
            and self.dialect.use_scope_identity
            and len(self.parameters[0])
        ):
            self._embedded_scope_identity = True

            self.statement += "; select scope_identity()"

    def post_exec(self):
        """Retrieve the embedded scope_identity() result, if one was added."""
        if self._embedded_scope_identity:
            # Fetch the last inserted id from the manipulated statement
            # We may have to skip over a number of result sets with
            # no data (due to triggers, etc.)
            while True:
                try:
                    # fetchall() ensures the cursor is consumed
                    # without closing it (FreeTDS particularly)
                    rows = self.cursor.fetchall()
                except self.dialect.dbapi.Error:
                    # no way around this - nextset() consumes the previous set
                    # so we need to just keep flipping
                    self.cursor.nextset()
                else:
                    if not rows:
                        # async adapter drivers just return None here
                        self.cursor.nextset()
                        continue
                    row = rows[0]
                    break

            self._lastrowid = int(row[0])

            # the result sets were consumed above; report no cursor rows
            self.cursor_fetch_strategy = _cursor._NO_CURSOR_DML
        else:
            super().post_exec()
|
||||
|
||||
|
||||
class MSDialect_pyodbc(PyODBCConnector, MSDialect):
    """SQL Server dialect implemented on top of the pyodbc DBAPI."""

    supports_statement_cache = True

    # note this parameter is no longer used by the ORM or default dialect
    # see #9414
    supports_sane_rowcount_returning = False

    execution_ctx_cls = MSExecutionContext_pyodbc

    # augment the base MS type map with pyodbc-specific implementations
    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.Numeric: _MSNumeric_pyodbc,
            sqltypes.Float: _MSFloat_pyodbc,
            BINARY: _BINARY_pyodbc,
            # support DateTime(timezone=True)
            sqltypes.DateTime: _ODBCDateTime,
            DATETIMEOFFSET: _ODBCDATETIMEOFFSET,
            # SQL Server dialect has a VARBINARY that is just to support
            # "deprecate_large_types" w/ VARBINARY(max), but also we must
            # handle the usual SQL standard VARBINARY
            VARBINARY: _VARBINARY_pyodbc,
            sqltypes.VARBINARY: _VARBINARY_pyodbc,
            sqltypes.LargeBinary: _VARBINARY_pyodbc,
            sqltypes.String: _String_pyodbc,
            sqltypes.Unicode: _Unicode_pyodbc,
            sqltypes.UnicodeText: _UnicodeText_pyodbc,
            sqltypes.JSON: _JSON_pyodbc,
            sqltypes.JSON.JSONIndexType: _JSONIndexType_pyodbc,
            sqltypes.JSON.JSONPathType: _JSONPathType_pyodbc,
            # this excludes Enum from the string/VARCHAR thing for now
            # it looks like Enum's adaptation doesn't really support the
            # String type itself having a dialect-level impl
            sqltypes.Enum: sqltypes.Enum,
        },
    )

    def __init__(
        self,
        fast_executemany=False,
        use_setinputsizes=True,
        **params,
    ):
        """Construct the dialect.

        :param fast_executemany: enable pyodbc's ``fast_executemany``
         mode for executemany() calls (effective with Microsoft ODBC
         drivers only).
        :param use_setinputsizes: pass bind type information to pyodbc
         via ``cursor.setinputsizes()``; defaults to True.
        """
        super().__init__(use_setinputsizes=use_setinputsizes, **params)
        # embedding scope_identity() requires cursor.nextset(); disable
        # it when the driver's Cursor lacks that method
        self.use_scope_identity = (
            self.use_scope_identity
            and self.dbapi
            and hasattr(self.dbapi.Cursor, "nextset")
        )
        # pyodbc < 2.1.8 needs the Decimal-to-string workaround in
        # _ms_numeric_pyodbc
        self._need_decimal_fix = self.dbapi and self._dbapi_version() < (
            2,
            1,
            8,
        )
        self.fast_executemany = fast_executemany
        if fast_executemany:
            self.use_insertmanyvalues_wo_returning = False

    def _get_server_version_info(self, connection):
        """Return the server version as a tuple of ints, e.g. (15, 0, ...)."""
        try:
            # "Version of the instance of SQL Server, in the form
            # of 'major.minor.build.revision'"
            raw = connection.exec_driver_sql(
                "SELECT CAST(SERVERPROPERTY('ProductVersion') AS VARCHAR)"
            ).scalar()
        except exc.DBAPIError:
            # SQL Server docs indicate this function isn't present prior to
            # 2008. Before we had the VARCHAR cast above, pyodbc would also
            # fail on this query.
            return super()._get_server_version_info(connection)
        else:
            version = []
            r = re.compile(r"[.\-]")
            for n in r.split(raw):
                try:
                    version.append(int(n))
                except ValueError:
                    # skip non-numeric segments of the version string
                    pass
            return tuple(version)

    def on_connect(self):
        """Return a connect-time hook that chains the parent hook and
        installs the DATETIMEOFFSET output converter."""
        super_ = super().on_connect()

        def on_connect(conn):
            if super_ is not None:
                super_(conn)

            self._setup_timestampoffset_type(conn)

        return on_connect

    def _setup_timestampoffset_type(self, connection):
        # output converter function for datetimeoffset
        def _handle_datetimeoffset(dto_value):
            # raw ODBC struct: year, month, day, hour, minute, second,
            # fraction, tz hour offset, tz minute offset; the // 1000 on
            # the fraction converts it to datetime's microseconds
            tup = struct.unpack("<6hI2h", dto_value)
            return datetime.datetime(
                tup[0],
                tup[1],
                tup[2],
                tup[3],
                tup[4],
                tup[5],
                tup[6] // 1000,
                datetime.timezone(
                    datetime.timedelta(hours=tup[7], minutes=tup[8])
                ),
            )

        odbc_SQL_SS_TIMESTAMPOFFSET = -155  # as defined in SQLNCLI.h
        connection.add_output_converter(
            odbc_SQL_SS_TIMESTAMPOFFSET, _handle_datetimeoffset
        )

    def do_executemany(self, cursor, statement, parameters, context=None):
        # enable pyodbc's fast_executemany mode on the cursor if configured
        if self.fast_executemany:
            cursor.fast_executemany = True
        super().do_executemany(cursor, statement, parameters, context=context)

    def is_disconnect(self, e, connection, cursor):
        """Return True if the given error indicates a dropped connection."""
        if isinstance(e, self.dbapi.Error):
            code = e.args[0]
            # ODBC SQLSTATE / driver codes that signal connectivity loss
            if code in {
                "08S01",
                "01000",
                "01002",
                "08003",
                "08007",
                "08S02",
                "08001",
                "HYT00",
                "HY010",
                "10054",
            }:
                return True
        return super().is_disconnect(e, connection, cursor)
|
||||
|
||||
|
||||
# module-level entry point used by the SQLAlchemy dialect loader
dialect = MSDialect_pyodbc
|
@ -0,0 +1,104 @@
|
||||
# dialects/mysql/__init__.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
from . import aiomysql # noqa
|
||||
from . import asyncmy # noqa
|
||||
from . import base # noqa
|
||||
from . import cymysql # noqa
|
||||
from . import mariadbconnector # noqa
|
||||
from . import mysqlconnector # noqa
|
||||
from . import mysqldb # noqa
|
||||
from . import pymysql # noqa
|
||||
from . import pyodbc # noqa
|
||||
from .base import BIGINT
|
||||
from .base import BINARY
|
||||
from .base import BIT
|
||||
from .base import BLOB
|
||||
from .base import BOOLEAN
|
||||
from .base import CHAR
|
||||
from .base import DATE
|
||||
from .base import DATETIME
|
||||
from .base import DECIMAL
|
||||
from .base import DOUBLE
|
||||
from .base import ENUM
|
||||
from .base import FLOAT
|
||||
from .base import INTEGER
|
||||
from .base import JSON
|
||||
from .base import LONGBLOB
|
||||
from .base import LONGTEXT
|
||||
from .base import MEDIUMBLOB
|
||||
from .base import MEDIUMINT
|
||||
from .base import MEDIUMTEXT
|
||||
from .base import NCHAR
|
||||
from .base import NUMERIC
|
||||
from .base import NVARCHAR
|
||||
from .base import REAL
|
||||
from .base import SET
|
||||
from .base import SMALLINT
|
||||
from .base import TEXT
|
||||
from .base import TIME
|
||||
from .base import TIMESTAMP
|
||||
from .base import TINYBLOB
|
||||
from .base import TINYINT
|
||||
from .base import TINYTEXT
|
||||
from .base import VARBINARY
|
||||
from .base import VARCHAR
|
||||
from .base import YEAR
|
||||
from .dml import Insert
|
||||
from .dml import insert
|
||||
from .expression import match
|
||||
from .mariadb import INET4
|
||||
from .mariadb import INET6
|
||||
|
||||
# default dialect
|
||||
base.dialect = dialect = mysqldb.dialect
|
||||
|
||||
__all__ = (
|
||||
"BIGINT",
|
||||
"BINARY",
|
||||
"BIT",
|
||||
"BLOB",
|
||||
"BOOLEAN",
|
||||
"CHAR",
|
||||
"DATE",
|
||||
"DATETIME",
|
||||
"DECIMAL",
|
||||
"DOUBLE",
|
||||
"ENUM",
|
||||
"FLOAT",
|
||||
"INET4",
|
||||
"INET6",
|
||||
"INTEGER",
|
||||
"INTEGER",
|
||||
"JSON",
|
||||
"LONGBLOB",
|
||||
"LONGTEXT",
|
||||
"MEDIUMBLOB",
|
||||
"MEDIUMINT",
|
||||
"MEDIUMTEXT",
|
||||
"NCHAR",
|
||||
"NVARCHAR",
|
||||
"NUMERIC",
|
||||
"SET",
|
||||
"SMALLINT",
|
||||
"REAL",
|
||||
"TEXT",
|
||||
"TIME",
|
||||
"TIMESTAMP",
|
||||
"TINYBLOB",
|
||||
"TINYINT",
|
||||
"TINYTEXT",
|
||||
"VARBINARY",
|
||||
"VARCHAR",
|
||||
"YEAR",
|
||||
"dialect",
|
||||
"insert",
|
||||
"Insert",
|
||||
"match",
|
||||
)
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,335 @@
|
||||
# dialects/mysql/aiomysql.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors <see AUTHORS
|
||||
# file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
r"""
|
||||
.. dialect:: mysql+aiomysql
|
||||
:name: aiomysql
|
||||
:dbapi: aiomysql
|
||||
:connectstring: mysql+aiomysql://user:password@host:port/dbname[?key=value&key=value...]
|
||||
:url: https://github.com/aio-libs/aiomysql
|
||||
|
||||
The aiomysql dialect is SQLAlchemy's second Python asyncio dialect.
|
||||
|
||||
Using a special asyncio mediation layer, the aiomysql dialect is usable
|
||||
as the backend for the :ref:`SQLAlchemy asyncio <asyncio_toplevel>`
|
||||
extension package.
|
||||
|
||||
This dialect should normally be used only with the
|
||||
:func:`_asyncio.create_async_engine` engine creation function::
|
||||
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
|
||||
engine = create_async_engine(
|
||||
"mysql+aiomysql://user:pass@hostname/dbname?charset=utf8mb4"
|
||||
)
|
||||
|
||||
""" # noqa
|
||||
from collections import deque
|
||||
|
||||
from .pymysql import MySQLDialect_pymysql
|
||||
from ... import pool
|
||||
from ... import util
|
||||
from ...engine import AdaptedConnection
|
||||
from ...util.concurrency import asyncio
|
||||
from ...util.concurrency import await_fallback
|
||||
from ...util.concurrency import await_only
|
||||
|
||||
|
||||
class AsyncAdapt_aiomysql_cursor:
    """Adapts an aiomysql (asyncio) cursor to the blocking pep-249 shape."""

    # TODO: base on connectors/asyncio.py
    # see #10415
    server_side = False
    __slots__ = (
        "_adapt_connection",
        "_connection",
        "await_",
        "_cursor",
        "_rows",
    )

    def __init__(self, adapt_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        # callable that drives a coroutine to completion synchronously
        # (supplied by the adapted connection)
        self.await_ = adapt_connection.await_

        cursor = self._connection.cursor(adapt_connection.dbapi.Cursor)

        # see https://github.com/aio-libs/aiomysql/issues/543
        self._cursor = self.await_(cursor.__aenter__())
        # pre-fetched result rows, consumed from the left
        self._rows = deque()

    @property
    def description(self):
        return self._cursor.description

    @property
    def rowcount(self):
        return self._cursor.rowcount

    @property
    def arraysize(self):
        return self._cursor.arraysize

    @arraysize.setter
    def arraysize(self, value):
        self._cursor.arraysize = value

    @property
    def lastrowid(self):
        return self._cursor.lastrowid

    def close(self):
        # note we aren't actually closing the cursor here,
        # we are just letting GC do it. to allow this to be async
        # we would need the Result to change how it does "Safe close cursor".
        # MySQL "cursors" don't actually have state to be "closed" besides
        # exhausting rows, which we already have done for sync cursor.
        # another option would be to emulate aiosqlite dialect and assign
        # cursor only if we are doing server side cursor operation.
        self._rows.clear()

    def execute(self, operation, parameters=None):
        # synchronous facade over _execute_async
        return self.await_(self._execute_async(operation, parameters))

    def executemany(self, operation, seq_of_parameters):
        # synchronous facade over _executemany_async
        return self.await_(
            self._executemany_async(operation, seq_of_parameters)
        )

    async def _execute_async(self, operation, parameters):
        # serialize statement execution on the shared connection
        async with self._adapt_connection._execute_mutex:
            result = await self._cursor.execute(operation, parameters)

            if not self.server_side:
                # aiomysql has a "fake" async result, so we have to pull it out
                # of that here since our default result is not async.
                # we could just as easily grab "_rows" here and be done with it
                # but this is safer.
                self._rows = deque(await self._cursor.fetchall())
            return result

    async def _executemany_async(self, operation, seq_of_parameters):
        # serialize statement execution on the shared connection
        async with self._adapt_connection._execute_mutex:
            return await self._cursor.executemany(operation, seq_of_parameters)

    def setinputsizes(self, *inputsizes):
        # pep-249 no-op
        pass

    def __iter__(self):
        # drain the pre-fetched rows from the left
        while self._rows:
            yield self._rows.popleft()

    def fetchone(self):
        if self._rows:
            return self._rows.popleft()
        else:
            return None

    def fetchmany(self, size=None):
        if size is None:
            size = self.arraysize

        rr = self._rows
        return [rr.popleft() for _ in range(min(size, len(rr)))]

    def fetchall(self):
        retval = list(self._rows)
        self._rows.clear()
        return retval
|
||||
|
||||
|
||||
class AsyncAdapt_aiomysql_ss_cursor(AsyncAdapt_aiomysql_cursor):
    """Server-side (unbuffered) variant of the adapted aiomysql cursor.

    Rows are pulled from the driver on demand via ``await_`` rather than
    pre-buffered at execute time, and the underlying cursor is closed
    explicitly.
    """

    # TODO: base on connectors/asyncio.py
    # see #10415
    __slots__ = ()
    server_side = True

    def __init__(self, adapt_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        self.await_ = adapt_connection.await_

        ss_cursor = self._connection.cursor(adapt_connection.dbapi.SSCursor)
        self._cursor = self.await_(ss_cursor.__aenter__())

    def close(self):
        # idempotent; only close a still-open cursor
        if self._cursor is None:
            return
        self.await_(self._cursor.close())
        self._cursor = None

    def fetchone(self):
        return self.await_(self._cursor.fetchone())

    def fetchmany(self, size=None):
        return self.await_(self._cursor.fetchmany(size=size))

    def fetchall(self):
        return self.await_(self._cursor.fetchall())
|
||||
|
||||
|
||||
class AsyncAdapt_aiomysql_connection(AdaptedConnection):
    """Adapt an aiomysql (async) connection to the synchronous pep-249
    connection interface.

    All awaitable driver calls are resolved through ``await_``; the
    fallback subclass substitutes a different awaiter for legacy
    "async_fallback" mode.
    """

    # TODO: base on connectors/asyncio.py
    # see #10415
    await_ = staticmethod(await_only)
    __slots__ = ("dbapi", "_execute_mutex")

    def __init__(self, dbapi, connection):
        self.dbapi = dbapi
        self._connection = connection
        # serializes execute()/executemany() issued by cursors of this
        # connection
        self._execute_mutex = asyncio.Lock()

    def ping(self, reconnect):
        return self.await_(self._connection.ping(reconnect))

    def character_set_name(self):
        return self._connection.character_set_name()

    def autocommit(self, value):
        self.await_(self._connection.autocommit(value))

    def cursor(self, server_side=False):
        # server_side selects the unbuffered SSCursor adaptation
        if server_side:
            return AsyncAdapt_aiomysql_ss_cursor(self)
        else:
            return AsyncAdapt_aiomysql_cursor(self)

    def rollback(self):
        self.await_(self._connection.rollback())

    def commit(self):
        self.await_(self._connection.commit())

    def terminate(self):
        # it's not awaitable.
        self._connection.close()

    def close(self) -> None:
        # graceful close, awaiting the driver's shutdown
        self.await_(self._connection.ensure_closed())
|
||||
|
||||
|
||||
class AsyncAdaptFallback_aiomysql_connection(AsyncAdapt_aiomysql_connection):
    """Variant used for legacy ``async_fallback`` mode; identical except
    for the await mediator used to resolve driver coroutines."""

    # TODO: base on connectors/asyncio.py
    # see #10415
    __slots__ = ()

    # await_fallback drives the coroutine without a greenlet context
    await_ = staticmethod(await_fallback)
|
||||
|
||||
|
||||
class AsyncAdapt_aiomysql_dbapi:
    """Mimic a pep-249 DBAPI module on top of aiomysql + pymysql.

    The pep-249 exception hierarchy is re-exported from aiomysql and
    the DBAPI type constants from pymysql, so the dialect can treat
    this object like a regular synchronous driver module.
    """

    def __init__(self, aiomysql, pymysql):
        self.aiomysql = aiomysql
        self.pymysql = pymysql
        self.paramstyle = "format"
        self._init_dbapi_attributes()
        self.Cursor, self.SSCursor = self._init_cursors_subclasses()

    def _init_dbapi_attributes(self):
        # re-export the pep-249 exception classes from aiomysql.
        # (fix: the original tuple listed "InterfaceError" twice; the
        # redundant entry is removed.)
        for name in (
            "Warning",
            "Error",
            "InterfaceError",
            "DataError",
            "DatabaseError",
            "OperationalError",
            "IntegrityError",
            "ProgrammingError",
            "InternalError",
            "NotSupportedError",
        ):
            setattr(self, name, getattr(self.aiomysql, name))

        # DBAPI type constants come from pymysql
        for name in (
            "NUMBER",
            "STRING",
            "DATETIME",
            "BINARY",
            "TIMESTAMP",
            "Binary",
        ):
            setattr(self, name, getattr(self.pymysql, name))

    def connect(self, *arg, **kw):
        """Create an adapted connection.

        Honors the legacy ``async_fallback`` flag and an optional
        ``async_creator_fn`` override for the driver's connect callable.
        """
        async_fallback = kw.pop("async_fallback", False)
        creator_fn = kw.pop("async_creator_fn", self.aiomysql.connect)

        if util.asbool(async_fallback):
            return AsyncAdaptFallback_aiomysql_connection(
                self,
                await_fallback(creator_fn(*arg, **kw)),
            )
        else:
            return AsyncAdapt_aiomysql_connection(
                self,
                await_only(creator_fn(*arg, **kw)),
            )

    def _init_cursors_subclasses(self):
        # suppress unconditional warning emitted by aiomysql
        class Cursor(self.aiomysql.Cursor):
            async def _show_warnings(self, conn):
                pass

        class SSCursor(self.aiomysql.SSCursor):
            async def _show_warnings(self, conn):
                pass

        return Cursor, SSCursor
|
||||
|
||||
|
||||
class MySQLDialect_aiomysql(MySQLDialect_pymysql):
    """MySQL dialect running the async aiomysql driver behind the
    pymysql-compatible synchronous dialect interface."""

    driver = "aiomysql"
    supports_statement_cache = True

    supports_server_side_cursors = True
    _sscursor = AsyncAdapt_aiomysql_ss_cursor

    is_async = True
    has_terminate = True

    @classmethod
    def import_dbapi(cls):
        # aiomysql wraps pymysql, so both modules are imported
        return AsyncAdapt_aiomysql_dbapi(
            __import__("aiomysql"), __import__("pymysql")
        )

    @classmethod
    def get_pool_class(cls, url):
        # legacy async_fallback URL flag selects the fallback pool
        async_fallback = url.query.get("async_fallback", False)

        if util.asbool(async_fallback):
            return pool.FallbackAsyncAdaptedQueuePool
        else:
            return pool.AsyncAdaptedQueuePool

    def do_terminate(self, dbapi_connection) -> None:
        # hard close without awaiting the driver
        dbapi_connection.terminate()

    def create_connect_args(self, url):
        # aiomysql spells these URL fields "user" / "db"
        return super().create_connect_args(
            url, _translate_args=dict(username="user", database="db")
        )

    def is_disconnect(self, e, connection, cursor):
        if super().is_disconnect(e, connection, cursor):
            return True
        else:
            # additionally look for aiomysql's dropped-connection message
            str_e = str(e).lower()
            return "not connected" in str_e

    def _found_rows_client_flag(self):
        from pymysql.constants import CLIENT

        return CLIENT.FOUND_ROWS

    def get_driver_connection(self, connection):
        # unwrap the adapter to the raw aiomysql connection
        return connection._connection
|
||||
|
||||
|
||||
# module-level entry point consulted by SQLAlchemy's dialect registry
dialect = MySQLDialect_aiomysql
|
@ -0,0 +1,339 @@
|
||||
# dialects/mysql/asyncmy.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors <see AUTHORS
|
||||
# file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
r"""
|
||||
.. dialect:: mysql+asyncmy
|
||||
:name: asyncmy
|
||||
:dbapi: asyncmy
|
||||
:connectstring: mysql+asyncmy://user:password@host:port/dbname[?key=value&key=value...]
|
||||
:url: https://github.com/long2ice/asyncmy
|
||||
|
||||
Using a special asyncio mediation layer, the asyncmy dialect is usable
|
||||
as the backend for the :ref:`SQLAlchemy asyncio <asyncio_toplevel>`
|
||||
extension package.
|
||||
|
||||
This dialect should normally be used only with the
|
||||
:func:`_asyncio.create_async_engine` engine creation function::
|
||||
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
|
||||
engine = create_async_engine(
|
||||
"mysql+asyncmy://user:pass@hostname/dbname?charset=utf8mb4"
|
||||
)
|
||||
|
||||
""" # noqa
|
||||
from collections import deque
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from .pymysql import MySQLDialect_pymysql
|
||||
from ... import pool
|
||||
from ... import util
|
||||
from ...engine import AdaptedConnection
|
||||
from ...util.concurrency import asyncio
|
||||
from ...util.concurrency import await_fallback
|
||||
from ...util.concurrency import await_only
|
||||
|
||||
|
||||
class AsyncAdapt_asyncmy_cursor:
    """Adapt an asyncmy (async) cursor to the synchronous pep-249
    cursor interface.

    Async driver calls are resolved via the owning adapted connection's
    ``await_`` mediator; this default (non server-side) cursor
    pre-fetches all rows into a local buffer at execute time.
    """

    # TODO: base on connectors/asyncio.py
    # see #10415
    server_side = False
    __slots__ = (
        "_adapt_connection",
        "_connection",
        "await_",
        "_cursor",
        "_rows",
    )

    def __init__(self, adapt_connection):
        # the owning adapted connection and its await mediator
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        self.await_ = adapt_connection.await_

        cursor = self._connection.cursor()

        # enter the cursor's async context manager up front
        self._cursor = self.await_(cursor.__aenter__())
        # buffer of pre-fetched rows consumed by the fetch* methods
        self._rows = deque()

    @property
    def description(self):
        return self._cursor.description

    @property
    def rowcount(self):
        return self._cursor.rowcount

    @property
    def arraysize(self):
        return self._cursor.arraysize

    @arraysize.setter
    def arraysize(self, value):
        self._cursor.arraysize = value

    @property
    def lastrowid(self):
        return self._cursor.lastrowid

    def close(self):
        # note we aren't actually closing the cursor here,
        # we are just letting GC do it. to allow this to be async
        # we would need the Result to change how it does "Safe close cursor".
        # MySQL "cursors" don't actually have state to be "closed" besides
        # exhausting rows, which we already have done for sync cursor.
        # another option would be to emulate aiosqlite dialect and assign
        # cursor only if we are doing server side cursor operation.
        self._rows.clear()

    def execute(self, operation, parameters=None):
        """Execute a statement, blocking until the async call completes."""
        return self.await_(self._execute_async(operation, parameters))

    def executemany(self, operation, seq_of_parameters):
        """Execute a statement against a sequence of parameter sets."""
        return self.await_(
            self._executemany_async(operation, seq_of_parameters)
        )

    async def _execute_async(self, operation, parameters):
        # mutex also converts asyncmy's AttributeError into a DBAPI error
        async with self._adapt_connection._mutex_and_adapt_errors():
            # omit the parameters argument entirely when none were given
            if parameters is None:
                result = await self._cursor.execute(operation)
            else:
                result = await self._cursor.execute(operation, parameters)

            if not self.server_side:
                # asyncmy has a "fake" async result, so we have to pull it out
                # of that here since our default result is not async.
                # we could just as easily grab "_rows" here and be done with it
                # but this is safer.
                self._rows = deque(await self._cursor.fetchall())
            return result

    async def _executemany_async(self, operation, seq_of_parameters):
        async with self._adapt_connection._mutex_and_adapt_errors():
            return await self._cursor.executemany(operation, seq_of_parameters)

    def setinputsizes(self, *inputsizes):
        # no-op; input sizes are not used by this driver
        pass

    def __iter__(self):
        # consume the pre-fetched row buffer
        while self._rows:
            yield self._rows.popleft()

    def fetchone(self):
        if self._rows:
            return self._rows.popleft()
        else:
            return None

    def fetchmany(self, size=None):
        if size is None:
            size = self.arraysize

        rr = self._rows
        return [rr.popleft() for _ in range(min(size, len(rr)))]

    def fetchall(self):
        # drain the whole buffer
        retval = list(self._rows)
        self._rows.clear()
        return retval
|
||||
|
||||
|
||||
class AsyncAdapt_asyncmy_ss_cursor(AsyncAdapt_asyncmy_cursor):
    """Server-side (unbuffered) variant of the adapted asyncmy cursor.

    Rows are pulled from the driver on demand via ``await_`` instead of
    being pre-buffered, and the underlying cursor is closed explicitly.
    """

    # TODO: base on connectors/asyncio.py
    # see #10415
    __slots__ = ()
    server_side = True

    def __init__(self, adapt_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        self.await_ = adapt_connection.await_

        ss_cursor = self._connection.cursor(
            adapt_connection.dbapi.asyncmy.cursors.SSCursor
        )
        self._cursor = self.await_(ss_cursor.__aenter__())

    def close(self):
        # idempotent; only close a still-open cursor
        if self._cursor is None:
            return
        self.await_(self._cursor.close())
        self._cursor = None

    def fetchone(self):
        return self.await_(self._cursor.fetchone())

    def fetchmany(self, size=None):
        return self.await_(self._cursor.fetchmany(size=size))

    def fetchall(self):
        return self.await_(self._cursor.fetchall())
|
||||
|
||||
|
||||
class AsyncAdapt_asyncmy_connection(AdaptedConnection):
    """Adapt an asyncmy (async) connection to the synchronous pep-249
    connection interface.

    All awaitable driver calls resolve through ``await_``; the fallback
    subclass substitutes a different awaiter for legacy
    "async_fallback" mode.
    """

    # TODO: base on connectors/asyncio.py
    # see #10415
    await_ = staticmethod(await_only)
    __slots__ = ("dbapi", "_execute_mutex")

    def __init__(self, dbapi, connection):
        self.dbapi = dbapi
        self._connection = connection
        # serializes cursor execution on this connection
        self._execute_mutex = asyncio.Lock()

    @asynccontextmanager
    async def _mutex_and_adapt_errors(self):
        # hold the execute mutex and translate asyncmy's AttributeError
        # (seen when a network operation fails mid-call) into a proper
        # DBAPI InternalError
        async with self._execute_mutex:
            try:
                yield
            except AttributeError:
                raise self.dbapi.InternalError(
                    "network operation failed due to asyncmy attribute error"
                )

    def ping(self, reconnect):
        # reconnect is not supported; callers are expected to pass False
        assert not reconnect
        return self.await_(self._do_ping())

    async def _do_ping(self):
        async with self._mutex_and_adapt_errors():
            return await self._connection.ping(False)

    def character_set_name(self):
        return self._connection.character_set_name()

    def autocommit(self, value):
        self.await_(self._connection.autocommit(value))

    def cursor(self, server_side=False):
        # server_side selects the unbuffered SSCursor adaptation
        if server_side:
            return AsyncAdapt_asyncmy_ss_cursor(self)
        else:
            return AsyncAdapt_asyncmy_cursor(self)

    def rollback(self):
        self.await_(self._connection.rollback())

    def commit(self):
        self.await_(self._connection.commit())

    def terminate(self):
        # it's not awaitable.
        self._connection.close()

    def close(self) -> None:
        # graceful close, awaiting the driver's shutdown
        self.await_(self._connection.ensure_closed())
|
||||
|
||||
|
||||
class AsyncAdaptFallback_asyncmy_connection(AsyncAdapt_asyncmy_connection):
    """Variant used for legacy ``async_fallback`` mode; identical except
    for the await mediator used to resolve driver coroutines."""

    __slots__ = ()

    # await_fallback drives the coroutine without a greenlet context
    await_ = staticmethod(await_fallback)
|
||||
|
||||
|
||||
def _Binary(x):
|
||||
"""Return x as a binary type."""
|
||||
return bytes(x)
|
||||
|
||||
|
||||
class AsyncAdapt_asyncmy_dbapi:
    """Mimic a pep-249 DBAPI module on top of asyncmy.

    The pep-249 exception hierarchy is re-exported from
    ``asyncmy.errors``; type constants are provided as symbolic
    placeholders since asyncmy does not export them.
    """

    def __init__(self, asyncmy):
        self.asyncmy = asyncmy
        self.paramstyle = "format"
        self._init_dbapi_attributes()

    def _init_dbapi_attributes(self):
        # re-export the pep-249 exception classes from asyncmy.errors.
        # (fix: the original tuple listed "InterfaceError" twice; the
        # redundant entry is removed.)
        for name in (
            "Warning",
            "Error",
            "InterfaceError",
            "DataError",
            "DatabaseError",
            "OperationalError",
            "IntegrityError",
            "ProgrammingError",
            "InternalError",
            "NotSupportedError",
        ):
            setattr(self, name, getattr(self.asyncmy.errors, name))

    # symbolic placeholders for the DBAPI type constants
    STRING = util.symbol("STRING")
    NUMBER = util.symbol("NUMBER")
    BINARY = util.symbol("BINARY")
    DATETIME = util.symbol("DATETIME")
    TIMESTAMP = util.symbol("TIMESTAMP")
    Binary = staticmethod(_Binary)

    def connect(self, *arg, **kw):
        """Create an adapted connection.

        Honors the legacy ``async_fallback`` flag and an optional
        ``async_creator_fn`` override for the driver's connect callable.
        """
        async_fallback = kw.pop("async_fallback", False)
        creator_fn = kw.pop("async_creator_fn", self.asyncmy.connect)

        if util.asbool(async_fallback):
            return AsyncAdaptFallback_asyncmy_connection(
                self,
                await_fallback(creator_fn(*arg, **kw)),
            )
        else:
            return AsyncAdapt_asyncmy_connection(
                self,
                await_only(creator_fn(*arg, **kw)),
            )
|
||||
|
||||
|
||||
class MySQLDialect_asyncmy(MySQLDialect_pymysql):
    """MySQL dialect running the async asyncmy driver behind the
    pymysql-compatible synchronous dialect interface."""

    driver = "asyncmy"
    supports_statement_cache = True

    supports_server_side_cursors = True
    _sscursor = AsyncAdapt_asyncmy_ss_cursor

    is_async = True
    has_terminate = True

    @classmethod
    def import_dbapi(cls):
        return AsyncAdapt_asyncmy_dbapi(__import__("asyncmy"))

    @classmethod
    def get_pool_class(cls, url):
        # legacy async_fallback URL flag selects the fallback pool
        async_fallback = url.query.get("async_fallback", False)

        if util.asbool(async_fallback):
            return pool.FallbackAsyncAdaptedQueuePool
        else:
            return pool.AsyncAdaptedQueuePool

    def do_terminate(self, dbapi_connection) -> None:
        # hard close without awaiting the driver
        dbapi_connection.terminate()

    def create_connect_args(self, url):
        # asyncmy spells these URL fields "user" / "db"
        return super().create_connect_args(
            url, _translate_args=dict(username="user", database="db")
        )

    def is_disconnect(self, e, connection, cursor):
        if super().is_disconnect(e, connection, cursor):
            return True
        else:
            # additionally match the driver's dropped-connection message
            # and the adapter's own InternalError message (see
            # _mutex_and_adapt_errors)
            str_e = str(e).lower()
            return (
                "not connected" in str_e or "network operation failed" in str_e
            )

    def _found_rows_client_flag(self):
        from asyncmy.constants import CLIENT

        return CLIENT.FOUND_ROWS

    def get_driver_connection(self, connection):
        # unwrap the adapter to the raw asyncmy connection
        return connection._connection
|
||||
|
||||
|
||||
# module-level entry point consulted by SQLAlchemy's dialect registry
dialect = MySQLDialect_asyncmy
|
3582
venv/lib/python3.11/site-packages/sqlalchemy/dialects/mysql/base.py
Normal file
3582
venv/lib/python3.11/site-packages/sqlalchemy/dialects/mysql/base.py
Normal file
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,84 @@
|
||||
# dialects/mysql/cymysql.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
r"""
|
||||
|
||||
.. dialect:: mysql+cymysql
|
||||
:name: CyMySQL
|
||||
:dbapi: cymysql
|
||||
:connectstring: mysql+cymysql://<username>:<password>@<host>/<dbname>[?<options>]
|
||||
:url: https://github.com/nakagami/CyMySQL
|
||||
|
||||
.. note::
|
||||
|
||||
The CyMySQL dialect is **not tested as part of SQLAlchemy's continuous
|
||||
integration** and may have unresolved issues. The recommended MySQL
|
||||
dialects are mysqlclient and PyMySQL.
|
||||
|
||||
""" # noqa
|
||||
|
||||
from .base import BIT
|
||||
from .base import MySQLDialect
|
||||
from .mysqldb import MySQLDialect_mysqldb
|
||||
from ... import util
|
||||
|
||||
|
||||
class _cymysqlBIT(BIT):
    """BIT implementation for CyMySQL that decodes the raw value to int."""

    def result_processor(self, dialect, coltype):
        """Convert MySQL's 64 bit, variable length binary string to a long."""

        def process(value):
            # None (SQL NULL) passes through untouched
            if value is None:
                return value
            # fold the big-endian byte sequence into a single integer
            result = 0
            for byte in value:
                result = (result << 8) | byte
            return result

        return process
|
||||
|
||||
|
||||
class MySQLDialect_cymysql(MySQLDialect_mysqldb):
    """MySQL dialect for the CyMySQL driver (mysqldb-compatible API)."""

    driver = "cymysql"
    supports_statement_cache = True

    description_encoding = None
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False
    supports_unicode_statements = True

    # substitute the cymysql-specific BIT result converter
    colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _cymysqlBIT})

    @classmethod
    def import_dbapi(cls):
        return __import__("cymysql")

    def _detect_charset(self, connection):
        return connection.connection.charset

    def _extract_error_code(self, exception):
        return exception.errno

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.OperationalError):
            # MySQL client error codes for lost/gone-away connections
            return self._extract_error_code(e) in (
                2006,
                2013,
                2014,
                2045,
                2055,
            )
        elif isinstance(e, self.dbapi.InterfaceError):
            # if underlying connection is closed,
            # this is the error you get
            return True
        else:
            return False
|
||||
|
||||
|
||||
# module-level entry point consulted by SQLAlchemy's dialect registry
dialect = MySQLDialect_cymysql
|
@ -0,0 +1,225 @@
|
||||
# dialects/mysql/dml.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
from typing import Union
|
||||
|
||||
from ... import exc
|
||||
from ... import util
|
||||
from ...sql._typing import _DMLTableArgument
|
||||
from ...sql.base import _exclusive_against
|
||||
from ...sql.base import _generative
|
||||
from ...sql.base import ColumnCollection
|
||||
from ...sql.base import ReadOnlyColumnCollection
|
||||
from ...sql.dml import Insert as StandardInsert
|
||||
from ...sql.elements import ClauseElement
|
||||
from ...sql.elements import KeyedColumnElement
|
||||
from ...sql.expression import alias
|
||||
from ...sql.selectable import NamedFromClause
|
||||
from ...util.typing import Self
|
||||
|
||||
|
||||
__all__ = ("Insert", "insert")
|
||||
|
||||
|
||||
def insert(table: _DMLTableArgument) -> Insert:
    """Construct a MySQL/MariaDB-specific variant :class:`_mysql.Insert`
    construct.

    .. container:: inherited_member

        The :func:`sqlalchemy.dialects.mysql.insert` function creates
        a :class:`sqlalchemy.dialects.mysql.Insert`.  This class is based
        on the dialect-agnostic :class:`_sql.Insert` construct which may
        be constructed using the :func:`_sql.insert` function in
        SQLAlchemy Core.

        The :class:`_mysql.Insert` construct includes additional methods
        :meth:`_mysql.Insert.on_duplicate_key_update`.

    """
    # thin public constructor; all behavior lives on the Insert class
    return Insert(table)
|
||||
|
||||
|
||||
class Insert(StandardInsert):
    """MySQL-specific implementation of INSERT.

    Adds methods for MySQL-specific syntaxes such as ON DUPLICATE KEY UPDATE.

    The :class:`~.mysql.Insert` object is created using the
    :func:`sqlalchemy.dialects.mysql.insert` function.

    .. versionadded:: 1.2

    """

    stringify_dialect = "mysql"
    inherit_cache = False

    @property
    def inserted(
        self,
    ) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
        """Provide the "inserted" namespace for an ON DUPLICATE KEY UPDATE
        statement

        MySQL's ON DUPLICATE KEY UPDATE clause allows reference to the row
        that would be inserted, via a special function called ``VALUES()``.
        This attribute provides all columns in this row to be referenceable
        such that they will render within a ``VALUES()`` function inside the
        ON DUPLICATE KEY UPDATE clause. The attribute is named ``.inserted``
        so as not to conflict with the existing
        :meth:`_expression.Insert.values` method.

        .. tip::  The :attr:`_mysql.Insert.inserted` attribute is an instance
            of :class:`_expression.ColumnCollection`, which provides an
            interface the same as that of the :attr:`_schema.Table.c`
            collection described at :ref:`metadata_tables_and_columns`.
            With this collection, ordinary names are accessible like
            attributes (e.g. ``stmt.inserted.some_column``), but special
            names and dictionary method names should be accessed using
            indexed access, such as ``stmt.inserted["column name"]`` or
            ``stmt.inserted["values"]``.  See the docstring for
            :class:`_expression.ColumnCollection` for further examples.

        .. seealso::

            :ref:`mysql_insert_on_duplicate_key_update` - example of how
            to use :attr:`_expression.Insert.inserted`

        """
        return self.inserted_alias.columns

    @util.memoized_property
    def inserted_alias(self) -> NamedFromClause:
        # alias of the target table named "inserted"; memoized so every
        # reference within the statement shares one alias object
        return alias(self.table, name="inserted")

    @_generative
    @_exclusive_against(
        "_post_values_clause",
        msgs={
            "_post_values_clause": "This Insert construct already "
            "has an ON DUPLICATE KEY clause present"
        },
    )
    def on_duplicate_key_update(self, *args: _UpdateArg, **kw: Any) -> Self:
        r"""
        Specifies the ON DUPLICATE KEY UPDATE clause.

        :param \**kw:  Column keys linked to UPDATE values.  The
         values may be any SQL expression or supported literal Python
         values.

         .. warning:: This dictionary does **not** take into account
            Python-specified default UPDATE values or generation functions,
            e.g. those specified using :paramref:`_schema.Column.onupdate`.
            These values will not be exercised for an ON DUPLICATE KEY UPDATE
            style of UPDATE, unless values are manually specified here.

        :param \*args: As an alternative to passing key/value parameters,
         a dictionary or list of 2-tuples can be passed as a single positional
         argument.

         Passing a single dictionary is equivalent to the keyword argument
         form::

            insert().on_duplicate_key_update({"name": "some name"})

         Passing a list of 2-tuples indicates that the parameter assignments
         in the UPDATE clause should be ordered as sent, in a manner similar
         to that described for the :class:`_expression.Update`
         construct overall
         in :ref:`tutorial_parameter_ordered_updates`::

            insert().on_duplicate_key_update(
                [
                    ("name", "some name"),
                    ("value", "some value"),
                ]
            )

        .. versionchanged:: 1.3 parameters can be specified as a dictionary
           or list of 2-tuples; the latter form provides for parameter
           ordering.


        .. versionadded:: 1.2

        .. seealso::

            :ref:`mysql_insert_on_duplicate_key_update`

        """
        # positional and keyword forms are mutually exclusive
        if args and kw:
            raise exc.ArgumentError(
                "Can't pass kwargs and positional arguments simultaneously"
            )

        if args:
            if len(args) > 1:
                raise exc.ArgumentError(
                    "Only a single dictionary or list of tuples "
                    "is accepted positionally."
                )
            values = args[0]
        else:
            values = kw

        # stored on _post_values_clause; the compiler renders it after
        # the VALUES portion of the INSERT
        self._post_values_clause = OnDuplicateClause(
            self.inserted_alias, values
        )
        return self
|
||||
|
||||
|
||||
class OnDuplicateClause(ClauseElement):
    """Represent the ON DUPLICATE KEY UPDATE clause of a MySQL INSERT."""

    __visit_name__ = "on_duplicate_key_update"

    _parameter_ordering: Optional[List[str]] = None

    update: Dict[str, Any]
    stringify_dialect = "mysql"

    def __init__(
        self, inserted_alias: NamedFromClause, update: _UpdateArg
    ) -> None:
        self.inserted_alias = inserted_alias

        # auto-detect that parameters should be ordered: a non-empty
        # list of 2-tuples records the key ordering, then collapses to a
        # plain dict.  This mirrors Update._proces_colparams(), minus the
        # disambiguating flag, since no other use case applies here.
        if (
            isinstance(update, list)
            and update
            and isinstance(update[0], tuple)
        ):
            self._parameter_ordering = [pair[0] for pair in update]
            update = dict(update)

        if isinstance(update, dict):
            if not update:
                raise ValueError(
                    "update parameter dictionary must not be empty"
                )
        elif isinstance(update, ColumnCollection):
            update = dict(update)
        else:
            raise ValueError(
                "update parameter must be a non-empty dictionary "
                "or a ColumnCollection such as the `.c.` collection "
                "of a Table object"
            )
        self.update = update
|
||||
|
||||
|
||||
# Accepted argument forms for Insert.on_duplicate_key_update(): a
# mapping, an ordered list of (column name, value) 2-tuples, or a
# ColumnCollection such as ``table.c``.
_UpdateArg = Union[
    Mapping[Any, Any], List[Tuple[str, Any]], ColumnCollection[Any, Any]
]
|
@ -0,0 +1,243 @@
|
||||
# dialects/mysql/enumerated.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
import re
|
||||
|
||||
from .types import _StringType
|
||||
from ... import exc
|
||||
from ... import sql
|
||||
from ... import util
|
||||
from ...sql import sqltypes
|
||||
|
||||
|
||||
class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum, _StringType):
    """MySQL ENUM type."""

    __visit_name__ = "ENUM"

    native_enum = True

    def __init__(self, *enums, **kw):
        """Construct an ENUM.

        E.g.::

            Column("myenum", ENUM("foo", "bar", "baz"))

        :param enums: The range of valid values for this ENUM.  Values in
          enums are not quoted, they will be escaped and surrounded by single
          quotes when generating the schema.  This object may also be a
          PEP-435-compliant enumerated type.

          .. versionadded: 1.1 added support for PEP-435-compliant enumerated
             types.

        :param strict: This flag has no effect.

          .. versionchanged:: The MySQL ENUM type as well as the base Enum
             type now validates all Python data values.

        :param charset: Optional, a column-level character set for this string
          value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
          value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
          character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
          character set, generates UNICODE in schema.

        :param binary: Defaults to False: short-hand, pick the binary
          collation type that matches the column's character set.  Generates
          BINARY in schema.  This does not affect the type of data stored,
          only the collation of character data.

        """
        # "strict" is accepted and discarded; it has no effect (see above)
        kw.pop("strict", None)
        self._enum_init(enums, kw)
        _StringType.__init__(self, length=self.length, **kw)

    @classmethod
    def adapt_emulated_to_native(cls, impl, **kw):
        """Produce a MySQL native :class:`.mysql.ENUM` from plain
        :class:`.Enum`.

        """
        # carry over the generic Enum's validation/value-production
        # settings unless explicitly overridden
        kw.setdefault("validate_strings", impl.validate_strings)
        kw.setdefault("values_callable", impl.values_callable)
        kw.setdefault("omit_aliases", impl._omit_aliases)
        return cls(**kw)

    def _object_value_for_elem(self, elem):
        # mysql sends back a blank string for any value that
        # was persisted that was not in the enums; that is, it does no
        # validation on the incoming data, it "truncates" it to be
        # the blank string.  Return it straight.
        if elem == "":
            return elem
        else:
            return super()._object_value_for_elem(elem)

    def __repr__(self):
        return util.generic_repr(
            self, to_inspect=[ENUM, _StringType, sqltypes.Enum]
        )
|
||||
|
||||
|
||||
class SET(_StringType):
    """MySQL SET type."""

    __visit_name__ = "SET"

    def __init__(self, *values, **kw):
        """Construct a SET.

        E.g.::

            Column("myset", SET("foo", "bar", "baz"))

        The list of potential values is required in the case that this
        set will be used to generate DDL for a table, or if the
        :paramref:`.SET.retrieve_as_bitwise` flag is set to True.

        :param values: The range of valid values for this SET. The values
          are not quoted, they will be escaped and surrounded by single
          quotes when generating the schema.

        :param convert_unicode: Same flag as that of
         :paramref:`.String.convert_unicode`.

        :param collation: same as that of :paramref:`.String.collation`

        :param charset: same as that of :paramref:`.VARCHAR.charset`.

        :param ascii: same as that of :paramref:`.VARCHAR.ascii`.

        :param unicode: same as that of :paramref:`.VARCHAR.unicode`.

        :param binary: same as that of :paramref:`.VARCHAR.binary`.

        :param retrieve_as_bitwise: if True, the data for the set type will be
          persisted and selected using an integer value, where a set is
          coerced into a bitwise mask for persistence.  MySQL allows this
          mode which has the advantage of being able to store values
          unambiguously, such as the blank string ``''``.  The datatype will
          appear as the expression ``col + 0`` in a SELECT statement, so that
          the value is coerced into an integer value in result sets.
          This flag is required if one wishes to persist a set that can
          store the blank string ``''`` as a value.

          .. warning::

            When using :paramref:`.mysql.SET.retrieve_as_bitwise`, it is
            essential that the list of set values is expressed in the
            **exact same order** as exists on the MySQL database.

        """
        self.retrieve_as_bitwise = kw.pop("retrieve_as_bitwise", False)
        self.values = tuple(values)
        if "" in values and not self.retrieve_as_bitwise:
            raise exc.ArgumentError(
                "Can't use the blank value '' in a SET without "
                "setting retrieve_as_bitwise=True"
            )
        if self.retrieve_as_bitwise:
            # two-way mapping: member -> bit as well as bit -> member
            # (members are strings, bits are ints, so no key collisions)
            bitmap = {}
            for position, member in enumerate(self.values):
                bit = 2**position
                bitmap[member] = bit
                bitmap[bit] = member
            self._bitmap = bitmap
        longest = max(map(len, values), default=0)
        kw.setdefault("length", longest)
        super().__init__(**kw)

    def column_expression(self, colexpr):
        # in bitwise mode, SELECT the column as "col + 0" so the integer
        # mask comes back in result sets instead of the string form
        if not self.retrieve_as_bitwise:
            return colexpr
        return sql.type_coerce(
            sql.type_coerce(colexpr, sqltypes.Integer) + 0, self
        )

    def result_processor(self, dialect, coltype):
        if self.retrieve_as_bitwise:

            def process(value):
                if value is None:
                    return None
                # expand the integer mask back into the set of members
                return set(
                    util.map_bits(self._bitmap.__getitem__, int(value))
                )

        else:
            super_convert = super().result_processor(dialect, coltype)

            def process(value):
                if isinstance(value, str):
                    # MySQLdb returns the raw comma-joined string; parse it
                    if super_convert:
                        value = super_convert(value)
                    return set(re.findall(r"[^,]+", value))
                # mysql-connector-python already returns a set, but its
                # naive split(",") can throw in an empty string
                if value is not None:
                    value.discard("")
                return value

        return process

    def bind_processor(self, dialect):
        super_convert = super().bind_processor(dialect)
        if self.retrieve_as_bitwise:

            def process(value):
                if value is None:
                    return None
                if isinstance(value, (int, str)):
                    # already in a directly persistable form
                    return super_convert(value) if super_convert else value
                # fold the collection of members into the integer mask
                mask = 0
                for member in value:
                    mask |= self._bitmap[member]
                return mask

        else:

            def process(value):
                # accept strings and int (actually bitflag) values directly
                if value is not None and not isinstance(value, (int, str)):
                    value = ",".join(value)
                return super_convert(value) if super_convert else value

        return process

    def adapt(self, impltype, **kw):
        """Produce an adapted copy, carrying the bitwise flag along."""
        kw["retrieve_as_bitwise"] = self.retrieve_as_bitwise
        return util.constructor_copy(self, impltype, *self.values, **kw)

    def __repr__(self):
        """Render a constructor-style representation of this type."""
        return util.generic_repr(
            self,
            to_inspect=[SET, _StringType],
            additional_kw=[("retrieve_as_bitwise", False)],
        )
|
@ -0,0 +1,143 @@
|
||||
# dialects/mysql/expression.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
from ... import exc
|
||||
from ... import util
|
||||
from ...sql import coercions
|
||||
from ...sql import elements
|
||||
from ...sql import operators
|
||||
from ...sql import roles
|
||||
from ...sql.base import _generative
|
||||
from ...sql.base import Generative
|
||||
from ...util.typing import Self
|
||||
|
||||
|
||||
class match(Generative, elements.BinaryExpression):
    """Produce a ``MATCH (X, Y) AGAINST ('TEXT')`` clause.

    E.g.::

        from sqlalchemy import desc
        from sqlalchemy.dialects.mysql import match

        match_expr = match(
            users_table.c.firstname,
            users_table.c.lastname,
            against="Firstname Lastname",
        )

        stmt = (
            select(users_table)
            .where(match_expr.in_boolean_mode())
            .order_by(desc(match_expr))
        )

    Would produce SQL resembling:

    .. sourcecode:: sql

        SELECT id, firstname, lastname
        FROM user
        WHERE MATCH(firstname, lastname) AGAINST (:param_1 IN BOOLEAN MODE)
        ORDER BY MATCH(firstname, lastname) AGAINST (:param_2) DESC

    The :func:`_mysql.match` function is a standalone version of the
    :meth:`_sql.ColumnElement.match` method available on all
    SQL expressions, as when :meth:`_expression.ColumnElement.match` is
    used, but allows to pass multiple columns

    :param cols: column expressions to match against

    :param against: expression to be compared towards

    :param in_boolean_mode: boolean, set "boolean mode" to true

    :param in_natural_language_mode: boolean , set "natural language" to true

    :param with_query_expansion: boolean, set "query expansion" to true

    .. versionadded:: 1.4.19

    .. seealso::

        :meth:`_expression.ColumnElement.match`

    """

    __visit_name__ = "mysql_match"

    inherit_cache = True

    def __init__(self, *cols, **kw):
        if not cols:
            raise exc.ArgumentError("columns are required")

        against = kw.pop("against", None)
        if against is None:
            raise exc.ArgumentError("against is required")
        against = coercions.expect(roles.ExpressionElementRole, against)

        # render the columns as a bare comma-separated list
        lhs = elements.BooleanClauseList._construct_raw(
            operators.comma_op, clauses=cols
        )
        lhs.group = False

        # translate keyword flags into the dialect modifier names
        flag_map = (
            ("in_boolean_mode", "mysql_boolean_mode"),
            ("in_natural_language_mode", "mysql_natural_language"),
            ("with_query_expansion", "mysql_query_expansion"),
        )
        flags = util.immutabledict(
            {target: kw.pop(source, False) for source, target in flag_map}
        )

        if kw:
            raise exc.ArgumentError("unknown arguments: %s" % (", ".join(kw)))

        super().__init__(lhs, against, operators.match_op, modifiers=flags)

    @_generative
    def in_boolean_mode(self) -> Self:
        """Apply the "IN BOOLEAN MODE" modifier to the MATCH expression.

        :return: a new :class:`_mysql.match` instance with modifications
         applied.
        """
        self.modifiers = self.modifiers.union({"mysql_boolean_mode": True})
        return self

    @_generative
    def in_natural_language_mode(self) -> Self:
        """Apply the "IN NATURAL LANGUAGE MODE" modifier to the MATCH
        expression.

        :return: a new :class:`_mysql.match` instance with modifications
         applied.
        """
        self.modifiers = self.modifiers.union({"mysql_natural_language": True})
        return self

    @_generative
    def with_query_expansion(self) -> Self:
        """Apply the "WITH QUERY EXPANSION" modifier to the MATCH expression.

        :return: a new :class:`_mysql.match` instance with modifications
         applied.
        """
        self.modifiers = self.modifiers.union({"mysql_query_expansion": True})
        return self
|
@ -0,0 +1,81 @@
|
||||
# dialects/mysql/json.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from ... import types as sqltypes
|
||||
|
||||
|
||||
class JSON(sqltypes.JSON):
    """MySQL JSON type.

    MySQL supports JSON as of version 5.7; MariaDB supports JSON (as an
    alias for LONGTEXT) as of version 10.2.

    :class:`_mysql.JSON` is selected automatically whenever the base
    :class:`_types.JSON` datatype is used against a MySQL or MariaDB
    backend.  It supports persistence of JSON values as well as the core
    index operations provided by the :class:`_types.JSON` datatype, by
    adapting those operations to render the ``JSON_EXTRACT`` function at
    the database level.

    .. seealso::

        :class:`_types.JSON` - main documentation for the generic
        cross-platform JSON datatype.

    """
|
||||
|
||||
|
||||
class _FormatTypeMixin:
|
||||
def _format_value(self, value):
|
||||
raise NotImplementedError()
|
||||
|
||||
def bind_processor(self, dialect):
|
||||
super_proc = self.string_bind_processor(dialect)
|
||||
|
||||
def process(value):
|
||||
value = self._format_value(value)
|
||||
if super_proc:
|
||||
value = super_proc(value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
def literal_processor(self, dialect):
|
||||
super_proc = self.string_literal_processor(dialect)
|
||||
|
||||
def process(value):
|
||||
value = self._format_value(value)
|
||||
if super_proc:
|
||||
value = super_proc(value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
|
||||
class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType):
    """Renders a single JSON index token as a MySQL JSON path."""

    def _format_value(self, value):
        # integers address array positions; anything else a member key
        if isinstance(value, int):
            return "$[%s]" % value
        return '$."%s"' % value
|
||||
|
||||
|
||||
class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType):
    """Renders a sequence of path elements as one MySQL JSON path."""

    def _format_value(self, value):
        tokens = []
        for elem in value:
            if isinstance(elem, int):
                tokens.append("[%s]" % elem)
            else:
                tokens.append('."%s"' % elem)
        return "$%s" % "".join(tokens)
|
@ -0,0 +1,67 @@
|
||||
# dialects/mysql/mariadb.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
from .base import MariaDBIdentifierPreparer
|
||||
from .base import MySQLDialect
|
||||
from .base import MySQLTypeCompiler
|
||||
from ...sql import sqltypes
|
||||
|
||||
|
||||
class INET4(sqltypes.TypeEngine[str]):
    """MariaDB ``INET4`` column type.

    .. versionadded:: 2.0.37
    """

    __visit_name__ = "INET4"
|
||||
|
||||
|
||||
class INET6(sqltypes.TypeEngine[str]):
    """MariaDB ``INET6`` column type.

    .. versionadded:: 2.0.37
    """

    __visit_name__ = "INET6"
|
||||
|
||||
|
||||
class MariaDBTypeCompiler(MySQLTypeCompiler):
    """Type compiler adding DDL rendering for MariaDB-only types."""

    def visit_INET4(self, type_, **kwargs) -> str:
        return "INET4"

    def visit_INET6(self, type_, **kwargs) -> str:
        return "INET6"
|
||||
|
||||
|
||||
class MariaDBDialect(MySQLDialect):
    """Base MariaDB dialect, layered on top of the MySQL dialect."""

    is_mariadb = True
    supports_statement_cache = True
    name = "mariadb"
    # MariaDB-specific identifier quoting and DDL type rendering
    preparer = MariaDBIdentifierPreparer
    type_compiler_cls = MariaDBTypeCompiler
|
||||
|
||||
|
||||
def loader(driver):
    """Return the MariaDB dialect class for the given DBAPI driver name.

    Drivers that publish an explicit ``mariadb_dialect`` attribute are
    used directly; otherwise a MariaDB subclass of the driver's MySQL
    dialect class is synthesized on the fly.
    """
    package = __import__("sqlalchemy.dialects.mysql.%s" % driver)
    driver_mod = getattr(package.dialects.mysql, driver)

    if hasattr(driver_mod, "mariadb_dialect"):
        return driver_mod.mariadb_dialect

    return type(
        "MariaDBDialect_%s" % driver,
        (MariaDBDialect, driver_mod.dialect),
        {"supports_statement_cache": True},
    )
|
@ -0,0 +1,277 @@
|
||||
# dialects/mysql/mariadbconnector.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
"""
|
||||
|
||||
.. dialect:: mysql+mariadbconnector
|
||||
:name: MariaDB Connector/Python
|
||||
:dbapi: mariadb
|
||||
:connectstring: mariadb+mariadbconnector://<user>:<password>@<host>[:<port>]/<dbname>
|
||||
:url: https://pypi.org/project/mariadb/
|
||||
|
||||
Driver Status
|
||||
-------------
|
||||
|
||||
MariaDB Connector/Python enables Python programs to access MariaDB and MySQL
|
||||
databases using an API which is compliant with the Python DB API 2.0 (PEP-249).
|
||||
It is written in C and uses MariaDB Connector/C client library for client server
|
||||
communication.
|
||||
|
||||
Note that the default driver for a ``mariadb://`` connection URI continues to
|
||||
be ``mysqldb``. ``mariadb+mariadbconnector://`` is required to use this driver.
|
||||
|
||||
.. _mariadb: https://github.com/mariadb-corporation/mariadb-connector-python
|
||||
|
||||
""" # noqa
|
||||
import re
|
||||
from uuid import UUID as _python_UUID
|
||||
|
||||
from .base import MySQLCompiler
|
||||
from .base import MySQLDialect
|
||||
from .base import MySQLExecutionContext
|
||||
from ... import sql
|
||||
from ... import util
|
||||
from ...sql import sqltypes
|
||||
|
||||
|
||||
mariadb_cpy_minimum_version = (1, 0, 1)
|
||||
|
||||
|
||||
class _MariaDBUUID(sqltypes.UUID[sqltypes._UUID_RETURN]):
    # work around JIRA issue
    # https://jira.mariadb.org/browse/CONPY-270. When that issue is fixed,
    # this type can be removed.
    def result_processor(self, dialect, coltype):
        # the driver may hand the UUID back as bytes; normalize to str,
        # parse, and return a UUID object or its string form depending
        # on the as_uuid setting
        as_uuid = self.as_uuid

        def process(value):
            if value is None:
                return None
            if hasattr(value, "decode"):
                value = value.decode("ascii")
            parsed = _python_UUID(value)
            return parsed if as_uuid else str(parsed)

        return process
|
||||
|
||||
|
||||
class MySQLExecutionContext_mariadbconnector(MySQLExecutionContext):
    """Execution context capturing rowcount/lastrowid for mariadbconnector."""

    # captured in post_exec(); see get_lastrowid()
    _lastrowid = None

    def create_server_side_cursor(self):
        return self._dbapi_connection.cursor(buffered=False)

    def create_default_cursor(self):
        return self._dbapi_connection.cursor(buffered=True)

    def post_exec(self):
        super().post_exec()

        self._rowcount = self.cursor.rowcount

        # grab lastrowid immediately after execution for INSERTs that
        # post-fetch it
        if self.isinsert and self.compiled.postfetch_lastrowid:
            self._lastrowid = self.cursor.lastrowid

    def get_lastrowid(self):
        return self._lastrowid
|
||||
|
||||
|
||||
class MySQLCompiler_mariadbconnector(MySQLCompiler):
    """Statement compiler for mariadbconnector; base behavior suffices."""
|
||||
|
||||
|
||||
class MySQLDialect_mariadbconnector(MySQLDialect):
    """MySQL/MariaDB dialect for the MariaDB Connector/Python DBAPI.

    Uses the ``qmark`` paramstyle and installs a UUID result workaround
    for https://jira.mariadb.org/browse/CONPY-270.
    """

    driver = "mariadbconnector"
    supports_statement_cache = True

    # set this to True at the module level to prevent the driver from running
    # against a backend that server detects as MySQL. currently this appears to
    # be unnecessary as MariaDB client libraries have always worked against
    # MySQL databases. However, if this changes at some point, this can be
    # adjusted, but PLEASE ADD A TEST in test/dialect/mysql/test_dialect.py if
    # this change is made at some point to ensure the correct exception
    # is raised at the correct point when running the driver against
    # a MySQL backend.
    # is_mariadb = True

    supports_unicode_statements = True
    encoding = "utf8mb4"
    convert_unicode = True
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True
    supports_native_decimal = True
    default_paramstyle = "qmark"
    execution_ctx_cls = MySQLExecutionContext_mariadbconnector
    statement_compiler = MySQLCompiler_mariadbconnector

    supports_server_side_cursors = True

    colspecs = util.update_copy(
        MySQLDialect.colspecs, {sqltypes.Uuid: _MariaDBUUID}
    )

    @util.memoized_property
    def _dbapi_version(self):
        """Version tuple parsed from the DBAPI module, else (99, 99, 99)."""
        if self.dbapi and hasattr(self.dbapi, "__version__"):
            return tuple(
                [
                    int(x)
                    for x in re.findall(
                        r"(\d+)(?:[-\.]?|$)", self.dbapi.__version__
                    )
                ]
            )
        else:
            return (99, 99, 99)

    def __init__(self, **kwargs):
        """Construct the dialect; raises NotImplementedError when the
        installed DBAPI is older than the supported minimum."""
        super().__init__(**kwargs)
        self.paramstyle = "qmark"
        if self.dbapi is not None:
            if self._dbapi_version < mariadb_cpy_minimum_version:
                raise NotImplementedError(
                    "The minimum required version for MariaDB "
                    "Connector/Python is %s"
                    % ".".join(str(x) for x in mariadb_cpy_minimum_version)
                )

    @classmethod
    def import_dbapi(cls):
        return __import__("mariadb")

    def is_disconnect(self, e, connection, cursor):
        # beyond the base heuristics, this driver reports dead
        # connections via generic Error messages
        if super().is_disconnect(e, connection, cursor):
            return True
        elif isinstance(e, self.dbapi.Error):
            str_e = str(e).lower()
            return "not connected" in str_e or "isn't valid" in str_e
        else:
            return False

    def create_connect_args(self, url):
        """Build the (args, kwargs) pair for ``mariadb.connect()``."""
        opts = url.translate_connect_args()
        opts.update(url.query)

        # URL query values arrive as strings; coerce known options
        int_params = [
            "connect_timeout",
            "read_timeout",
            "write_timeout",
            "client_flag",
            "port",
            "pool_size",
        ]
        bool_params = [
            "local_infile",
            "ssl_verify_cert",
            "ssl",
            "pool_reset_connection",
            "compress",
        ]

        for key in int_params:
            util.coerce_kw_type(opts, key, int)
        for key in bool_params:
            util.coerce_kw_type(opts, key, bool)

        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
        # supports_sane_rowcount.
        client_flag = opts.get("client_flag", 0)
        if self.dbapi is not None:
            try:
                CLIENT_FLAGS = __import__(
                    self.dbapi.__name__ + ".constants.CLIENT"
                ).constants.CLIENT
                client_flag |= CLIENT_FLAGS.FOUND_ROWS
            except (AttributeError, ImportError):
                self.supports_sane_rowcount = False
            opts["client_flag"] = client_flag
        return [[], opts]

    def _extract_error_code(self, exception):
        # DBAPI exceptions are expected to carry an ``errno``; fall back
        # to -1 when the attribute is missing.  Previously a bare
        # ``except:`` here could also swallow unrelated exceptions such
        # as KeyboardInterrupt.
        try:
            rc = exception.errno
        except AttributeError:
            rc = -1
        return rc

    def _detect_charset(self, connection):
        return "utf8mb4"

    def get_isolation_level_values(self, dbapi_connection):
        return (
            "SERIALIZABLE",
            "READ UNCOMMITTED",
            "READ COMMITTED",
            "REPEATABLE READ",
            "AUTOCOMMIT",
        )

    def set_isolation_level(self, connection, level):
        # AUTOCOMMIT is handled via the driver flag; other levels go
        # through the base implementation
        if level == "AUTOCOMMIT":
            connection.autocommit = True
        else:
            connection.autocommit = False
            super().set_isolation_level(connection, level)

    def do_begin_twophase(self, connection, xid):
        connection.execute(
            sql.text("XA BEGIN :xid").bindparams(
                sql.bindparam("xid", xid, literal_execute=True)
            )
        )

    def do_prepare_twophase(self, connection, xid):
        connection.execute(
            sql.text("XA END :xid").bindparams(
                sql.bindparam("xid", xid, literal_execute=True)
            )
        )
        connection.execute(
            sql.text("XA PREPARE :xid").bindparams(
                sql.bindparam("xid", xid, literal_execute=True)
            )
        )

    def do_rollback_twophase(
        self, connection, xid, is_prepared=True, recover=False
    ):
        if not is_prepared:
            connection.execute(
                sql.text("XA END :xid").bindparams(
                    sql.bindparam("xid", xid, literal_execute=True)
                )
            )
        connection.execute(
            sql.text("XA ROLLBACK :xid").bindparams(
                sql.bindparam("xid", xid, literal_execute=True)
            )
        )

    def do_commit_twophase(
        self, connection, xid, is_prepared=True, recover=False
    ):
        if not is_prepared:
            self.do_prepare_twophase(connection, xid)
        connection.execute(
            sql.text("XA COMMIT :xid").bindparams(
                sql.bindparam("xid", xid, literal_execute=True)
            )
        )
|
||||
|
||||
|
||||
# module-level entry point consumed by the SQLAlchemy dialect registry
dialect = MySQLDialect_mariadbconnector
|
@ -0,0 +1,245 @@
|
||||
# dialects/mysql/mysqlconnector.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
r"""
|
||||
.. dialect:: mysql+mysqlconnector
|
||||
:name: MySQL Connector/Python
|
||||
:dbapi: myconnpy
|
||||
:connectstring: mysql+mysqlconnector://<user>:<password>@<host>[:<port>]/<dbname>
|
||||
:url: https://pypi.org/project/mysql-connector-python/
|
||||
|
||||
Driver Status
|
||||
-------------
|
||||
|
||||
MySQL Connector/Python is supported as of SQLAlchemy 2.0.39 to the
|
||||
degree which the driver is functional. There are still ongoing issues
|
||||
with features such as server side cursors which remain disabled until
|
||||
upstream issues are repaired.
|
||||
|
||||
.. versionchanged:: 2.0.39
|
||||
|
||||
The MySQL Connector/Python dialect has been updated to support the
|
||||
latest version of this DBAPI. Previously, MySQL Connector/Python
|
||||
was not fully supported.
|
||||
|
||||
Connecting to MariaDB with MySQL Connector/Python
|
||||
--------------------------------------------------
|
||||
|
||||
MySQL Connector/Python may attempt to pass an incompatible collation to the
|
||||
database when connecting to MariaDB. Experimentation has shown that using
|
||||
``?charset=utf8mb4&collation=utf8mb4_general_ci`` or similar MariaDB-compatible
|
||||
charset/collation will allow connectivity.
|
||||
|
||||
|
||||
""" # noqa
|
||||
|
||||
import re
|
||||
|
||||
from .base import BIT
|
||||
from .base import MariaDBIdentifierPreparer
|
||||
from .base import MySQLCompiler
|
||||
from .base import MySQLDialect
|
||||
from .base import MySQLExecutionContext
|
||||
from .base import MySQLIdentifierPreparer
|
||||
from .mariadb import MariaDBDialect
|
||||
from ... import util
|
||||
|
||||
|
||||
class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
    """Execution context choosing buffered vs. unbuffered cursors."""

    def create_server_side_cursor(self):
        return self._dbapi_connection.cursor(buffered=False)

    def create_default_cursor(self):
        return self._dbapi_connection.cursor(buffered=True)
|
||||
|
||||
|
||||
class MySQLCompiler_mysqlconnector(MySQLCompiler):
    """Statement compiler adjustments for mysql-connector-python."""

    def visit_mod_binary(self, binary, operator, **kw):
        # render modulus with a single, undoubled percent sign
        lhs = self.process(binary.left, **kw)
        rhs = self.process(binary.right, **kw)
        return lhs + " % " + rhs
|
||||
|
||||
|
||||
class IdentifierPreparerCommon_mysqlconnector:
    """Preparer behavior shared by the mysqlconnector preparers.

    Percent-sign doubling is permanently disabled for this driver: the
    ``_double_percents`` property always reads as False and silently
    ignores assignment.
    """

    @property
    def _double_percents(self):
        return False

    @_double_percents.setter
    def _double_percents(self, value):
        # deliberately a no-op; doubling is never wanted for this driver
        pass

    def _escape_identifier(self, value):
        # only quote-escaping applies here, no percent handling
        return value.replace(self.escape_quote, self.escape_to_quote)
|
||||
|
||||
|
||||
class MySQLIdentifierPreparer_mysqlconnector(
    IdentifierPreparerCommon_mysqlconnector, MySQLIdentifierPreparer
):
    """MySQL identifier preparer with mysqlconnector adjustments."""
|
||||
|
||||
|
||||
class MariaDBIdentifierPreparer_mysqlconnector(
    IdentifierPreparerCommon_mysqlconnector, MariaDBIdentifierPreparer
):
    """MariaDB identifier preparer with mysqlconnector adjustments."""
|
||||
|
||||
|
||||
class _myconnpyBIT(BIT):
    def result_processor(self, dialect, coltype):
        """Return None: MySQL-Connector already converts BIT values."""
        return None
|
||||
|
||||
|
||||
class MySQLDialect_mysqlconnector(MySQLDialect):
    """MySQL dialect for the MySQL Connector/Python DBAPI."""

    driver = "mysqlconnector"
    supports_statement_cache = True

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True
    supports_native_decimal = True
    supports_native_bit = True

    # not until https://bugs.mysql.com/bug.php?id=117548
    supports_server_side_cursors = False

    default_paramstyle = "format"
    statement_compiler = MySQLCompiler_mysqlconnector
    execution_ctx_cls = MySQLExecutionContext_mysqlconnector
    preparer = MySQLIdentifierPreparer_mysqlconnector

    colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _myconnpyBIT})

    @classmethod
    def import_dbapi(cls):
        from mysql import connector

        return connector

    def do_ping(self, dbapi_connection):
        # ping without implicit reconnect
        dbapi_connection.ping(False)
        return True

    def create_connect_args(self, url):
        """Build the (args, kwargs) pair for ``mysql.connector.connect()``."""
        opts = url.translate_connect_args(username="user")
        opts.update(url.query)

        # URL query values arrive as strings; coerce known options to
        # their proper types
        for option, kind in (
            ("allow_local_infile", bool),
            ("autocommit", bool),
            ("buffered", bool),
            ("client_flag", int),
            ("compress", bool),
            ("connection_timeout", int),
            ("connect_timeout", int),
            ("consume_results", bool),
            ("force_ipv6", bool),
            ("get_warnings", bool),
            ("pool_reset_session", bool),
            ("pool_size", int),
            ("raise_on_warnings", bool),
            ("raw", bool),
            ("ssl_verify_cert", bool),
            ("use_pure", bool),
            ("use_unicode", bool),
        ):
            util.coerce_kw_type(opts, option, kind)

        # note that "buffered" is set to False by default in MySQL/connector
        # python. If you set it to True, then there is no way to get a server
        # side cursor because the logic is written to disallow that.

        # leaving this at True until
        # https://bugs.mysql.com/bug.php?id=117548 can be fixed
        opts["buffered"] = True

        # FOUND_ROWS must be set in ClientFlag to enable
        # supports_sane_rowcount.
        if self.dbapi is not None:
            try:
                from mysql.connector.constants import ClientFlag

                client_flags = opts.get(
                    "client_flags", ClientFlag.get_default()
                )
                client_flags |= ClientFlag.FOUND_ROWS
                opts["client_flags"] = client_flags
            except Exception:
                pass

        return [[], opts]

    @util.memoized_property
    def _mysqlconnector_version_info(self):
        # None when the DBAPI version string cannot be parsed
        if self.dbapi and hasattr(self.dbapi, "__version__"):
            m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", self.dbapi.__version__)
            if m:
                return tuple(
                    int(x) for x in m.group(1, 2, 3) if x is not None
                )

    def _detect_charset(self, connection):
        return connection.connection.charset

    def _extract_error_code(self, exception):
        return exception.errno

    def is_disconnect(self, e, connection, cursor):
        errnos = (2006, 2013, 2014, 2045, 2055, 2048)
        exceptions = (
            self.dbapi.OperationalError,
            self.dbapi.InterfaceError,
            self.dbapi.ProgrammingError,
        )
        if not isinstance(e, exceptions):
            return False
        return (
            e.errno in errnos
            or "MySQL Connection not available." in str(e)
            or "Connection to MySQL is not available" in str(e)
        )

    def _compat_fetchall(self, rp, charset=None):
        return rp.fetchall()

    def _compat_fetchone(self, rp, charset=None):
        return rp.fetchone()

    def get_isolation_level_values(self, dbapi_connection):
        return (
            "SERIALIZABLE",
            "READ UNCOMMITTED",
            "READ COMMITTED",
            "REPEATABLE READ",
            "AUTOCOMMIT",
        )

    def set_isolation_level(self, connection, level):
        # AUTOCOMMIT is handled via the driver flag; other levels go
        # through the base implementation
        if level == "AUTOCOMMIT":
            connection.autocommit = True
        else:
            connection.autocommit = False
            super().set_isolation_level(connection, level)
|
||||
|
||||
|
||||
class MariaDBDialect_mysqlconnector(
    MariaDBDialect, MySQLDialect_mysqlconnector
):
    """MariaDB flavor of the mysqlconnector dialect."""

    supports_statement_cache = True
    _allows_uuid_binds = False
    preparer = MariaDBIdentifierPreparer_mysqlconnector
|
||||
|
||||
|
||||
# module-level entry points: the default dialect class, plus the MariaDB
# variant picked up via the ``mariadb_dialect`` attribute
dialect = MySQLDialect_mysqlconnector
mariadb_dialect = MariaDBDialect_mysqlconnector
|
@ -0,0 +1,305 @@
|
||||
# dialects/mysql/mysqldb.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
"""
|
||||
|
||||
.. dialect:: mysql+mysqldb
|
||||
:name: mysqlclient (maintained fork of MySQL-Python)
|
||||
:dbapi: mysqldb
|
||||
:connectstring: mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
|
||||
:url: https://pypi.org/project/mysqlclient/
|
||||
|
||||
Driver Status
|
||||
-------------
|
||||
|
||||
The mysqlclient DBAPI is a maintained fork of the
|
||||
`MySQL-Python <https://sourceforge.net/projects/mysql-python>`_ DBAPI
|
||||
that is no longer maintained. `mysqlclient`_ supports Python 2 and Python 3
|
||||
and is very stable.
|
||||
|
||||
.. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python
|
||||
|
||||
.. _mysqldb_unicode:
|
||||
|
||||
Unicode
|
||||
-------
|
||||
|
||||
Please see :ref:`mysql_unicode` for current recommendations on unicode
|
||||
handling.
|
||||
|
||||
.. _mysqldb_ssl:
|
||||
|
||||
SSL Connections
|
||||
----------------
|
||||
|
||||
The mysqlclient and PyMySQL DBAPIs accept an additional dictionary under the
|
||||
key "ssl", which may be specified using the
|
||||
:paramref:`_sa.create_engine.connect_args` dictionary::
|
||||
|
||||
engine = create_engine(
|
||||
"mysql+mysqldb://scott:tiger@192.168.0.134/test",
|
||||
connect_args={
|
||||
"ssl": {
|
||||
"ca": "/home/gord/client-ssl/ca.pem",
|
||||
"cert": "/home/gord/client-ssl/client-cert.pem",
|
||||
"key": "/home/gord/client-ssl/client-key.pem",
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
For convenience, the following keys may also be specified inline within the URL
|
||||
where they will be interpreted into the "ssl" dictionary automatically:
|
||||
"ssl_ca", "ssl_cert", "ssl_key", "ssl_capath", "ssl_cipher",
|
||||
"ssl_check_hostname". An example is as follows::
|
||||
|
||||
connection_uri = (
|
||||
"mysql+mysqldb://scott:tiger@192.168.0.134/test"
|
||||
"?ssl_ca=/home/gord/client-ssl/ca.pem"
|
||||
"&ssl_cert=/home/gord/client-ssl/client-cert.pem"
|
||||
"&ssl_key=/home/gord/client-ssl/client-key.pem"
|
||||
)
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`pymysql_ssl` in the PyMySQL dialect
|
||||
|
||||
|
||||
Using MySQLdb with Google Cloud SQL
|
||||
-----------------------------------
|
||||
|
||||
Google Cloud SQL now recommends use of the MySQLdb dialect. Connect
|
||||
using a URL like the following:
|
||||
|
||||
.. sourcecode:: text
|
||||
|
||||
mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>
|
||||
|
||||
Server Side Cursors
|
||||
-------------------
|
||||
|
||||
The mysqldb dialect supports server-side cursors. See :ref:`mysql_ss_cursors`.
|
||||
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from .base import MySQLCompiler
|
||||
from .base import MySQLDialect
|
||||
from .base import MySQLExecutionContext
|
||||
from .base import MySQLIdentifierPreparer
|
||||
from .base import TEXT
|
||||
from ... import sql
|
||||
from ... import util
|
||||
|
||||
|
||||
class MySQLExecutionContext_mysqldb(MySQLExecutionContext):
    # No mysqldb-specific execution behavior; exists so the dialect can
    # designate its own execution-context class for future customization.
    pass
|
||||
|
||||
|
||||
class MySQLCompiler_mysqldb(MySQLCompiler):
    # No mysqldb-specific SQL compilation behavior; placeholder subclass
    # assigned as this dialect's statement_compiler.
    pass
|
||||
|
||||
|
||||
class MySQLDialect_mysqldb(MySQLDialect):
    """MySQL dialect implemented on top of the mysqlclient / MySQL-Python
    ("MySQLdb") DBAPI.
    """

    driver = "mysqldb"
    supports_statement_cache = True
    supports_unicode_statements = True
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True

    supports_native_decimal = True

    default_paramstyle = "format"
    execution_ctx_cls = MySQLExecutionContext_mysqldb
    statement_compiler = MySQLCompiler_mysqldb
    preparer = MySQLIdentifierPreparer

    def __init__(self, **kwargs):
        """Initialize the dialect and record the DBAPI version tuple."""
        super().__init__(**kwargs)
        # (0, 0, 0) when the DBAPI module is absent or carries no
        # __version__ attribute
        self._mysql_dbapi_version = (
            self._parse_dbapi_version(self.dbapi.__version__)
            if self.dbapi is not None and hasattr(self.dbapi, "__version__")
            else (0, 0, 0)
        )

    def _parse_dbapi_version(self, version):
        # parse a leading "major.minor[.patch]" prefix; any trailing text
        # (e.g. "2.1.1final") is ignored by the regex
        m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", version)
        if m:
            return tuple(int(x) for x in m.group(1, 2, 3) if x is not None)
        else:
            return (0, 0, 0)

    @util.langhelpers.memoized_property
    def supports_server_side_cursors(self):
        # server-side cursors require the SSCursor class in the DBAPI;
        # probe for it once and memoize the result
        try:
            cursors = __import__("MySQLdb.cursors").cursors
            self._sscursor = cursors.SSCursor
            return True
        except (ImportError, AttributeError):
            return False

    @classmethod
    def import_dbapi(cls):
        """Import and return the MySQLdb DBAPI module."""
        return __import__("MySQLdb")

    def on_connect(self):
        """Return a connect-time hook that re-issues SET NAMES so the
        server-side session charset matches the client driver's charset."""
        super_ = super().on_connect()

        def on_connect(conn):
            if super_ is not None:
                super_(conn)

            charset_name = conn.character_set_name()

            if charset_name is not None:
                cursor = conn.cursor()
                cursor.execute("SET NAMES %s" % charset_name)
                cursor.close()

        return on_connect

    def do_ping(self, dbapi_connection):
        # liveness check; ping() raises if the connection is gone
        dbapi_connection.ping()
        return True

    def do_executemany(self, cursor, statement, parameters, context=None):
        rowcount = cursor.executemany(statement, parameters)
        # propagate the aggregate rowcount to the execution context so
        # supports_sane_multi_rowcount is honored
        if context is not None:
            context._rowcount = rowcount

    def _check_unicode_returns(self, connection):
        # work around issue fixed in
        # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
        # specific issue w/ the utf8mb4_bin collation and unicode returns

        collation = connection.exec_driver_sql(
            "show collation where %s = 'utf8mb4' and %s = 'utf8mb4_bin'"
            % (
                self.identifier_preparer.quote("Charset"),
                self.identifier_preparer.quote("Collation"),
            )
        ).scalar()
        has_utf8mb4_bin = self.server_version_info > (5,) and collation
        if has_utf8mb4_bin:
            # add an extra round-trip test using the utf8mb4_bin collation
            additional_tests = [
                sql.collate(
                    sql.cast(
                        sql.literal_column("'test collated returns'"),
                        TEXT(charset="utf8mb4"),
                    ),
                    "utf8mb4_bin",
                )
            ]
        else:
            additional_tests = []
        return super()._check_unicode_returns(connection, additional_tests)

    def create_connect_args(self, url, _translate_args=None):
        """Build the (args, kwargs) pair passed to MySQLdb.connect()
        from a SQLAlchemy URL.

        :param _translate_args: mapping of SQLAlchemy URL attribute names
          to DBAPI keyword names; overridden by subclasses (e.g. pymysql).
        """
        if _translate_args is None:
            _translate_args = dict(
                database="db", username="user", password="passwd"
            )

        opts = url.translate_connect_args(**_translate_args)
        opts.update(url.query)

        # query-string values arrive as strings; coerce the known
        # numeric/boolean driver options
        util.coerce_kw_type(opts, "compress", bool)
        util.coerce_kw_type(opts, "connect_timeout", int)
        util.coerce_kw_type(opts, "read_timeout", int)
        util.coerce_kw_type(opts, "write_timeout", int)
        util.coerce_kw_type(opts, "client_flag", int)
        util.coerce_kw_type(opts, "local_infile", bool)
        # Note: using either of the below will cause all strings to be
        # returned as Unicode, both in raw SQL operations and with column
        # types like String and MSString.
        util.coerce_kw_type(opts, "use_unicode", bool)
        util.coerce_kw_type(opts, "charset", str)

        # Rich values 'cursorclass' and 'conv' are not supported via
        # query string.

        # fold flat "ssl_*" query-string keys into the nested "ssl"
        # dictionary that the DBAPI expects
        ssl = {}
        keys = [
            ("ssl_ca", str),
            ("ssl_key", str),
            ("ssl_cert", str),
            ("ssl_capath", str),
            ("ssl_cipher", str),
            ("ssl_check_hostname", bool),
        ]
        for key, kw_type in keys:
            if key in opts:
                ssl[key[4:]] = opts[key]
                util.coerce_kw_type(ssl, key[4:], kw_type)
                del opts[key]
        if ssl:
            opts["ssl"] = ssl

        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
        # supports_sane_rowcount.
        client_flag = opts.get("client_flag", 0)

        client_flag_found_rows = self._found_rows_client_flag()
        if client_flag_found_rows is not None:
            client_flag |= client_flag_found_rows
            opts["client_flag"] = client_flag
        return [[], opts]

    def _found_rows_client_flag(self):
        # return the DBAPI's FOUND_ROWS client-flag constant, or None if
        # the constants module is unavailable
        if self.dbapi is not None:
            try:
                CLIENT_FLAGS = __import__(
                    self.dbapi.__name__ + ".constants.CLIENT"
                ).constants.CLIENT
            except (AttributeError, ImportError):
                return None
            else:
                return CLIENT_FLAGS.FOUND_ROWS
        else:
            return None

    def _extract_error_code(self, exception):
        # MySQLdb exception args are (errno, message); return the errno
        return exception.args[0]

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""

        try:
            # note: the SQL here would be
            # "SHOW VARIABLES LIKE 'character_set%%'"
            cset_name = connection.connection.character_set_name
        except AttributeError:
            util.warn(
                "No 'character_set_name' can be detected with "
                "this MySQL-Python version; "
                "please upgrade to a recent version of MySQL-Python. "
                "Assuming latin1."
            )
            return "latin1"
        else:
            return cset_name()

    def get_isolation_level_values(self, dbapi_connection):
        # isolation levels accepted by set_isolation_level()
        return (
            "SERIALIZABLE",
            "READ UNCOMMITTED",
            "READ COMMITTED",
            "REPEATABLE READ",
            "AUTOCOMMIT",
        )

    def set_isolation_level(self, dbapi_connection, level):
        # AUTOCOMMIT is toggled at the driver level; all other levels
        # go through the generic SET TRANSACTION path with autocommit off
        if level == "AUTOCOMMIT":
            dbapi_connection.autocommit(True)
        else:
            dbapi_connection.autocommit(False)
            super().set_isolation_level(dbapi_connection, level)
|
||||
|
||||
|
||||
# plugin entry point: resolved by create_engine() for "mysql+mysqldb://" URLs
dialect = MySQLDialect_mysqldb
|
@ -0,0 +1,114 @@
|
||||
# dialects/mysql/provision.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from ... import exc
|
||||
from ...testing.provision import configure_follower
|
||||
from ...testing.provision import create_db
|
||||
from ...testing.provision import drop_db
|
||||
from ...testing.provision import generate_driver_url
|
||||
from ...testing.provision import temp_table_keyword_args
|
||||
from ...testing.provision import upsert
|
||||
|
||||
|
||||
@generate_driver_url.for_db("mysql", "mariadb")
def generate_driver_url(url, driver, query_str):
    """Rewrite *url* to use *driver*, returning None if the resulting
    dialect cannot be loaded.
    """
    backend = url.get_backend_name()

    # NOTE: at the moment, tests are running mariadbconnector
    # against both mariadb and mysql backends. if we want this to be
    # limited, do the decision making here to reject a "mysql+mariadbconnector"
    # URL. Optionally also re-enable the module level
    # MySQLDialect_mariadbconnector.is_mysql flag as well, which must include
    # a unit and/or functional test.

    # all the Jenkins tests have been running mysqlclient Python library
    # built against mariadb client drivers for years against all MySQL /
    # MariaDB versions going back to MySQL 5.6, currently they can talk
    # to MySQL databases without problems.

    # a "mysql" URL may actually point at a MariaDB server; reclassify it
    if backend == "mysql" and url.get_dialect()._is_mariadb_from_url(url):
        backend = "mariadb"

    candidate = url.set(
        drivername=f"{backend}+{driver}"
    ).update_query_string(query_str)

    # per-driver query-string fixups
    if driver == "mariadbconnector":
        candidate = candidate.difference_update_query(["charset"])
    elif driver == "mysqlconnector":
        candidate = candidate.update_query_pairs(
            [("collation", "utf8mb4_general_ci")]
        )

    # verify the rewritten URL actually resolves to an installed dialect
    try:
        candidate.get_dialect()
    except exc.NoSuchModuleError:
        return None
    return candidate
|
||||
|
||||
|
||||
@create_db.for_db("mysql", "mariadb")
def _mysql_create_db(cfg, eng, ident):
    """Create the main test database plus its two auxiliary schemas."""
    # best-effort cleanup of any leftover databases from a prior run
    with eng.begin() as conn:
        try:
            _mysql_drop_db(cfg, conn, ident)
        except Exception:
            pass

    with eng.begin() as conn:
        for suffix in ("", "_test_schema", "_test_schema_2"):
            conn.exec_driver_sql(
                "CREATE DATABASE %s%s CHARACTER SET utf8mb4" % (ident, suffix)
            )
|
||||
|
||||
|
||||
@configure_follower.for_db("mysql", "mariadb")
def _mysql_configure_follower(config, ident):
    """Point a follower config at the per-identifier test schemas."""
    config.test_schema = ident + "_test_schema"
    config.test_schema_2 = ident + "_test_schema_2"
|
||||
|
||||
|
||||
@drop_db.for_db("mysql", "mariadb")
def _mysql_drop_db(cfg, eng, ident):
    """Drop both auxiliary test schemas, then the main database."""
    with eng.begin() as conn:
        for name in (
            "%s_test_schema" % ident,
            "%s_test_schema_2" % ident,
            ident,
        ):
            conn.exec_driver_sql("DROP DATABASE " + name)
|
||||
|
||||
|
||||
@temp_table_keyword_args.for_db("mysql", "mariadb")
def _mysql_temp_table_keyword_args(cfg, eng):
    """MySQL/MariaDB spell temp tables as CREATE TEMPORARY TABLE."""
    keyword_args = {"prefixes": ["TEMPORARY"]}
    return keyword_args
|
||||
|
||||
|
||||
@upsert.for_db("mariadb")
def _upsert(
    cfg, table, returning, *, set_lambda=None, sort_by_parameter_order=False
):
    """Build a MariaDB upsert (INSERT .. ON DUPLICATE KEY UPDATE).

    When no *set_lambda* is given, a no-op update of the first primary
    key column is used so the statement is still a valid upsert.
    """
    from sqlalchemy.dialects.mysql import insert

    stmt = insert(table)

    if set_lambda:
        stmt = stmt.on_duplicate_key_update(**set_lambda(stmt.inserted))
    else:
        first_pk = table.primary_key.c[0]
        stmt = stmt.on_duplicate_key_update({first_pk.key: first_pk})

    return stmt.returning(
        *returning, sort_by_parameter_order=sort_by_parameter_order
    )
|
@ -0,0 +1,136 @@
|
||||
# dialects/mysql/pymysql.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
r"""
|
||||
|
||||
.. dialect:: mysql+pymysql
|
||||
:name: PyMySQL
|
||||
:dbapi: pymysql
|
||||
:connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>[?<options>]
|
||||
:url: https://pymysql.readthedocs.io/
|
||||
|
||||
Unicode
|
||||
-------
|
||||
|
||||
Please see :ref:`mysql_unicode` for current recommendations on unicode
|
||||
handling.
|
||||
|
||||
.. _pymysql_ssl:
|
||||
|
||||
SSL Connections
|
||||
------------------
|
||||
|
||||
The PyMySQL DBAPI accepts the same SSL arguments as that of MySQLdb,
|
||||
described at :ref:`mysqldb_ssl`. See that section for additional examples.
|
||||
|
||||
If the server uses an automatically-generated certificate that is self-signed
|
||||
or does not match the host name (as seen from the client), it may also be
|
||||
necessary to indicate ``ssl_check_hostname=false`` in PyMySQL::
|
||||
|
||||
connection_uri = (
|
||||
"mysql+pymysql://scott:tiger@192.168.0.134/test"
|
||||
"?ssl_ca=/home/gord/client-ssl/ca.pem"
|
||||
"&ssl_cert=/home/gord/client-ssl/client-cert.pem"
|
||||
"&ssl_key=/home/gord/client-ssl/client-key.pem"
|
||||
"&ssl_check_hostname=false"
|
||||
)
|
||||
|
||||
MySQL-Python Compatibility
|
||||
--------------------------
|
||||
|
||||
The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver,
|
||||
and targets 100% compatibility. Most behavioral notes for MySQL-python apply
|
||||
to the pymysql driver as well.
|
||||
|
||||
""" # noqa
|
||||
|
||||
from .mysqldb import MySQLDialect_mysqldb
|
||||
from ...util import langhelpers
|
||||
|
||||
|
||||
class MySQLDialect_pymysql(MySQLDialect_mysqldb):
    """MySQL dialect for the pure-Python PyMySQL DBAPI, layered on the
    mysqldb dialect since PyMySQL targets MySQLdb compatibility.
    """

    driver = "pymysql"
    supports_statement_cache = True

    description_encoding = None

    @langhelpers.memoized_property
    def supports_server_side_cursors(self):
        # probe once for pymysql's SSCursor; memoized for the dialect's life
        try:
            cursors = __import__("pymysql.cursors").cursors
            self._sscursor = cursors.SSCursor
            return True
        except (ImportError, AttributeError):
            return False

    @classmethod
    def import_dbapi(cls):
        """Import and return the pymysql DBAPI module."""
        return __import__("pymysql")

    @langhelpers.memoized_property
    def _send_false_to_ping(self):
        """determine if pymysql has deprecated, changed the default of,
        or removed the 'reconnect' argument of connection.ping().

        See #10492 and
        https://github.com/PyMySQL/mysqlclient/discussions/651#discussioncomment-7308971
        for background.

        """  # noqa: E501

        try:
            Connection = __import__(
                "pymysql.connections"
            ).connections.Connection
        except (ImportError, AttributeError):
            # can't introspect; assume the old signature that accepts
            # ping(False)
            return True
        else:
            # inspect ping()'s signature: does it still take a positional
            # "reconnect" argument, and does it default to something other
            # than False?
            insp = langhelpers.get_callable_argspec(Connection.ping)
            try:
                reconnect_arg = insp.args[1]
            except IndexError:
                # ping(self) only; the argument was removed
                return False
            else:
                return reconnect_arg == "reconnect" and (
                    not insp.defaults or insp.defaults[0] is not False
                )

    def do_ping(self, dbapi_connection):
        # pass reconnect=False only when the installed pymysql accepts it
        if self._send_false_to_ping:
            dbapi_connection.ping(False)
        else:
            dbapi_connection.ping()

        return True

    def create_connect_args(self, url, _translate_args=None):
        """Build connect args; pymysql uses "user" but otherwise keeps the
        SQLAlchemy URL attribute names (database/password)."""
        if _translate_args is None:
            _translate_args = dict(username="user")
        return super().create_connect_args(
            url, _translate_args=_translate_args
        )

    def is_disconnect(self, e, connection, cursor):
        # extend the base check with pymysql-specific message sniffing
        if super().is_disconnect(e, connection, cursor):
            return True
        elif isinstance(e, self.dbapi.Error):
            str_e = str(e).lower()
            return (
                "already closed" in str_e or "connection was killed" in str_e
            )
        else:
            return False

    def _extract_error_code(self, exception):
        # pymysql may wrap the original exception as args[0]; unwrap it
        # before pulling the (errno, message) tuple's errno
        if isinstance(exception.args[0], Exception):
            exception = exception.args[0]
        return exception.args[0]
|
||||
|
||||
|
||||
# plugin entry point: resolved by create_engine() for "mysql+pymysql://" URLs
dialect = MySQLDialect_pymysql
|
@ -0,0 +1,139 @@
|
||||
# dialects/mysql/pyodbc.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
r"""
|
||||
|
||||
|
||||
.. dialect:: mysql+pyodbc
|
||||
:name: PyODBC
|
||||
:dbapi: pyodbc
|
||||
:connectstring: mysql+pyodbc://<username>:<password>@<dsnname>
|
||||
:url: https://pypi.org/project/pyodbc/
|
||||
|
||||
.. note::
|
||||
|
||||
The PyODBC for MySQL dialect is **not tested as part of
|
||||
SQLAlchemy's continuous integration**.
|
||||
The recommended MySQL dialects are mysqlclient and PyMySQL.
|
||||
However, if you want to use the mysql+pyodbc dialect and require
|
||||
full support for ``utf8mb4`` characters (including supplementary
|
||||
characters like emoji) be sure to use a current release of
|
||||
MySQL Connector/ODBC and specify the "ANSI" (**not** "Unicode")
|
||||
version of the driver in your DSN or connection string.
|
||||
|
||||
Pass through exact pyodbc connection string::
|
||||
|
||||
import urllib
|
||||
|
||||
connection_string = (
|
||||
"DRIVER=MySQL ODBC 8.0 ANSI Driver;"
|
||||
"SERVER=localhost;"
|
||||
"PORT=3307;"
|
||||
"DATABASE=mydb;"
|
||||
"UID=root;"
|
||||
"PWD=(whatever);"
|
||||
"charset=utf8mb4;"
|
||||
)
|
||||
params = urllib.parse.quote_plus(connection_string)
|
||||
connection_uri = "mysql+pyodbc:///?odbc_connect=%s" % params
|
||||
|
||||
""" # noqa
|
||||
|
||||
import re
|
||||
|
||||
from .base import MySQLDialect
|
||||
from .base import MySQLExecutionContext
|
||||
from .types import TIME
|
||||
from ... import exc
|
||||
from ... import util
|
||||
from ...connectors.pyodbc import PyODBCConnector
|
||||
from ...sql.sqltypes import Time
|
||||
|
||||
|
||||
class _pyodbcTIME(TIME):
    """TIME variant for pyodbc, which already returns datetime.time."""

    def result_processor(self, dialect, coltype):
        def passthrough(value):
            # pyodbc returns a datetime.time object; no need to convert
            return value

        return passthrough
|
||||
|
||||
|
||||
class MySQLExecutionContext_pyodbc(MySQLExecutionContext):
    """Execution context that obtains lastrowid via an explicit
    SELECT LAST_INSERT_ID() round trip."""

    def get_lastrowid(self):
        cursor = self.create_cursor()
        cursor.execute("SELECT LAST_INSERT_ID()")
        row = cursor.fetchone()
        cursor.close()
        return row[0]
|
||||
|
||||
|
||||
class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
    """MySQL dialect for the pyodbc DBAPI (ODBC bridge)."""

    supports_statement_cache = True
    colspecs = util.update_copy(MySQLDialect.colspecs, {Time: _pyodbcTIME})
    supports_unicode_statements = True
    execution_ctx_cls = MySQLExecutionContext_pyodbc

    pyodbc_driver_name = "MySQL"

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""

        # Prefer 'character_set_results' for the current connection over the
        # value in the driver.  SET NAMES or individual variable SETs will
        # change the charset without updating the driver's view of the world.
        #
        # If it's decided that issuing that sort of SQL leaves you SOL, then
        # this can prefer the driver value.

        # set this to None as _fetch_setting attempts to use it (None is OK)
        self._connection_charset = None
        try:
            value = self._fetch_setting(connection, "character_set_client")
            if value:
                return value
        except exc.DBAPIError:
            pass

        util.warn(
            "Could not detect the connection character set. "
            "Assuming latin1."
        )
        return "latin1"

    def _get_server_version_info(self, connection):
        # bypass PyODBCConnector's version detection and use MySQL's own
        # server-version parsing
        return MySQLDialect._get_server_version_info(self, connection)

    def _extract_error_code(self, exception):
        """Return the numeric MySQL error code embedded in *exception*'s
        args as "(NNNN)", or None when no such code is present.

        Bug fix: the previous version called ``m.group(1)`` without
        checking whether ``search()`` matched, raising AttributeError for
        exception text lacking a "(NNNN)" pattern.
        """
        m = re.compile(r"\((\d+)\)").search(str(exception.args))
        if m is None:
            return None
        return int(m.group(1))

    def on_connect(self):
        """Return a connect-time hook that configures pyodbc's string
        encodings to UTF-8."""
        super_ = super().on_connect()

        def on_connect(conn):
            if super_ is not None:
                super_(conn)

            # declare Unicode encoding for pyodbc as per
            # https://github.com/mkleehammer/pyodbc/wiki/Unicode
            pyodbc_SQL_CHAR = 1  # pyodbc.SQL_CHAR
            pyodbc_SQL_WCHAR = -8  # pyodbc.SQL_WCHAR
            conn.setdecoding(pyodbc_SQL_CHAR, encoding="utf-8")
            conn.setdecoding(pyodbc_SQL_WCHAR, encoding="utf-8")
            conn.setencoding(encoding="utf-8")

        return on_connect
|
||||
|
||||
|
||||
# plugin entry point: resolved by create_engine() for "mysql+pyodbc://" URLs
dialect = MySQLDialect_pyodbc
|
@ -0,0 +1,677 @@
|
||||
# dialects/mysql/reflection.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
import re
|
||||
|
||||
from .enumerated import ENUM
|
||||
from .enumerated import SET
|
||||
from .types import DATETIME
|
||||
from .types import TIME
|
||||
from .types import TIMESTAMP
|
||||
from ... import log
|
||||
from ... import types as sqltypes
|
||||
from ... import util
|
||||
|
||||
|
||||
class ReflectedState:
    """Stores raw information about a SHOW CREATE TABLE statement."""

    def __init__(self):
        # table identity and per-category accumulators, filled in by
        # MySQLTableDefinitionParser.parse()
        self.table_name = None
        self.columns = []
        self.keys = []
        self.table_options = {}
        self.fk_constraints = []
        self.ck_constraints = []
||||
|
||||
|
||||
@log.class_logger
|
||||
class MySQLTableDefinitionParser:
|
||||
"""Parses the results of a SHOW CREATE TABLE statement."""
|
||||
|
||||
def __init__(self, dialect, preparer):
    # dialect supplies ischema_names and the dialect name used to
    # namespace reflected options; preparer supplies quoting rules
    self.dialect = dialect
    self.preparer = preparer
    # compile all parsing regexes once up front
    self._prep_regexes()
|
||||
|
||||
def parse(self, show_create, charset):
    """Parse SHOW CREATE TABLE output into a ReflectedState.

    Each line is classified by its leading text and dispatched to the
    appropriate ``_parse_*`` helper; classification order matters.

    :param show_create: full SHOW CREATE TABLE text
    :param charset: connection charset, recorded on the state
    """
    state = ReflectedState()
    state.charset = charset
    for line in re.split(r"\r?\n", show_create):
        # column lines are indented and start with the identifier quote
        if line.startswith(" " + self.preparer.initial_quote):
            self._parse_column(line, state)
        # a regular table options line
        elif line.startswith(") "):
            self._parse_table_options(line, state)
        # an ANSI-mode table options line
        elif line == ")":
            pass
        elif line.startswith("CREATE "):
            self._parse_table_name(line, state)
        elif "PARTITION" in line:
            self._parse_partition_options(line, state)
        # Not present in real reflection, but may be if
        # loading from a file.
        elif not line:
            pass
        else:
            # remaining lines are KEY / CONSTRAINT declarations
            type_, spec = self._parse_constraints(line)
            if type_ is None:
                util.warn("Unknown schema content: %r" % line)
            elif type_ == "key":
                state.keys.append(spec)
            elif type_ == "fk_constraint":
                state.fk_constraints.append(spec)
            elif type_ == "ck_constraint":
                state.ck_constraints.append(spec)
            else:
                # e.g. "partition" specs are recognized but not collected
                pass
    return state
|
||||
|
||||
def _check_view(self, sql: str) -> bool:
    """Return True when *sql* is a CREATE ... VIEW statement."""
    matched = self._re_is_view.match(sql)
    return matched is not None
|
||||
|
||||
def _parse_constraints(self, line):
    """Parse a KEY or CONSTRAINT line.

    :param line: A line of SHOW CREATE TABLE output

    :return: 2-tuple of (kind, spec); kind is ``None`` when the line
      matched no known pattern, in which case spec is the raw line.
    """

    # KEY
    m = self._re_key.match(line)
    if m:
        spec = m.groupdict()
        # convert columns into name, length pairs
        # NOTE: we may want to consider SHOW INDEX as the
        # format of indexes in MySQL becomes more complex
        spec["columns"] = self._parse_keyexprs(spec["columns"])
        if spec["version_sql"]:
            # a trailing version-guarded clause may carry a fulltext
            # parser name; pull it out if present
            m2 = self._re_key_version_sql.match(spec["version_sql"])
            if m2 and m2.groupdict()["parser"]:
                spec["parser"] = m2.groupdict()["parser"]
        if spec["parser"]:
            # strip identifier quoting from the parser name
            spec["parser"] = self.preparer.unformat_identifiers(
                spec["parser"]
            )[0]
        return "key", spec

    # FOREIGN KEY CONSTRAINT
    m = self._re_fk_constraint.match(line)
    if m:
        spec = m.groupdict()
        spec["table"] = self.preparer.unformat_identifiers(spec["table"])
        # keep only column names; length/modifier parts don't apply here
        spec["local"] = [c[0] for c in self._parse_keyexprs(spec["local"])]
        spec["foreign"] = [
            c[0] for c in self._parse_keyexprs(spec["foreign"])
        ]
        return "fk_constraint", spec

    # CHECK constraint
    m = self._re_ck_constraint.match(line)
    if m:
        spec = m.groupdict()
        return "ck_constraint", spec

    # PARTITION and SUBPARTITION
    m = self._re_partition.match(line)
    if m:
        # Punt!
        return "partition", line

    # No match.
    return (None, line)
|
||||
|
||||
def _parse_table_name(self, line, state):
    """Extract the table name.

    :param line: The first line of SHOW CREATE TABLE
    """
    regex, cleanup = self._pr_name
    matched = regex.match(line)
    if matched is not None:
        state.table_name = cleanup(matched.group("name"))
|
||||
|
||||
def _parse_table_options(self, line, state):
    """Build a dictionary of all reflected table-level options.

    :param line: The final line of SHOW CREATE TABLE output.
    """

    options = {}

    if line and line != ")":
        rest_of_line = line
        for regex, cleanup in self._pr_options:
            m = regex.search(rest_of_line)
            if not m:
                continue
            directive, value = m.group("directive"), m.group("val")
            if cleanup:
                value = cleanup(value)
            options[directive.lower()] = value
            # consume the matched option text so later patterns cannot
            # re-match inside it
            rest_of_line = regex.sub("", rest_of_line)

    # drop options that are either reflected elsewhere or not
    # meaningful as table options
    for nope in ("auto_increment", "data directory", "index directory"):
        options.pop(nope, None)

    for opt, val in options.items():
        # namespace each option with the dialect name, e.g. "mysql_engine"
        state.table_options["%s_%s" % (self.dialect.name, opt)] = val
|
||||
|
||||
def _parse_partition_options(self, line, state):
    """Parse a PARTITION-related line into namespaced table options.

    Definition lines (PARTITION/SUBPARTITION ...) are accumulated as raw
    text under "<dialect>_(sub)partition_definitions"; other directives
    (e.g. PARTITION BY) are stored as single option values.
    """
    options = {}
    new_line = line[:]

    # strip the leading "(" and indentation from the raw line
    while new_line.startswith("(") or new_line.startswith(" "):
        new_line = new_line[1:]

    for regex, cleanup in self._pr_options:
        m = regex.search(new_line)
        # only partition-related option patterns apply on these lines
        if not m or "PARTITION" not in regex.pattern:
            continue

        directive = m.group("directive")
        directive = directive.lower()
        is_subpartition = directive == "subpartition"

        if directive == "partition" or is_subpartition:
            # a definition line: normalize away trailing comment/paren
            # noise, then record the raw remaining text
            new_line = new_line.replace(") */", "")
            new_line = new_line.replace(",", "")
            if is_subpartition and new_line.endswith(")"):
                new_line = new_line[:-1]
            if self.dialect.name == "mariadb" and new_line.endswith(")"):
                if (
                    "MAXVALUE" in new_line
                    or "MINVALUE" in new_line
                    or "ENGINE" in new_line
                ):
                    # final line of MariaDB partition endswith ")"
                    new_line = new_line[:-1]

            defs = "%s_%s_definitions" % (self.dialect.name, directive)
            options[defs] = new_line

        else:
            # single-valued directive such as "partition by"
            directive = directive.replace(" ", "_")
            value = m.group("val")
            if cleanup:
                value = cleanup(value)
            options[directive] = value
            break

    for opt, val in options.items():
        part_def = "%s_partition_definitions" % (self.dialect.name)
        subpart_def = "%s_subpartition_definitions" % (self.dialect.name)
        if opt == part_def or opt == subpart_def:
            # builds a string of definitions
            if opt not in state.table_options:
                state.table_options[opt] = val
            else:
                state.table_options[opt] = "%s, %s" % (
                    state.table_options[opt],
                    val,
                )
        else:
            state.table_options["%s_%s" % (self.dialect.name, opt)] = val
|
||||
|
||||
def _parse_column(self, line, state):
    """Extract column details.

    Falls back to a 'minimal support' variant if full parse fails.

    :param line: Any column-bearing line from SHOW CREATE TABLE
    """

    spec = None
    m = self._re_column.match(line)
    if m:
        spec = m.groupdict()
        spec["full"] = True
    else:
        # fall back to the loose pattern that captures only name/type
        m = self._re_column_loose.match(line)
        if m:
            spec = m.groupdict()
            spec["full"] = False
    if not spec:
        util.warn("Unknown column definition %r" % line)
        return
    if not spec["full"]:
        util.warn("Incomplete reflection of column definition %r" % line)

    name, type_, args = spec["name"], spec["coltype"], spec["arg"]

    try:
        col_type = self.dialect.ischema_names[type_]
    except KeyError:
        util.warn(
            "Did not recognize type '%s' of column '%s'" % (type_, name)
        )
        col_type = sqltypes.NullType

    # Column type positional arguments eg. varchar(32)
    if args is None or args == "":
        type_args = []
    elif args[0] == "'" and args[-1] == "'":
        # quoted args, e.g. enum('a','b')
        type_args = self._re_csv_str.findall(args)
    else:
        type_args = [int(v) for v in self._re_csv_int.findall(args)]

    # Column type keyword options
    type_kw = {}

    if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)):
        if type_args:
            # temporal types take fractional-seconds precision as the
            # sole positional argument
            type_kw["fsp"] = type_args.pop(0)

    for kw in ("unsigned", "zerofill"):
        if spec.get(kw, False):
            type_kw[kw] = True
    for kw in ("charset", "collate"):
        if spec.get(kw, False):
            type_kw[kw] = spec[kw]
    if issubclass(col_type, (ENUM, SET)):
        # strip surrounding quotes / unescape the member values
        type_args = _strip_values(type_args)

        if issubclass(col_type, SET) and "" in type_args:
            type_kw["retrieve_as_bitwise"] = True

    type_instance = col_type(*type_args, **type_kw)

    col_kw = {}

    # NOT NULL
    col_kw["nullable"] = True
    # this can be "NULL" in the case of TIMESTAMP
    if spec.get("notnull", False) == "NOT NULL":
        col_kw["nullable"] = False
    # For generated columns, the nullability is marked in a different place
    if spec.get("notnull_generated", False) == "NOT NULL":
        col_kw["nullable"] = False

    # AUTO_INCREMENT
    if spec.get("autoincr", False):
        col_kw["autoincrement"] = True
    elif issubclass(col_type, sqltypes.Integer):
        col_kw["autoincrement"] = False

    # DEFAULT
    default = spec.get("default", None)

    if default == "NULL":
        # eliminates the need to deal with this later.
        default = None

    comment = spec.get("comment", None)

    if comment is not None:
        comment = cleanup_text(comment)

    sqltext = spec.get("generated")
    if sqltext is not None:
        # generated (computed) column: record expression and persistence
        computed = dict(sqltext=sqltext)
        persisted = spec.get("persistence")
        if persisted is not None:
            computed["persisted"] = persisted == "STORED"
        col_kw["computed"] = computed

    col_d = dict(
        name=name, type=type_instance, default=default, comment=comment
    )
    col_d.update(col_kw)
    state.columns.append(col_d)
|
||||
|
||||
def _describe_to_create(self, table_name, columns):
    """Re-format DESCRIBE output as a SHOW CREATE TABLE string.

    DESCRIBE is a much simpler reflection and is sufficient for
    reflecting views for runtime use. This method formats DDL
    for columns only- keys are omitted.

    :param columns: A sequence of DESCRIBE or SHOW COLUMNS 6-tuples.
      SHOW FULL COLUMNS FROM rows must be rearranged for use with
      this function.
    """

    buffer = []
    for row in columns:
        # (name, type, nullable, default, extra); index 3 (key) is skipped
        (name, col_type, nullable, default, extra) = (
            row[i] for i in (0, 1, 2, 4, 5)
        )

        line = [" "]
        line.append(self.preparer.quote_identifier(name))
        line.append(col_type)
        if not nullable:
            line.append("NOT NULL")
        if default:
            if "auto_increment" in default:
                # auto_increment is rendered via "extra", not DEFAULT
                pass
            elif col_type.startswith("timestamp") and default.startswith(
                "C"
            ):
                # CURRENT_TIMESTAMP-style defaults are emitted unquoted
                line.append("DEFAULT")
                line.append(default)
            elif default == "NULL":
                line.append("DEFAULT")
                line.append(default)
            else:
                # literal default: single-quote and escape embedded quotes
                line.append("DEFAULT")
                line.append("'%s'" % default.replace("'", "''"))
        if extra:
            line.append(extra)

        buffer.append(" ".join(line))

    return "".join(
        [
            (
                "CREATE TABLE %s (\n"
                % self.preparer.quote_identifier(table_name)
            ),
            ",\n".join(buffer),
            "\n) ",
        ]
    )
|
||||
|
||||
def _parse_keyexprs(self, identifiers):
|
||||
"""Unpack '"col"(2),"col" ASC'-ish strings into components."""
|
||||
|
||||
return [
|
||||
(colname, int(length) if length else None, modifiers)
|
||||
for colname, length, modifiers in self._re_keyexprs.findall(
|
||||
identifiers
|
||||
)
|
||||
]
|
||||
|
||||
def _prep_regexes(self):
    """Pre-compile regular expressions.

    Builds every regex used to parse SHOW CREATE TABLE output,
    parameterized on the dialect's identifier quoting characters, and
    populates ``self._pr_options`` with table-option parsers.  The
    construction order of the option parsers matters: quoted-string
    options are registered first so their quoted values are consumed
    before word-level parsers run.
    """

    self._re_columns = []
    self._pr_options = []

    _final = self.preparer.final_quote

    # iq / fq / esc_fq: re-escaped initial quote, final quote, and the
    # escaped form of the final quote as it appears inside an identifier.
    quotes = dict(
        zip(
            ("iq", "fq", "esc_fq"),
            [
                re.escape(s)
                for s in (
                    self.preparer.initial_quote,
                    _final,
                    self.preparer._escape_identifier(_final),
                )
            ],
        )
    )

    # CREATE [<word>] TABLE `name` ( -- captures the table name
    self._pr_name = _pr_compile(
        r"^CREATE (?:\w+ +)?TABLE +"
        r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +\($" % quotes,
        self.preparer._unescape_identifier,
    )

    # CREATE ... VIEW, but not CREATE TABLE
    self._re_is_view = _re_compile(r"^CREATE(?! TABLE)(\s.*)?\sVIEW")

    # `col`,`col2`(32),`col3`(15) DESC
    #
    self._re_keyexprs = _re_compile(
        r"(?:"
        r"(?:%(iq)s((?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)"
        r"(?:\((\d+)\))?(?: +(ASC|DESC))?(?=\,|$))+" % quotes
    )

    # 'foo' or 'foo','bar' or 'fo,o','ba''a''r'
    self._re_csv_str = _re_compile(r"\x27(?:\x27\x27|[^\x27])*\x27")

    # 123 or 123,456
    self._re_csv_int = _re_compile(r"\d+")

    # `colname` <type> [type opts]
    # (NOT NULL | NULL)
    # DEFAULT ('value' | CURRENT_TIMESTAMP...)
    # COMMENT 'comment'
    # COLUMN_FORMAT (FIXED|DYNAMIC|DEFAULT)
    # STORAGE (DISK|MEMORY)
    self._re_column = _re_compile(
        r" "
        r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
        r"(?P<coltype>\w+)"
        r"(?:\((?P<arg>(?:\d+|\d+,\d+|"
        r"(?:'(?:''|[^'])*',?)+))\))?"
        r"(?: +(?P<unsigned>UNSIGNED))?"
        r"(?: +(?P<zerofill>ZEROFILL))?"
        r"(?: +CHARACTER SET +(?P<charset>[\w_]+))?"
        r"(?: +COLLATE +(?P<collate>[\w_]+))?"
        r"(?: +(?P<notnull>(?:NOT )?NULL))?"
        r"(?: +DEFAULT +(?P<default>"
        r"(?:NULL|'(?:''|[^'])*'|\(.+?\)|[\-\w\.\(\)]+"
        r"(?: +ON UPDATE [\-\w\.\(\)]+)?)"
        r"))?"
        r"(?: +(?:GENERATED ALWAYS)? ?AS +(?P<generated>\("
        r".*\))? ?(?P<persistence>VIRTUAL|STORED)?"
        r"(?: +(?P<notnull_generated>(?:NOT )?NULL))?"
        r")?"
        r"(?: +(?P<autoincr>AUTO_INCREMENT))?"
        r"(?: +COMMENT +'(?P<comment>(?:''|[^'])*)')?"
        r"(?: +COLUMN_FORMAT +(?P<colfmt>\w+))?"
        r"(?: +STORAGE +(?P<storage>\w+))?"
        r"(?: +(?P<extra>.*))?"
        r",?$" % quotes
    )

    # Fallback, try to parse as little as possible
    self._re_column_loose = _re_compile(
        r" "
        r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
        r"(?P<coltype>\w+)"
        r"(?:\((?P<arg>(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?"
        r".*?(?P<notnull>(?:NOT )NULL)?" % quotes
    )

    # (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))?
    # (`col` (ASC|DESC)?, `col` (ASC|DESC)?)
    # KEY_BLOCK_SIZE size | WITH PARSER name /*!50100 WITH PARSER name */
    self._re_key = _re_compile(
        r" "
        r"(?:(?P<type>\S+) )?KEY"
        r"(?: +%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)?"
        r"(?: +USING +(?P<using_pre>\S+))?"
        r" +\((?P<columns>.+?)\)"
        r"(?: +USING +(?P<using_post>\S+))?"
        r"(?: +KEY_BLOCK_SIZE *[ =]? *(?P<keyblock>\S+))?"
        r"(?: +WITH PARSER +(?P<parser>\S+))?"
        r"(?: +COMMENT +(?P<comment>(\x27\x27|\x27([^\x27])*?\x27)+))?"
        r"(?: +/\*(?P<version_sql>.+)\*/ *)?"
        r",?$" % quotes
    )

    # https://forums.mysql.com/read.php?20,567102,567111#msg-567111
    # It means if the MySQL version >= \d+, execute what's in the comment
    self._re_key_version_sql = _re_compile(
        r"\!\d+ " r"(?: *WITH PARSER +(?P<parser>\S+) *)?"
    )

    # CONSTRAINT `name` FOREIGN KEY (`local_col`)
    # REFERENCES `remote` (`remote_col`)
    # MATCH FULL | MATCH PARTIAL | MATCH SIMPLE
    # ON DELETE CASCADE ON UPDATE RESTRICT
    #
    # unique constraints come back as KEYs
    kw = quotes.copy()
    kw["on"] = "RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT"
    self._re_fk_constraint = _re_compile(
        r" "
        r"CONSTRAINT +"
        r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
        r"FOREIGN KEY +"
        r"\((?P<local>[^\)]+?)\) REFERENCES +"
        r"(?P<table>%(iq)s[^%(fq)s]+%(fq)s"
        r"(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +"
        r"\((?P<foreign>(?:%(iq)s[^%(fq)s]+%(fq)s(?: *, *)?)+)\)"
        r"(?: +(?P<match>MATCH \w+))?"
        r"(?: +ON DELETE (?P<ondelete>%(on)s))?"
        r"(?: +ON UPDATE (?P<onupdate>%(on)s))?" % kw
    )

    # CONSTRAINT `CONSTRAINT_1` CHECK (`x` > 5)'
    # testing on MariaDB 10.2 shows that the CHECK constraint
    # is returned on a line by itself, so to match without worrying
    # about parenthesis in the expression we go to the end of the line
    self._re_ck_constraint = _re_compile(
        r" "
        r"CONSTRAINT +"
        r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
        r"CHECK +"
        r"\((?P<sqltext>.+)\),?" % kw
    )

    # PARTITION
    #
    # punt!
    self._re_partition = _re_compile(r"(?:.*)(?:SUB)?PARTITION(?:.*)")

    # Table-level options (COLLATE, ENGINE, etc.)
    # Do the string options first, since they have quoted
    # strings we need to get rid of.
    for option in _options_of_type_string:
        self._add_option_string(option)

    for option in (
        "ENGINE",
        "TYPE",
        "AUTO_INCREMENT",
        "AVG_ROW_LENGTH",
        "CHARACTER SET",
        "DEFAULT CHARSET",
        "CHECKSUM",
        "COLLATE",
        "DELAY_KEY_WRITE",
        "INSERT_METHOD",
        "MAX_ROWS",
        "MIN_ROWS",
        "PACK_KEYS",
        "ROW_FORMAT",
        "KEY_BLOCK_SIZE",
        "STATS_SAMPLE_PAGES",
    ):
        self._add_option_word(option)

    for option in (
        "PARTITION BY",
        "SUBPARTITION BY",
        "PARTITIONS",
        "SUBPARTITIONS",
        "PARTITION",
        "SUBPARTITION",
    ):
        self._add_partition_option_word(option)

    self._add_option_regex("UNION", r"\([^\)]+\)")
    self._add_option_regex("TABLESPACE", r".*? STORAGE DISK")
    self._add_option_regex(
        "RAID_TYPE",
        r"\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+",
    )
||||
# Separator between a table-option directive and its value: either an
# optional "=" with surrounding whitespace, or plain whitespace.
_optional_equals = r"(?:\s*(?:=\s*)|\s+)"
||||
def _add_option_string(self, directive):
    """Register a parser for a table option whose value is a quoted string."""
    # doubled interior quotes are unescaped later by cleanup_text
    value_pat = r"'(?P<val>(?:[^']|'')*?)'(?!')"
    regex = r"(?P<directive>%s)%s%s" % (
        re.escape(directive),
        self._optional_equals,
        value_pat,
    )
    self._pr_options.append(_pr_compile(regex, cleanup_text))
|
||||
def _add_option_word(self, directive):
    """Register a parser for a table option whose value is a bare word."""
    regex = r"(?P<directive>%s)%s(?P<val>\w+)" % (
        re.escape(directive),
        self._optional_equals,
    )
    self._pr_options.append(_pr_compile(regex))
|
||||
def _add_partition_option_word(self, directive):
    """Register a parser for one of the PARTITION-family table options."""
    # PARTITION BY / SUBPARTITION BY take an arbitrary expression,
    # PARTITIONS / SUBPARTITIONS take an integer count, and the bare
    # PARTITION / SUBPARTITION keywords carry no value at all.
    if directive in ("PARTITION BY", "SUBPARTITION BY"):
        value_pat = r"(?P<val>\w+.*)"
    elif directive in ("SUBPARTITIONS", "PARTITIONS"):
        value_pat = r"(?P<val>\d+)"
    else:
        value_pat = None

    if value_pat is None:
        regex = r"(?<!\S)(?P<directive>%s)(?!\S)" % (re.escape(directive),)
    else:
        regex = r"(?<!\S)(?P<directive>%s)%s%s" % (
            re.escape(directive),
            self._optional_equals,
            value_pat,
        )
    self._pr_options.append(_pr_compile(regex))
|
||||
def _add_option_regex(self, directive, regex):
    """Register a parser for a table option with a caller-supplied value pattern."""
    pattern = r"(?P<directive>%s)%s(?P<val>%s)" % (
        re.escape(directive),
        self._optional_equals,
        regex,
    )
    self._pr_options.append(_pr_compile(pattern))
|
||||
|
||||
# Table options whose value is a quoted string; these are registered via
# _add_option_string() so embedded quotes get unescaped by cleanup_text().
_options_of_type_string = (
    "COMMENT",
    "DATA DIRECTORY",
    "INDEX DIRECTORY",
    "PASSWORD",
    "CONNECTION",
)
|
||||
|
||||
def _pr_compile(regex, cleanup=None):
    """Prepare a 2-tuple of compiled regex and callable."""
    compiled = _re_compile(regex)
    # cleanup, when given, post-processes the matched value
    return (compiled, cleanup)
|
||||
|
||||
def _re_compile(regex):
|
||||
"""Compile a string to regex, I and UNICODE."""
|
||||
|
||||
return re.compile(regex, re.I | re.UNICODE)
|
||||
|
||||
|
||||
def _strip_values(values):
|
||||
"Strip reflected values quotes"
|
||||
strip_values = []
|
||||
for a in values:
|
||||
if a[0:1] == '"' or a[0:1] == "'":
|
||||
# strip enclosing quotes and unquote interior
|
||||
a = a[1:-1].replace(a[0] * 2, a[0])
|
||||
strip_values.append(a)
|
||||
return strip_values
|
||||
|
||||
|
||||
def cleanup_text(raw_text: str) -> str:
    """Reverse MySQL escaping in a reflected text value.

    Backslash escape sequences are translated back to the control
    characters they represent, and doubled single quotes are collapsed.
    """
    if "\\" in raw_text:
        raw_text = _control_char_regexp.sub(
            lambda m: _control_char_map[m[0]], raw_text
        )
    return raw_text.replace("''", "'")
|
||||
|
||||
# Map of backslash escape sequences, as they appear in reflected MySQL
# DDL, to the control characters they represent.  '\e' (escape) has no
# Python string-literal spelling and is intentionally omitted.
_control_char_map = {
    "\\\\": "\\",
    "\\0": "\0",
    "\\a": "\a",
    "\\b": "\b",
    "\\t": "\t",
    "\\n": "\n",
    "\\v": "\v",
    "\\f": "\f",
    "\\r": "\r",
    # '\\e':'\e',
}
# Alternation matching any key of _control_char_map; used by cleanup_text().
_control_char_regexp = re.compile(
    "|".join(re.escape(k) for k in _control_char_map)
)
|
@ -0,0 +1,571 @@
|
||||
# dialects/mysql/reserved_words.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
# generated using:
|
||||
# https://gist.github.com/kkirsche/4f31f2153ed7a3248be1ec44ca6ddbc9
|
||||
#
|
||||
# https://mariadb.com/kb/en/reserved-words/
|
||||
# includes: Reserved Words, Oracle Mode (separate set unioned)
|
||||
# excludes: Exceptions, Function Names
|
||||
# mypy: ignore-errors
|
||||
|
||||
# MariaDB reserved words: the main Reserved Words list, unioned below with
# the sql_mode=ORACLE additions.  Per the header comment, generated from
# https://mariadb.com/kb/en/reserved-words/ excluding the Exceptions and
# Function Names lists.
RESERVED_WORDS_MARIADB = {
    "accessible", "add", "all", "alter", "analyze", "and", "as", "asc",
    "asensitive", "before", "between", "bigint", "binary", "blob", "both",
    "by", "call", "cascade", "case", "change", "char", "character", "check",
    "collate", "column", "condition", "constraint", "continue", "convert",
    "create", "cross", "current_date", "current_role", "current_time",
    "current_timestamp", "current_user", "cursor", "database", "databases",
    "day_hour", "day_microsecond", "day_minute", "day_second", "dec",
    "decimal", "declare", "default", "delayed", "delete", "desc",
    "describe", "deterministic", "distinct", "distinctrow", "div",
    "do_domain_ids", "double", "drop", "dual", "each", "else", "elseif",
    "enclosed", "escaped", "except", "exists", "exit", "explain", "false",
    "fetch", "float", "float4", "float8", "for", "force", "foreign",
    "from", "fulltext", "general", "grant", "group", "having",
    "high_priority", "hour_microsecond", "hour_minute", "hour_second",
    "if", "ignore", "ignore_domain_ids", "ignore_server_ids", "in",
    "index", "infile", "inner", "inout", "insensitive", "insert", "int",
    "int1", "int2", "int3", "int4", "int8", "integer", "intersect",
    "interval", "into", "is", "iterate", "join", "key", "keys", "kill",
    "leading", "leave", "left", "like", "limit", "linear", "lines",
    "load", "localtime", "localtimestamp", "lock", "long", "longblob",
    "longtext", "loop", "low_priority", "master_heartbeat_period",
    "master_ssl_verify_server_cert", "match", "maxvalue", "mediumblob",
    "mediumint", "mediumtext", "middleint", "minute_microsecond",
    "minute_second", "mod", "modifies", "natural", "no_write_to_binlog",
    "not", "null", "numeric", "offset", "on", "optimize", "option",
    "optionally", "or", "order", "out", "outer", "outfile", "over",
    "page_checksum", "parse_vcol_expr", "partition", "position",
    "precision", "primary", "procedure", "purge", "range", "read",
    "read_write", "reads", "real", "recursive", "ref_system_id",
    "references", "regexp", "release", "rename", "repeat", "replace",
    "require", "resignal", "restrict", "return", "returning", "revoke",
    "right", "rlike", "rows", "row_number", "schema", "schemas",
    "second_microsecond", "select", "sensitive", "separator", "set",
    "show", "signal", "slow", "smallint", "spatial", "specific", "sql",
    "sql_big_result", "sql_calc_found_rows", "sql_small_result",
    "sqlexception", "sqlstate", "sqlwarning", "ssl", "starting",
    "stats_auto_recalc", "stats_persistent", "stats_sample_pages",
    "straight_join", "table", "terminated", "then", "tinyblob",
    "tinyint", "tinytext", "to", "trailing", "trigger", "true", "undo",
    "union", "unique", "unlock", "unsigned", "update", "usage", "use",
    "using", "utc_date", "utc_time", "utc_timestamp", "values",
    "varbinary", "varchar", "varcharacter", "varying", "when", "where",
    "while", "window", "with", "write", "xor", "year_month", "zerofill",
}.union(
    # additional words reserved only under Oracle compatibility mode
    {
        "body", "elsif", "goto", "history", "others", "package",
        "period", "raise", "rowtype", "system", "system_time",
        "versioning", "without",
    }
)
||||
|
||||
# https://dev.mysql.com/doc/refman/8.3/en/keywords.html
|
||||
# https://dev.mysql.com/doc/refman/8.0/en/keywords.html
|
||||
# https://dev.mysql.com/doc/refman/5.7/en/keywords.html
|
||||
# https://dev.mysql.com/doc/refman/5.6/en/keywords.html
|
||||
# includes: MySQL x.0 Keywords and Reserved Words
|
||||
# excludes: MySQL x.0 New Keywords and Reserved Words,
|
||||
# MySQL x.0 Removed Keywords and Reserved Words
|
||||
# MySQL reserved words, merged across the 5.6 through 8.x keyword lists
# (see the URLs in the header comment above this set); includes each
# version's "Keywords and Reserved Words" and excludes the per-version
# "New" and "Removed" lists.
RESERVED_WORDS_MYSQL = {
    "accessible", "add", "admin", "all", "alter", "analyze", "and",
    "array", "as", "asc", "asensitive", "before", "between", "bigint",
    "binary", "blob", "both", "by", "call", "cascade", "case", "change",
    "char", "character", "check", "collate", "column", "condition",
    "constraint", "continue", "convert", "create", "cross", "cube",
    "cume_dist", "current_date", "current_time", "current_timestamp",
    "current_user", "cursor", "database", "databases", "day_hour",
    "day_microsecond", "day_minute", "day_second", "dec", "decimal",
    "declare", "default", "delayed", "delete", "dense_rank", "desc",
    "describe", "deterministic", "distinct", "distinctrow", "div",
    "double", "drop", "dual", "each", "else", "elseif", "empty",
    "enclosed", "escaped", "except", "exists", "exit", "explain",
    "false", "fetch", "first_value", "float", "float4", "float8",
    "for", "force", "foreign", "from", "fulltext", "function",
    "general", "generated", "get", "get_master_public_key", "grant",
    "group", "grouping", "groups", "having", "high_priority",
    "hour_microsecond", "hour_minute", "hour_second", "if", "ignore",
    "ignore_server_ids", "in", "index", "infile", "inner", "inout",
    "insensitive", "insert", "int", "int1", "int2", "int3", "int4",
    "int8", "integer", "intersect", "interval", "into",
    "io_after_gtids", "io_before_gtids", "is", "iterate", "join",
    "json_table", "key", "keys", "kill", "lag", "last_value",
    "lateral", "lead", "leading", "leave", "left", "like", "limit",
    "linear", "lines", "load", "localtime", "localtimestamp", "lock",
    "long", "longblob", "longtext", "loop", "low_priority",
    "master_bind", "master_heartbeat_period",
    "master_ssl_verify_server_cert", "match", "maxvalue", "mediumblob",
    "mediumint", "mediumtext", "member", "middleint",
    "minute_microsecond", "minute_second", "mod", "modifies",
    "natural", "no_write_to_binlog", "not", "nth_value", "ntile",
    "null", "numeric", "of", "on", "optimize", "optimizer_costs",
    "option", "optionally", "or", "order", "out", "outer", "outfile",
    "over", "parse_gcol_expr", "parallel", "partition", "percent_rank",
    "persist", "persist_only", "precision", "primary", "procedure",
    "purge", "qualify", "range", "rank", "read", "read_write", "reads",
    "real", "recursive", "references", "regexp", "release", "rename",
    "repeat", "replace", "require", "resignal", "restrict", "return",
    "revoke", "right", "rlike", "role", "row", "row_number", "rows",
    "schema", "schemas", "second_microsecond", "select", "sensitive",
    "separator", "set", "show", "signal", "slow", "smallint",
    "spatial", "specific", "sql", "sql_after_gtids", "sql_before_gtids",
    "sql_big_result", "sql_calc_found_rows", "sql_small_result",
    "sqlexception", "sqlstate", "sqlwarning", "ssl", "starting",
    "stored", "straight_join", "system", "table", "terminated", "then",
    "tinyblob", "tinyint", "tinytext", "to", "trailing", "trigger",
    "true", "undo", "union", "unique", "unlock", "unsigned", "update",
    "usage", "use", "using", "utc_date", "utc_time", "utc_timestamp",
    "values", "varbinary", "varchar", "varcharacter", "varying",
    "virtual", "when", "where", "while", "window", "with", "write",
    "xor", "year_month", "zerofill",
}
|
@ -0,0 +1,773 @@
|
||||
# dialects/mysql/types.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
import datetime
|
||||
|
||||
from ... import exc
|
||||
from ... import util
|
||||
from ...sql import sqltypes
|
||||
|
||||
|
||||
class _NumericType:
|
||||
"""Base for MySQL numeric types.
|
||||
|
||||
This is the base both for NUMERIC as well as INTEGER, hence
|
||||
it's a mixin.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, unsigned=False, zerofill=False, **kw):
|
||||
self.unsigned = unsigned
|
||||
self.zerofill = zerofill
|
||||
super().__init__(**kw)
|
||||
|
||||
def __repr__(self):
|
||||
return util.generic_repr(
|
||||
self, to_inspect=[_NumericType, sqltypes.Numeric]
|
||||
)
|
||||
|
||||
|
||||
class _FloatType(_NumericType, sqltypes.Float):
    """Mixin-based Float for MySQL; validates the precision/scale pairing."""

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        # REAL and DOUBLE require precision and scale to be supplied
        # together or not at all.
        if isinstance(self, (REAL, DOUBLE)):
            has_precision = precision is not None
            has_scale = scale is not None
            if has_precision != has_scale:
                raise exc.ArgumentError(
                    "You must specify both precision and scale or omit "
                    "both altogether."
                )
        super().__init__(precision=precision, asdecimal=asdecimal, **kw)
        self.scale = scale

    def __repr__(self):
        return util.generic_repr(
            self, to_inspect=[_FloatType, _NumericType, sqltypes.Float]
        )
||||
|
||||
|
||||
class _IntegerType(_NumericType, sqltypes.Integer):
    """Mixin-based Integer for MySQL carrying a display width."""

    def __init__(self, display_width=None, **kw):
        # display width, e.g. the 11 in INT(11)
        self.display_width = display_width
        super().__init__(**kw)

    def __repr__(self):
        return util.generic_repr(
            self, to_inspect=[_IntegerType, _NumericType, sqltypes.Integer]
        )
||||
|
||||
|
||||
class _StringType(sqltypes.String):
    """Base for MySQL string types."""

    def __init__(
        self,
        charset=None,
        collation=None,
        ascii=False,  # noqa
        binary=False,
        unicode=False,
        national=False,
        **kw,
    ):
        self.charset = charset

        # allow collate= or collation=; an explicit "collation" key wins
        kw.setdefault("collation", kw.pop("collate", collation))

        self.national = national
        self.binary = binary
        self.unicode = unicode
        self.ascii = ascii
        super().__init__(**kw)

    def __repr__(self):
        return util.generic_repr(
            self, to_inspect=[_StringType, sqltypes.String]
        )
||||
|
||||
|
||||
class _MatchType(sqltypes.Float, sqltypes.MatchType):
    """Return type for MATCH expressions: a MatchType that is also a Float."""

    def __init__(self, **kw):
        # TODO: float arguments?
        sqltypes.Float.__init__(self)
        sqltypes.MatchType.__init__(self)
||||
|
||||
|
||||
class NUMERIC(_NumericType, sqltypes.NUMERIC):
    """MySQL NUMERIC type."""

    __visit_name__ = "NUMERIC"

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a NUMERIC.

        :param precision: Total digits in this number.  If scale and
          precision are both None, values are stored to limits allowed
          by the server.

        :param scale: The number of digits after the decimal point.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as
          strings left-padded with zeros.  Note that this does not effect
          the values returned by the underlying database API, which
          continue to be numeric.
        """
        kw.update(precision=precision, scale=scale, asdecimal=asdecimal)
        super().__init__(**kw)
||||
|
||||
|
||||
class DECIMAL(_NumericType, sqltypes.DECIMAL):
    """MySQL DECIMAL type."""

    __visit_name__ = "DECIMAL"

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a DECIMAL.

        :param precision: Total digits in this number.  If scale and
          precision are both None, values are stored to limits allowed
          by the server.

        :param scale: The number of digits after the decimal point.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as
          strings left-padded with zeros.  Note that this does not effect
          the values returned by the underlying database API, which
          continue to be numeric.
        """
        kw.update(precision=precision, scale=scale, asdecimal=asdecimal)
        super().__init__(**kw)
||||
|
||||
|
||||
class DOUBLE(_FloatType, sqltypes.DOUBLE):
    """MySQL DOUBLE type."""

    __visit_name__ = "DOUBLE"

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a DOUBLE.

        .. note::

            The :class:`.DOUBLE` type by default converts from float
            to Decimal, using a truncation that defaults to 10 digits.
            Specify either ``scale=n`` or ``decimal_return_scale=n`` in
            order to change this scale, or ``asdecimal=False`` to return
            values directly as Python floating points.

        :param precision: Total digits in this number.  If scale and
          precision are both None, values are stored to limits allowed
          by the server.

        :param scale: The number of digits after the decimal point.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as
          strings left-padded with zeros.  Note that this does not effect
          the values returned by the underlying database API, which
          continue to be numeric.
        """
        kw.update(precision=precision, scale=scale, asdecimal=asdecimal)
        super().__init__(**kw)
||||
|
||||
|
||||
class REAL(_FloatType, sqltypes.REAL):
    """MySQL REAL type."""

    __visit_name__ = "REAL"

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a REAL.

        .. note::

            The :class:`.REAL` type by default converts from float
            to Decimal, using a truncation that defaults to 10 digits.
            Specify either ``scale=n`` or ``decimal_return_scale=n`` in
            order to change this scale, or ``asdecimal=False`` to return
            values directly as Python floating points.

        :param precision: Total digits in this number.  If scale and
          precision are both None, values are stored to limits allowed
          by the server.

        :param scale: The number of digits after the decimal point.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as
          strings left-padded with zeros.  Note that this does not effect
          the values returned by the underlying database API, which
          continue to be numeric.
        """
        kw.update(precision=precision, scale=scale, asdecimal=asdecimal)
        super().__init__(**kw)
||||
|
||||
|
||||
class FLOAT(_FloatType, sqltypes.FLOAT):
    """MySQL FLOAT type."""

    __visit_name__ = "FLOAT"

    def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
        """Construct a FLOAT.

        :param precision: Total digits in this number.  If scale and
          precision are both None, values are stored to limits allowed
          by the server.

        :param scale: The number of digits after the decimal point.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as
          strings left-padded with zeros.  Note that this does not effect
          the values returned by the underlying database API, which
          continue to be numeric.
        """
        kw.update(precision=precision, scale=scale, asdecimal=asdecimal)
        super().__init__(**kw)

    def bind_processor(self, dialect):
        # no bind-side conversion is needed
        return None
||||
|
||||
|
||||
class INTEGER(_IntegerType, sqltypes.INTEGER):
    """MySQL INTEGER type."""

    __visit_name__ = "INTEGER"

    def __init__(self, display_width=None, **kw):
        """Construct an INTEGER.

        :param display_width: Optional, maximum display width for this
          number.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as
          strings left-padded with zeros.  Note that this does not effect
          the values returned by the underlying database API, which
          continue to be numeric.
        """
        kw["display_width"] = display_width
        super().__init__(**kw)
||||
|
||||
|
||||
class BIGINT(_IntegerType, sqltypes.BIGINT):
    """MySQL BIGINTEGER type."""

    __visit_name__ = "BIGINT"

    def __init__(self, display_width=None, **kw):
        """Construct a BIGINTEGER.

        :param display_width: Optional, maximum display width for this
          number.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as
          strings left-padded with zeros.  Note that this does not effect
          the values returned by the underlying database API, which
          continue to be numeric.
        """
        kw["display_width"] = display_width
        super().__init__(**kw)
||||
|
||||
|
||||
class MEDIUMINT(_IntegerType):
    """MySQL MEDIUMINTEGER type."""

    __visit_name__ = "MEDIUMINT"

    def __init__(self, display_width=None, **kw):
        """Construct a MEDIUMINTEGER

        :param display_width: Optional, maximum display width for this
          number.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as
          strings left-padded with zeros.  Note that this does not effect
          the values returned by the underlying database API, which
          continue to be numeric.
        """
        kw["display_width"] = display_width
        super().__init__(**kw)
||||
|
||||
|
||||
class TINYINT(_IntegerType):
    """MySQL TINYINT type."""

    __visit_name__ = "TINYINT"

    def __init__(self, display_width=None, **kw):
        """Construct a TINYINT.

        :param display_width: Optional, maximum display width for this
          number.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as
          strings left-padded with zeros.  Note that this does not effect
          the values returned by the underlying database API, which
          continue to be numeric.
        """
        kw["display_width"] = display_width
        super().__init__(**kw)
||||
|
||||
|
||||
class SMALLINT(_IntegerType, sqltypes.SMALLINT):
    """MySQL SMALLINTEGER type."""

    __visit_name__ = "SMALLINT"

    def __init__(self, display_width=None, **kw):
        """Construct a SMALLINTEGER.

        :param display_width: Optional, maximum display width for this
          number.

        :param unsigned: a boolean, optional.

        :param zerofill: Optional. If true, values will be stored as
          strings left-padded with zeros.  Note that this does not effect
          the values returned by the underlying database API, which
          continue to be numeric.
        """
        kw["display_width"] = display_width
        super().__init__(**kw)
||||
|
||||
|
||||
class BIT(sqltypes.TypeEngine):
    """MySQL BIT type.

    This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater
    for MyISAM, MEMORY, InnoDB and BDB.  For older versions, use a
    MSTinyInteger() type.

    """

    __visit_name__ = "BIT"

    def __init__(self, length=None):
        """Construct a BIT.

        :param length: Optional, number of bits.

        """
        self.length = length

    def result_processor(self, dialect, coltype):
        """Convert a MySQL's 64 bit, variable length binary string to a
        long."""

        if dialect.supports_native_bit:
            # the driver already delivers an integer; nothing to convert
            return None

        def process(value):
            if value is None:
                return value
            # fold the big-endian byte string into a single integer
            acc = 0
            for unit in value:
                if not isinstance(unit, int):
                    # some drivers deliver a character string rather than
                    # bytes; coerce each character to its ordinal
                    unit = ord(unit)
                acc = acc << 8 | unit
            return acc

        return process
|
||||
|
||||
|
||||
class TIME(sqltypes.TIME):
    """MySQL TIME type."""

    __visit_name__ = "TIME"

    def __init__(self, timezone=False, fsp=None):
        """Construct a MySQL TIME type.

        :param timezone: not used by the MySQL dialect.
        :param fsp: fractional seconds precision value.
         MySQL 5.6 supports storage of fractional seconds;
         this parameter will be used when emitting DDL
         for the TIME type.

         .. note::

            DBAPI driver support for fractional seconds may
            be limited; current support includes
            MySQL Connector/Python.

        """
        super().__init__(timezone=timezone)
        self.fsp = fsp

    def result_processor(self, dialect, coltype):
        time = datetime.time

        def process(value):
            # drivers return TIME values as timedeltas; convert to
            # a datetime.time
            if value is None:
                return None
            total_seconds = value.seconds
            total_minutes = total_seconds // 60
            return time(
                total_minutes // 60,
                total_minutes % 60,
                total_seconds - total_minutes * 60,
                microsecond=value.microseconds,
            )

        return process
|
||||
|
||||
|
||||
class TIMESTAMP(sqltypes.TIMESTAMP):
    """MySQL TIMESTAMP type."""

    __visit_name__ = "TIMESTAMP"

    def __init__(self, timezone=False, fsp=None):
        """Construct a MySQL TIMESTAMP type.

        :param timezone: not used by the MySQL dialect.
        :param fsp: fractional seconds precision value.
         MySQL 5.6.4 supports storage of fractional seconds;
         this parameter will be used when emitting DDL
         for the TIMESTAMP type.

         .. note::

            DBAPI driver support for fractional seconds may
            be limited; current support includes
            MySQL Connector/Python.

        """
        super().__init__(timezone=timezone)
        self.fsp = fsp
|
||||
|
||||
|
||||
class DATETIME(sqltypes.DATETIME):
    """MySQL DATETIME type."""

    __visit_name__ = "DATETIME"

    def __init__(self, timezone=False, fsp=None):
        """Construct a MySQL DATETIME type.

        :param timezone: not used by the MySQL dialect.
        :param fsp: fractional seconds precision value.
         MySQL 5.6.4 supports storage of fractional seconds;
         this parameter will be used when emitting DDL
         for the DATETIME type.

         .. note::

            DBAPI driver support for fractional seconds may
            be limited; current support includes
            MySQL Connector/Python.

        """
        super().__init__(timezone=timezone)
        self.fsp = fsp
|
||||
|
||||
|
||||
class YEAR(sqltypes.TypeEngine):
    """MySQL YEAR type, for single byte storage of years 1901-2155."""

    __visit_name__ = "YEAR"

    def __init__(self, display_width=None):
        # display width rendered in DDL, e.g. YEAR(4)
        self.display_width = display_width
|
||||
|
||||
|
||||
class TEXT(_StringType, sqltypes.TEXT):
    """MySQL TEXT type, for character storage encoded up to 2^16 bytes."""

    __visit_name__ = "TEXT"

    def __init__(self, length=None, **kw):
        """Construct a TEXT.

        :param length: Optional, if provided the server may optimize storage
         by substituting the smallest TEXT type sufficient to store
         ``length`` bytes of characters.

        :param charset: Optional, a column-level character set for this string
         value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
         value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
         character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
         character set, generates UNICODE in schema.

        :param national: Optional. If true, use the server's configured
         national character set.

        :param binary: Defaults to False: short-hand, pick the binary
         collation type that matches the column's character set.  Generates
         BINARY in schema.  This does not affect the type of data stored,
         only the collation of character data.

        """
        super().__init__(length=length, **kw)
|
||||
|
||||
|
||||
class TINYTEXT(_StringType):
    """MySQL TINYTEXT type, for character storage encoded up to 2^8 bytes."""

    __visit_name__ = "TINYTEXT"

    def __init__(self, **kwargs):
        """Construct a TINYTEXT.

        :param charset: Optional, a column-level character set for this string
         value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
         value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
         character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
         character set, generates UNICODE in schema.

        :param national: Optional. If true, use the server's configured
         national character set.

        :param binary: Defaults to False: short-hand, pick the binary
         collation type that matches the column's character set.  Generates
         BINARY in schema.  This does not affect the type of data stored,
         only the collation of character data.

        """
        super().__init__(**kwargs)
|
||||
|
||||
|
||||
class MEDIUMTEXT(_StringType):
    """MySQL MEDIUMTEXT type, for character storage encoded up
    to 2^24 bytes."""

    __visit_name__ = "MEDIUMTEXT"

    def __init__(self, **kwargs):
        """Construct a MEDIUMTEXT.

        :param charset: Optional, a column-level character set for this string
         value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
         value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
         character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
         character set, generates UNICODE in schema.

        :param national: Optional. If true, use the server's configured
         national character set.

        :param binary: Defaults to False: short-hand, pick the binary
         collation type that matches the column's character set.  Generates
         BINARY in schema.  This does not affect the type of data stored,
         only the collation of character data.

        """
        super().__init__(**kwargs)
|
||||
|
||||
|
||||
class LONGTEXT(_StringType):
    """MySQL LONGTEXT type, for character storage encoded up to 2^32 bytes."""

    __visit_name__ = "LONGTEXT"

    def __init__(self, **kwargs):
        """Construct a LONGTEXT.

        :param charset: Optional, a column-level character set for this string
         value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
         value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
         character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
         character set, generates UNICODE in schema.

        :param national: Optional. If true, use the server's configured
         national character set.

        :param binary: Defaults to False: short-hand, pick the binary
         collation type that matches the column's character set.  Generates
         BINARY in schema.  This does not affect the type of data stored,
         only the collation of character data.

        """
        super().__init__(**kwargs)
|
||||
|
||||
|
||||
class VARCHAR(_StringType, sqltypes.VARCHAR):
    """MySQL VARCHAR type, for variable-length character data."""

    __visit_name__ = "VARCHAR"

    def __init__(self, length=None, **kwargs):
        """Construct a VARCHAR.

        :param charset: Optional, a column-level character set for this string
         value.  Takes precedence to 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
         value.  Takes precedence to 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
         character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
         character set, generates UNICODE in schema.

        :param national: Optional. If true, use the server's configured
         national character set.

        :param binary: Defaults to False: short-hand, pick the binary
         collation type that matches the column's character set.  Generates
         BINARY in schema.  This does not affect the type of data stored,
         only the collation of character data.

        """
        super().__init__(length=length, **kwargs)
|
||||
|
||||
|
||||
class CHAR(_StringType, sqltypes.CHAR):
    """MySQL CHAR type, for fixed-length character data."""

    __visit_name__ = "CHAR"

    def __init__(self, length=None, **kwargs):
        """Construct a CHAR.

        :param length: Maximum data length, in characters.

        :param binary: Optional, use the default binary collation for the
         national character set.  This does not affect the type of data
         stored, use a BINARY type for binary data.

        :param collation: Optional, request a particular collation.  Must be
         compatible with the national character set.

        """
        super().__init__(length=length, **kwargs)

    @classmethod
    def _adapt_string_for_cast(cls, type_):
        # copy an arbitrary string type into a CHAR for the purposes of
        # rendering a CAST expression
        type_ = sqltypes.to_instance(type_)
        if isinstance(type_, sqltypes.CHAR):
            return type_
        if isinstance(type_, _StringType):
            # carry over all MySQL-specific string flags
            return CHAR(
                length=type_.length,
                charset=type_.charset,
                collation=type_.collation,
                ascii=type_.ascii,
                binary=type_.binary,
                unicode=type_.unicode,
                national=False,  # not supported in CAST
            )
        # plain string type: only the length is meaningful
        return CHAR(length=type_.length)
|
||||
|
||||
|
||||
class NVARCHAR(_StringType, sqltypes.NVARCHAR):
    """MySQL NVARCHAR type.

    For variable-length character data in the server's configured national
    character set.
    """

    __visit_name__ = "NVARCHAR"

    def __init__(self, length=None, **kwargs):
        """Construct an NVARCHAR.

        :param length: Maximum data length, in characters.

        :param binary: Optional, use the default binary collation for the
         national character set.  This does not affect the type of data
         stored, use a BINARY type for binary data.

        :param collation: Optional, request a particular collation.  Must be
         compatible with the national character set.

        """
        # the "N" variants always render with the national character set
        kwargs["national"] = True
        super().__init__(length=length, **kwargs)
|
||||
|
||||
|
||||
class NCHAR(_StringType, sqltypes.NCHAR):
    """MySQL NCHAR type.

    For fixed-length character data in the server's configured national
    character set.
    """

    __visit_name__ = "NCHAR"

    def __init__(self, length=None, **kwargs):
        """Construct an NCHAR.

        :param length: Maximum data length, in characters.

        :param binary: Optional, use the default binary collation for the
         national character set.  This does not affect the type of data
         stored, use a BINARY type for binary data.

        :param collation: Optional, request a particular collation.  Must be
         compatible with the national character set.

        """
        # the "N" variants always render with the national character set
        kwargs["national"] = True
        super().__init__(length=length, **kwargs)
|
||||
|
||||
|
||||
class TINYBLOB(sqltypes._Binary):
    """MySQL TINYBLOB type, for binary data up to 2^8 bytes."""

    __visit_name__ = "TINYBLOB"
|
||||
|
||||
|
||||
class MEDIUMBLOB(sqltypes._Binary):
    """MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes."""

    __visit_name__ = "MEDIUMBLOB"
|
||||
|
||||
|
||||
class LONGBLOB(sqltypes._Binary):
    """MySQL LONGBLOB type, for binary data up to 2^32 bytes."""

    __visit_name__ = "LONGBLOB"
|
@ -0,0 +1,67 @@
|
||||
# dialects/oracle/__init__.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
from types import ModuleType
|
||||
|
||||
from . import base # noqa
|
||||
from . import cx_oracle # noqa
|
||||
from . import oracledb # noqa
|
||||
from .base import BFILE
|
||||
from .base import BINARY_DOUBLE
|
||||
from .base import BINARY_FLOAT
|
||||
from .base import BLOB
|
||||
from .base import CHAR
|
||||
from .base import CLOB
|
||||
from .base import DATE
|
||||
from .base import DOUBLE_PRECISION
|
||||
from .base import FLOAT
|
||||
from .base import INTERVAL
|
||||
from .base import LONG
|
||||
from .base import NCHAR
|
||||
from .base import NCLOB
|
||||
from .base import NUMBER
|
||||
from .base import NVARCHAR
|
||||
from .base import NVARCHAR2
|
||||
from .base import RAW
|
||||
from .base import REAL
|
||||
from .base import ROWID
|
||||
from .base import TIMESTAMP
|
||||
from .base import VARCHAR
|
||||
from .base import VARCHAR2
|
||||
|
||||
# Publish an "oracledb_async" pseudo-module object whose ``dialect``
# attribute points at the asyncio variant of the oracledb dialect, so that
# "oracle+oracledb_async://" URLs resolve.
oracledb_async = type(
    "oracledb_async", (ModuleType,), {"dialect": oracledb.dialect_async}
)

# cx_oracle is the default dialect for plain "oracle://" URLs
dialect = cx_oracle.dialect
base.dialect = dialect

__all__ = (
    "VARCHAR",
    "NVARCHAR",
    "CHAR",
    "NCHAR",
    "DATE",
    "NUMBER",
    "BLOB",
    "BFILE",
    "CLOB",
    "NCLOB",
    "TIMESTAMP",
    "RAW",
    "FLOAT",
    "DOUBLE_PRECISION",
    "BINARY_DOUBLE",
    "BINARY_FLOAT",
    "LONG",
    "dialect",
    "INTERVAL",
    "VARCHAR2",
    "NVARCHAR2",
    "ROWID",
    "REAL",
)
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
3484
venv/lib/python3.11/site-packages/sqlalchemy/dialects/oracle/base.py
Normal file
3484
venv/lib/python3.11/site-packages/sqlalchemy/dialects/oracle/base.py
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,507 @@
|
||||
# dialects/oracle/dictionary.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from .types import DATE
|
||||
from .types import LONG
|
||||
from .types import NUMBER
|
||||
from .types import RAW
|
||||
from .types import VARCHAR2
|
||||
from ... import Column
|
||||
from ... import MetaData
|
||||
from ... import Table
|
||||
from ... import table
|
||||
from ...sql.sqltypes import CHAR
|
||||
|
||||
# constants
|
||||
DB_LINK_PLACEHOLDER = "__$sa_dblink$__"
|
||||
# tables
|
||||
dual = table("dual")
|
||||
dictionary_meta = MetaData()
|
||||
|
||||
# NOTE: all the dictionary_meta are aliases because oracle does not like
|
||||
# using the full table@dblink for every column in query, and complains with
|
||||
# ORA-00960: ambiguous column naming in select list
|
||||
all_tables = Table(
|
||||
"all_tables" + DB_LINK_PLACEHOLDER,
|
||||
dictionary_meta,
|
||||
Column("owner", VARCHAR2(128), nullable=False),
|
||||
Column("table_name", VARCHAR2(128), nullable=False),
|
||||
Column("tablespace_name", VARCHAR2(30)),
|
||||
Column("cluster_name", VARCHAR2(128)),
|
||||
Column("iot_name", VARCHAR2(128)),
|
||||
Column("status", VARCHAR2(8)),
|
||||
Column("pct_free", NUMBER),
|
||||
Column("pct_used", NUMBER),
|
||||
Column("ini_trans", NUMBER),
|
||||
Column("max_trans", NUMBER),
|
||||
Column("initial_extent", NUMBER),
|
||||
Column("next_extent", NUMBER),
|
||||
Column("min_extents", NUMBER),
|
||||
Column("max_extents", NUMBER),
|
||||
Column("pct_increase", NUMBER),
|
||||
Column("freelists", NUMBER),
|
||||
Column("freelist_groups", NUMBER),
|
||||
Column("logging", VARCHAR2(3)),
|
||||
Column("backed_up", VARCHAR2(1)),
|
||||
Column("num_rows", NUMBER),
|
||||
Column("blocks", NUMBER),
|
||||
Column("empty_blocks", NUMBER),
|
||||
Column("avg_space", NUMBER),
|
||||
Column("chain_cnt", NUMBER),
|
||||
Column("avg_row_len", NUMBER),
|
||||
Column("avg_space_freelist_blocks", NUMBER),
|
||||
Column("num_freelist_blocks", NUMBER),
|
||||
Column("degree", VARCHAR2(10)),
|
||||
Column("instances", VARCHAR2(10)),
|
||||
Column("cache", VARCHAR2(5)),
|
||||
Column("table_lock", VARCHAR2(8)),
|
||||
Column("sample_size", NUMBER),
|
||||
Column("last_analyzed", DATE),
|
||||
Column("partitioned", VARCHAR2(3)),
|
||||
Column("iot_type", VARCHAR2(12)),
|
||||
Column("temporary", VARCHAR2(1)),
|
||||
Column("secondary", VARCHAR2(1)),
|
||||
Column("nested", VARCHAR2(3)),
|
||||
Column("buffer_pool", VARCHAR2(7)),
|
||||
Column("flash_cache", VARCHAR2(7)),
|
||||
Column("cell_flash_cache", VARCHAR2(7)),
|
||||
Column("row_movement", VARCHAR2(8)),
|
||||
Column("global_stats", VARCHAR2(3)),
|
||||
Column("user_stats", VARCHAR2(3)),
|
||||
Column("duration", VARCHAR2(15)),
|
||||
Column("skip_corrupt", VARCHAR2(8)),
|
||||
Column("monitoring", VARCHAR2(3)),
|
||||
Column("cluster_owner", VARCHAR2(128)),
|
||||
Column("dependencies", VARCHAR2(8)),
|
||||
Column("compression", VARCHAR2(8)),
|
||||
Column("compress_for", VARCHAR2(30)),
|
||||
Column("dropped", VARCHAR2(3)),
|
||||
Column("read_only", VARCHAR2(3)),
|
||||
Column("segment_created", VARCHAR2(3)),
|
||||
Column("result_cache", VARCHAR2(7)),
|
||||
Column("clustering", VARCHAR2(3)),
|
||||
Column("activity_tracking", VARCHAR2(23)),
|
||||
Column("dml_timestamp", VARCHAR2(25)),
|
||||
Column("has_identity", VARCHAR2(3)),
|
||||
Column("container_data", VARCHAR2(3)),
|
||||
Column("inmemory", VARCHAR2(8)),
|
||||
Column("inmemory_priority", VARCHAR2(8)),
|
||||
Column("inmemory_distribute", VARCHAR2(15)),
|
||||
Column("inmemory_compression", VARCHAR2(17)),
|
||||
Column("inmemory_duplicate", VARCHAR2(13)),
|
||||
Column("default_collation", VARCHAR2(100)),
|
||||
Column("duplicated", VARCHAR2(1)),
|
||||
Column("sharded", VARCHAR2(1)),
|
||||
Column("externally_sharded", VARCHAR2(1)),
|
||||
Column("externally_duplicated", VARCHAR2(1)),
|
||||
Column("external", VARCHAR2(3)),
|
||||
Column("hybrid", VARCHAR2(3)),
|
||||
Column("cellmemory", VARCHAR2(24)),
|
||||
Column("containers_default", VARCHAR2(3)),
|
||||
Column("container_map", VARCHAR2(3)),
|
||||
Column("extended_data_link", VARCHAR2(3)),
|
||||
Column("extended_data_link_map", VARCHAR2(3)),
|
||||
Column("inmemory_service", VARCHAR2(12)),
|
||||
Column("inmemory_service_name", VARCHAR2(1000)),
|
||||
Column("container_map_object", VARCHAR2(3)),
|
||||
Column("memoptimize_read", VARCHAR2(8)),
|
||||
Column("memoptimize_write", VARCHAR2(8)),
|
||||
Column("has_sensitive_column", VARCHAR2(3)),
|
||||
Column("admit_null", VARCHAR2(3)),
|
||||
Column("data_link_dml_enabled", VARCHAR2(3)),
|
||||
Column("logical_replication", VARCHAR2(8)),
|
||||
).alias("a_tables")
|
||||
|
||||
all_views = Table(
|
||||
"all_views" + DB_LINK_PLACEHOLDER,
|
||||
dictionary_meta,
|
||||
Column("owner", VARCHAR2(128), nullable=False),
|
||||
Column("view_name", VARCHAR2(128), nullable=False),
|
||||
Column("text_length", NUMBER),
|
||||
Column("text", LONG),
|
||||
Column("text_vc", VARCHAR2(4000)),
|
||||
Column("type_text_length", NUMBER),
|
||||
Column("type_text", VARCHAR2(4000)),
|
||||
Column("oid_text_length", NUMBER),
|
||||
Column("oid_text", VARCHAR2(4000)),
|
||||
Column("view_type_owner", VARCHAR2(128)),
|
||||
Column("view_type", VARCHAR2(128)),
|
||||
Column("superview_name", VARCHAR2(128)),
|
||||
Column("editioning_view", VARCHAR2(1)),
|
||||
Column("read_only", VARCHAR2(1)),
|
||||
Column("container_data", VARCHAR2(1)),
|
||||
Column("bequeath", VARCHAR2(12)),
|
||||
Column("origin_con_id", VARCHAR2(256)),
|
||||
Column("default_collation", VARCHAR2(100)),
|
||||
Column("containers_default", VARCHAR2(3)),
|
||||
Column("container_map", VARCHAR2(3)),
|
||||
Column("extended_data_link", VARCHAR2(3)),
|
||||
Column("extended_data_link_map", VARCHAR2(3)),
|
||||
Column("has_sensitive_column", VARCHAR2(3)),
|
||||
Column("admit_null", VARCHAR2(3)),
|
||||
Column("pdb_local_only", VARCHAR2(3)),
|
||||
).alias("a_views")
|
||||
|
||||
all_sequences = Table(
|
||||
"all_sequences" + DB_LINK_PLACEHOLDER,
|
||||
dictionary_meta,
|
||||
Column("sequence_owner", VARCHAR2(128), nullable=False),
|
||||
Column("sequence_name", VARCHAR2(128), nullable=False),
|
||||
Column("min_value", NUMBER),
|
||||
Column("max_value", NUMBER),
|
||||
Column("increment_by", NUMBER, nullable=False),
|
||||
Column("cycle_flag", VARCHAR2(1)),
|
||||
Column("order_flag", VARCHAR2(1)),
|
||||
Column("cache_size", NUMBER, nullable=False),
|
||||
Column("last_number", NUMBER, nullable=False),
|
||||
Column("scale_flag", VARCHAR2(1)),
|
||||
Column("extend_flag", VARCHAR2(1)),
|
||||
Column("sharded_flag", VARCHAR2(1)),
|
||||
Column("session_flag", VARCHAR2(1)),
|
||||
Column("keep_value", VARCHAR2(1)),
|
||||
).alias("a_sequences")
|
||||
|
||||
all_users = Table(
|
||||
"all_users" + DB_LINK_PLACEHOLDER,
|
||||
dictionary_meta,
|
||||
Column("username", VARCHAR2(128), nullable=False),
|
||||
Column("user_id", NUMBER, nullable=False),
|
||||
Column("created", DATE, nullable=False),
|
||||
Column("common", VARCHAR2(3)),
|
||||
Column("oracle_maintained", VARCHAR2(1)),
|
||||
Column("inherited", VARCHAR2(3)),
|
||||
Column("default_collation", VARCHAR2(100)),
|
||||
Column("implicit", VARCHAR2(3)),
|
||||
Column("all_shard", VARCHAR2(3)),
|
||||
Column("external_shard", VARCHAR2(3)),
|
||||
).alias("a_users")
|
||||
|
||||
all_mviews = Table(
|
||||
"all_mviews" + DB_LINK_PLACEHOLDER,
|
||||
dictionary_meta,
|
||||
Column("owner", VARCHAR2(128), nullable=False),
|
||||
Column("mview_name", VARCHAR2(128), nullable=False),
|
||||
Column("container_name", VARCHAR2(128), nullable=False),
|
||||
Column("query", LONG),
|
||||
Column("query_len", NUMBER(38)),
|
||||
Column("updatable", VARCHAR2(1)),
|
||||
Column("update_log", VARCHAR2(128)),
|
||||
Column("master_rollback_seg", VARCHAR2(128)),
|
||||
Column("master_link", VARCHAR2(128)),
|
||||
Column("rewrite_enabled", VARCHAR2(1)),
|
||||
Column("rewrite_capability", VARCHAR2(9)),
|
||||
Column("refresh_mode", VARCHAR2(6)),
|
||||
Column("refresh_method", VARCHAR2(8)),
|
||||
Column("build_mode", VARCHAR2(9)),
|
||||
Column("fast_refreshable", VARCHAR2(18)),
|
||||
Column("last_refresh_type", VARCHAR2(8)),
|
||||
Column("last_refresh_date", DATE),
|
||||
Column("last_refresh_end_time", DATE),
|
||||
Column("staleness", VARCHAR2(19)),
|
||||
Column("after_fast_refresh", VARCHAR2(19)),
|
||||
Column("unknown_prebuilt", VARCHAR2(1)),
|
||||
Column("unknown_plsql_func", VARCHAR2(1)),
|
||||
Column("unknown_external_table", VARCHAR2(1)),
|
||||
Column("unknown_consider_fresh", VARCHAR2(1)),
|
||||
Column("unknown_import", VARCHAR2(1)),
|
||||
Column("unknown_trusted_fd", VARCHAR2(1)),
|
||||
Column("compile_state", VARCHAR2(19)),
|
||||
Column("use_no_index", VARCHAR2(1)),
|
||||
Column("stale_since", DATE),
|
||||
Column("num_pct_tables", NUMBER),
|
||||
Column("num_fresh_pct_regions", NUMBER),
|
||||
Column("num_stale_pct_regions", NUMBER),
|
||||
Column("segment_created", VARCHAR2(3)),
|
||||
Column("evaluation_edition", VARCHAR2(128)),
|
||||
Column("unusable_before", VARCHAR2(128)),
|
||||
Column("unusable_beginning", VARCHAR2(128)),
|
||||
Column("default_collation", VARCHAR2(100)),
|
||||
Column("on_query_computation", VARCHAR2(1)),
|
||||
Column("auto", VARCHAR2(3)),
|
||||
).alias("a_mviews")
|
||||
|
||||
all_tab_identity_cols = Table(
|
||||
"all_tab_identity_cols" + DB_LINK_PLACEHOLDER,
|
||||
dictionary_meta,
|
||||
Column("owner", VARCHAR2(128), nullable=False),
|
||||
Column("table_name", VARCHAR2(128), nullable=False),
|
||||
Column("column_name", VARCHAR2(128), nullable=False),
|
||||
Column("generation_type", VARCHAR2(10)),
|
||||
Column("sequence_name", VARCHAR2(128), nullable=False),
|
||||
Column("identity_options", VARCHAR2(298)),
|
||||
).alias("a_tab_identity_cols")
|
||||
|
||||
all_tab_cols = Table(
|
||||
"all_tab_cols" + DB_LINK_PLACEHOLDER,
|
||||
dictionary_meta,
|
||||
Column("owner", VARCHAR2(128), nullable=False),
|
||||
Column("table_name", VARCHAR2(128), nullable=False),
|
||||
Column("column_name", VARCHAR2(128), nullable=False),
|
||||
Column("data_type", VARCHAR2(128)),
|
||||
Column("data_type_mod", VARCHAR2(3)),
|
||||
Column("data_type_owner", VARCHAR2(128)),
|
||||
Column("data_length", NUMBER, nullable=False),
|
||||
Column("data_precision", NUMBER),
|
||||
Column("data_scale", NUMBER),
|
||||
Column("nullable", VARCHAR2(1)),
|
||||
Column("column_id", NUMBER),
|
||||
Column("default_length", NUMBER),
|
||||
Column("data_default", LONG),
|
||||
Column("num_distinct", NUMBER),
|
||||
Column("low_value", RAW(1000)),
|
||||
Column("high_value", RAW(1000)),
|
||||
Column("density", NUMBER),
|
||||
Column("num_nulls", NUMBER),
|
||||
Column("num_buckets", NUMBER),
|
||||
Column("last_analyzed", DATE),
|
||||
Column("sample_size", NUMBER),
|
||||
Column("character_set_name", VARCHAR2(44)),
|
||||
Column("char_col_decl_length", NUMBER),
|
||||
Column("global_stats", VARCHAR2(3)),
|
||||
Column("user_stats", VARCHAR2(3)),
|
||||
Column("avg_col_len", NUMBER),
|
||||
Column("char_length", NUMBER),
|
||||
Column("char_used", VARCHAR2(1)),
|
||||
Column("v80_fmt_image", VARCHAR2(3)),
|
||||
Column("data_upgraded", VARCHAR2(3)),
|
||||
Column("hidden_column", VARCHAR2(3)),
|
||||
Column("virtual_column", VARCHAR2(3)),
|
||||
Column("segment_column_id", NUMBER),
|
||||
Column("internal_column_id", NUMBER, nullable=False),
|
||||
Column("histogram", VARCHAR2(15)),
|
||||
Column("qualified_col_name", VARCHAR2(4000)),
|
||||
Column("user_generated", VARCHAR2(3)),
|
||||
Column("default_on_null", VARCHAR2(3)),
|
||||
Column("identity_column", VARCHAR2(3)),
|
||||
Column("evaluation_edition", VARCHAR2(128)),
|
||||
Column("unusable_before", VARCHAR2(128)),
|
||||
Column("unusable_beginning", VARCHAR2(128)),
|
||||
Column("collation", VARCHAR2(100)),
|
||||
Column("collated_column_id", NUMBER),
|
||||
).alias("a_tab_cols")
|
||||
|
||||
all_tab_comments = Table(
|
||||
"all_tab_comments" + DB_LINK_PLACEHOLDER,
|
||||
dictionary_meta,
|
||||
Column("owner", VARCHAR2(128), nullable=False),
|
||||
Column("table_name", VARCHAR2(128), nullable=False),
|
||||
Column("table_type", VARCHAR2(11)),
|
||||
Column("comments", VARCHAR2(4000)),
|
||||
Column("origin_con_id", NUMBER),
|
||||
).alias("a_tab_comments")
|
||||
|
||||
all_col_comments = Table(
|
||||
"all_col_comments" + DB_LINK_PLACEHOLDER,
|
||||
dictionary_meta,
|
||||
Column("owner", VARCHAR2(128), nullable=False),
|
||||
Column("table_name", VARCHAR2(128), nullable=False),
|
||||
Column("column_name", VARCHAR2(128), nullable=False),
|
||||
Column("comments", VARCHAR2(4000)),
|
||||
Column("origin_con_id", NUMBER),
|
||||
).alias("a_col_comments")
|
||||
|
||||
all_mview_comments = Table(
|
||||
"all_mview_comments" + DB_LINK_PLACEHOLDER,
|
||||
dictionary_meta,
|
||||
Column("owner", VARCHAR2(128), nullable=False),
|
||||
Column("mview_name", VARCHAR2(128), nullable=False),
|
||||
Column("comments", VARCHAR2(4000)),
|
||||
).alias("a_mview_comments")
|
||||
|
||||
all_ind_columns = Table(
|
||||
"all_ind_columns" + DB_LINK_PLACEHOLDER,
|
||||
dictionary_meta,
|
||||
Column("index_owner", VARCHAR2(128), nullable=False),
|
||||
Column("index_name", VARCHAR2(128), nullable=False),
|
||||
Column("table_owner", VARCHAR2(128), nullable=False),
|
||||
Column("table_name", VARCHAR2(128), nullable=False),
|
||||
Column("column_name", VARCHAR2(4000)),
|
||||
Column("column_position", NUMBER, nullable=False),
|
||||
Column("column_length", NUMBER, nullable=False),
|
||||
Column("char_length", NUMBER),
|
||||
Column("descend", VARCHAR2(4)),
|
||||
Column("collated_column_id", NUMBER),
|
||||
).alias("a_ind_columns")
|
||||
|
||||
# Reflection of the Oracle ALL_INDEXES static data dictionary view, used by
# the dialect to query index metadata.  DB_LINK_PLACEHOLDER is presumably
# replaced with an optional "@dblink" suffix at query-render time -- the
# constant is defined elsewhere in this module; confirm there.
all_indexes = Table(
    "all_indexes" + DB_LINK_PLACEHOLDER,
    dictionary_meta,
    Column("owner", VARCHAR2(128), nullable=False),
    Column("index_name", VARCHAR2(128), nullable=False),
    Column("index_type", VARCHAR2(27)),
    Column("table_owner", VARCHAR2(128), nullable=False),
    Column("table_name", VARCHAR2(128), nullable=False),
    Column("table_type", CHAR(11)),
    Column("uniqueness", VARCHAR2(9)),
    Column("compression", VARCHAR2(13)),
    Column("prefix_length", NUMBER),
    Column("tablespace_name", VARCHAR2(30)),
    Column("ini_trans", NUMBER),
    Column("max_trans", NUMBER),
    Column("initial_extent", NUMBER),
    Column("next_extent", NUMBER),
    Column("min_extents", NUMBER),
    Column("max_extents", NUMBER),
    Column("pct_increase", NUMBER),
    Column("pct_threshold", NUMBER),
    Column("include_column", NUMBER),
    Column("freelists", NUMBER),
    Column("freelist_groups", NUMBER),
    Column("pct_free", NUMBER),
    Column("logging", VARCHAR2(3)),
    Column("blevel", NUMBER),
    Column("leaf_blocks", NUMBER),
    Column("distinct_keys", NUMBER),
    Column("avg_leaf_blocks_per_key", NUMBER),
    Column("avg_data_blocks_per_key", NUMBER),
    Column("clustering_factor", NUMBER),
    Column("status", VARCHAR2(8)),
    Column("num_rows", NUMBER),
    Column("sample_size", NUMBER),
    Column("last_analyzed", DATE),
    Column("degree", VARCHAR2(40)),
    Column("instances", VARCHAR2(40)),
    Column("partitioned", VARCHAR2(3)),
    Column("temporary", VARCHAR2(1)),
    Column("generated", VARCHAR2(1)),
    Column("secondary", VARCHAR2(1)),
    Column("buffer_pool", VARCHAR2(7)),
    Column("flash_cache", VARCHAR2(7)),
    Column("cell_flash_cache", VARCHAR2(7)),
    Column("user_stats", VARCHAR2(3)),
    Column("duration", VARCHAR2(15)),
    Column("pct_direct_access", NUMBER),
    Column("ityp_owner", VARCHAR2(128)),
    Column("ityp_name", VARCHAR2(128)),
    Column("parameters", VARCHAR2(1000)),
    Column("global_stats", VARCHAR2(3)),
    Column("domidx_status", VARCHAR2(12)),
    Column("domidx_opstatus", VARCHAR2(6)),
    Column("funcidx_status", VARCHAR2(8)),
    Column("join_index", VARCHAR2(3)),
    Column("iot_redundant_pkey_elim", VARCHAR2(3)),
    Column("dropped", VARCHAR2(3)),
    Column("visibility", VARCHAR2(9)),
    Column("domidx_management", VARCHAR2(14)),
    Column("segment_created", VARCHAR2(3)),
    Column("orphaned_entries", VARCHAR2(3)),
    Column("indexing", VARCHAR2(7)),
    Column("auto", VARCHAR2(3)),
).alias("a_indexes")
|
||||
|
||||
# Reflection of the ALL_IND_EXPRESSIONS view: expressions backing
# function-based index columns.  "column_expression" is LONG, which Oracle
# only exposes as such from the dictionary.
all_ind_expressions = Table(
    "all_ind_expressions" + DB_LINK_PLACEHOLDER,
    dictionary_meta,
    Column("index_owner", VARCHAR2(128), nullable=False),
    Column("index_name", VARCHAR2(128), nullable=False),
    Column("table_owner", VARCHAR2(128), nullable=False),
    Column("table_name", VARCHAR2(128), nullable=False),
    Column("column_expression", LONG),
    Column("column_position", NUMBER, nullable=False),
).alias("a_ind_expressions")
|
||||
|
||||
# Reflection of the ALL_CONSTRAINTS view.  Both the LONG form of the check
# condition ("search_condition") and the VARCHAR2 form added in newer Oracle
# versions ("search_condition_vc") are declared; which one is queried is
# decided elsewhere.
all_constraints = Table(
    "all_constraints" + DB_LINK_PLACEHOLDER,
    dictionary_meta,
    Column("owner", VARCHAR2(128)),
    Column("constraint_name", VARCHAR2(128)),
    Column("constraint_type", VARCHAR2(1)),
    Column("table_name", VARCHAR2(128)),
    Column("search_condition", LONG),
    Column("search_condition_vc", VARCHAR2(4000)),
    Column("r_owner", VARCHAR2(128)),
    Column("r_constraint_name", VARCHAR2(128)),
    Column("delete_rule", VARCHAR2(9)),
    Column("status", VARCHAR2(8)),
    Column("deferrable", VARCHAR2(14)),
    Column("deferred", VARCHAR2(9)),
    Column("validated", VARCHAR2(13)),
    Column("generated", VARCHAR2(14)),
    Column("bad", VARCHAR2(3)),
    Column("rely", VARCHAR2(4)),
    Column("last_change", DATE),
    Column("index_owner", VARCHAR2(128)),
    Column("index_name", VARCHAR2(128)),
    Column("invalid", VARCHAR2(7)),
    Column("view_related", VARCHAR2(14)),
    Column("origin_con_id", VARCHAR2(256)),
).alias("a_constraints")
|
||||
|
||||
# Reflection of the ALL_CONS_COLUMNS view: the columns participating in each
# constraint, ordered by "position".
all_cons_columns = Table(
    "all_cons_columns" + DB_LINK_PLACEHOLDER,
    dictionary_meta,
    Column("owner", VARCHAR2(128), nullable=False),
    Column("constraint_name", VARCHAR2(128), nullable=False),
    Column("table_name", VARCHAR2(128), nullable=False),
    Column("column_name", VARCHAR2(4000)),
    Column("position", NUMBER),
).alias("a_cons_columns")
|
||||
|
||||
# TODO figure out if it's still relevant, since there is no mention from here
|
||||
# https://docs.oracle.com/en/database/oracle/oracle-database/21/refrn/ALL_DB_LINKS.html
|
||||
# original note:
|
||||
# using user_db_links here since all_db_links appears
|
||||
# to have more restricted permissions.
|
||||
# https://docs.oracle.com/cd/B28359_01/server.111/b28310/ds_admin005.htm
|
||||
# will need to hear from more users if we are doing
|
||||
# the right thing here. See [ticket:2619]
|
||||
# Reflection of the ALL_DB_LINKS view (see the note above regarding the
# historical user_db_links/all_db_links permission question).
all_db_links = Table(
    "all_db_links" + DB_LINK_PLACEHOLDER,
    dictionary_meta,
    Column("owner", VARCHAR2(128), nullable=False),
    Column("db_link", VARCHAR2(128), nullable=False),
    Column("username", VARCHAR2(128)),
    Column("host", VARCHAR2(2000)),
    Column("created", DATE, nullable=False),
    Column("hidden", VARCHAR2(3)),
    Column("shard_internal", VARCHAR2(3)),
    Column("valid", VARCHAR2(3)),
    Column("intra_cdb", VARCHAR2(3)),
).alias("a_db_links")
|
||||
|
||||
# Reflection of the ALL_SYNONYMS view, used to resolve synonyms to their
# target owner/table (and optional db_link).
all_synonyms = Table(
    "all_synonyms" + DB_LINK_PLACEHOLDER,
    dictionary_meta,
    Column("owner", VARCHAR2(128)),
    Column("synonym_name", VARCHAR2(128)),
    Column("table_owner", VARCHAR2(128)),
    Column("table_name", VARCHAR2(128)),
    Column("db_link", VARCHAR2(128)),
    Column("origin_con_id", VARCHAR2(256)),
).alias("a_synonyms")
|
||||
|
||||
# Reflection of the ALL_OBJECTS view: every object visible to the current
# user, keyed by owner/object_name with its "object_type" discriminator.
all_objects = Table(
    "all_objects" + DB_LINK_PLACEHOLDER,
    dictionary_meta,
    Column("owner", VARCHAR2(128), nullable=False),
    Column("object_name", VARCHAR2(128), nullable=False),
    Column("subobject_name", VARCHAR2(128)),
    Column("object_id", NUMBER, nullable=False),
    Column("data_object_id", NUMBER),
    Column("object_type", VARCHAR2(23)),
    Column("created", DATE, nullable=False),
    Column("last_ddl_time", DATE, nullable=False),
    Column("timestamp", VARCHAR2(19)),
    Column("status", VARCHAR2(7)),
    Column("temporary", VARCHAR2(1)),
    Column("generated", VARCHAR2(1)),
    Column("secondary", VARCHAR2(1)),
    Column("namespace", NUMBER, nullable=False),
    Column("edition_name", VARCHAR2(128)),
    Column("sharing", VARCHAR2(13)),
    Column("editionable", VARCHAR2(1)),
    Column("oracle_maintained", VARCHAR2(1)),
    Column("application", VARCHAR2(1)),
    Column("default_collation", VARCHAR2(100)),
    Column("duplicated", VARCHAR2(1)),
    Column("sharded", VARCHAR2(1)),
    Column("created_appid", NUMBER),
    Column("created_vsnid", NUMBER),
    Column("modified_appid", NUMBER),
    Column("modified_vsnid", NUMBER),
).alias("a_objects")
|
@ -0,0 +1,947 @@
|
||||
# dialects/oracle/oracledb.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
r""".. dialect:: oracle+oracledb
|
||||
:name: python-oracledb
|
||||
:dbapi: oracledb
|
||||
:connectstring: oracle+oracledb://user:pass@hostname:port[/dbname][?service_name=<service>[&key=value&key=value...]]
|
||||
:url: https://oracle.github.io/python-oracledb/
|
||||
|
||||
Description
|
||||
-----------
|
||||
|
||||
Python-oracledb is the Oracle Database driver for Python. It features a default
|
||||
"thin" client mode that requires no dependencies, and an optional "thick" mode
|
||||
that uses Oracle Client libraries. It supports SQLAlchemy features including
|
||||
two phase transactions and Asyncio.
|
||||
|
||||
Python-oracledb is the renamed, updated cx_Oracle driver. Oracle is no longer
|
||||
doing any releases in the cx_Oracle namespace.
|
||||
|
||||
The SQLAlchemy ``oracledb`` dialect provides both a sync and an async
|
||||
implementation under the same dialect name. The proper version is
|
||||
selected depending on how the engine is created:
|
||||
|
||||
* calling :func:`_sa.create_engine` with ``oracle+oracledb://...`` will
|
||||
automatically select the sync version::
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
|
||||
sync_engine = create_engine(
|
||||
"oracle+oracledb://scott:tiger@localhost?service_name=FREEPDB1"
|
||||
)
|
||||
|
||||
* calling :func:`_asyncio.create_async_engine` with ``oracle+oracledb://...``
|
||||
will automatically select the async version::
|
||||
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
|
||||
asyncio_engine = create_async_engine(
|
||||
"oracle+oracledb://scott:tiger@localhost?service_name=FREEPDB1"
|
||||
)
|
||||
|
||||
The asyncio version of the dialect may also be specified explicitly using the
|
||||
``oracledb_async`` suffix::
|
||||
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
|
||||
asyncio_engine = create_async_engine(
|
||||
"oracle+oracledb_async://scott:tiger@localhost?service_name=FREEPDB1"
|
||||
)
|
||||
|
||||
.. versionadded:: 2.0.25 added support for the async version of oracledb.
|
||||
|
||||
Thick mode support
|
||||
------------------
|
||||
|
||||
By default, the python-oracledb driver runs in a "thin" mode that does not
|
||||
require Oracle Client libraries to be installed. The driver also supports a
|
||||
"thick" mode that uses Oracle Client libraries to get functionality such as
|
||||
Oracle Application Continuity.
|
||||
|
||||
To enable thick mode, call `oracledb.init_oracle_client()
|
||||
<https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#oracledb.init_oracle_client>`_
|
||||
explicitly, or pass the parameter ``thick_mode=True`` to
|
||||
:func:`_sa.create_engine`. To pass custom arguments to
|
||||
``init_oracle_client()``, like the ``lib_dir`` path, a dict may be passed, for
|
||||
example::
|
||||
|
||||
engine = sa.create_engine(
|
||||
"oracle+oracledb://...",
|
||||
thick_mode={
|
||||
"lib_dir": "/path/to/oracle/client/lib",
|
||||
"config_dir": "/path/to/network_config_file_directory",
|
||||
"driver_name": "my-app : 1.0.0",
|
||||
},
|
||||
)
|
||||
|
||||
Note that passing a ``lib_dir`` path should only be done on macOS or
|
||||
Windows. On Linux it does not behave as you might expect.
|
||||
|
||||
.. seealso::
|
||||
|
||||
python-oracledb documentation `Enabling python-oracledb Thick mode
|
||||
<https://python-oracledb.readthedocs.io/en/latest/user_guide/initialization.html#enabling-python-oracledb-thick-mode>`_
|
||||
|
||||
Connecting to Oracle Database
|
||||
-----------------------------
|
||||
|
||||
python-oracledb provides several methods of indicating the target database.
|
||||
The dialect translates from a series of different URL forms.
|
||||
|
||||
Given the hostname, port and service name of the target database, you can
|
||||
connect in SQLAlchemy using the ``service_name`` query string parameter::
|
||||
|
||||
engine = create_engine(
|
||||
"oracle+oracledb://scott:tiger@hostname:port?service_name=myservice"
|
||||
)
|
||||
|
||||
Connecting with Easy Connect strings
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
You can pass any valid python-oracledb connection string as the ``dsn`` key
|
||||
value in a :paramref:`_sa.create_engine.connect_args` dictionary. See
|
||||
python-oracledb documentation `Oracle Net Services Connection Strings
|
||||
<https://python-oracledb.readthedocs.io/en/latest/user_guide/connection_handling.html#oracle-net-services-connection-strings>`_.
|
||||
|
||||
For example to use an `Easy Connect string
|
||||
<https://download.oracle.com/ocomdocs/global/Oracle-Net-Easy-Connect-Plus.pdf>`_
|
||||
with a timeout to prevent connection establishment from hanging if the network
|
||||
transport to the database cannot be established in 30 seconds, and also setting
|
||||
a keep-alive time of 60 seconds to stop idle network connections from being
|
||||
terminated by a firewall::
|
||||
|
||||
e = create_engine(
|
||||
"oracle+oracledb://@",
|
||||
connect_args={
|
||||
"user": "scott",
|
||||
"password": "tiger",
|
||||
"dsn": "hostname:port/myservice?transport_connect_timeout=30&expire_time=60",
|
||||
},
|
||||
)
|
||||
|
||||
The Easy Connect syntax has been enhanced during the life of Oracle Database.
|
||||
Review the documentation for your database version. The current documentation
|
||||
is at `Understanding the Easy Connect Naming Method
|
||||
<https://www.oracle.com/pls/topic/lookup?ctx=dblatest&id=GUID-B0437826-43C1-49EC-A94D-B650B6A4A6EE>`_.
|
||||
|
||||
The general syntax is similar to:
|
||||
|
||||
.. sourcecode:: text
|
||||
|
||||
[[protocol:]//]host[:port][/[service_name]][?parameter_name=value{¶meter_name=value}]
|
||||
|
||||
Note that although the SQLAlchemy URL syntax ``hostname:port/dbname`` looks
|
||||
like Oracle's Easy Connect syntax, it is different. SQLAlchemy's URL requires a
|
||||
system identifier (SID) for the ``dbname`` component::
|
||||
|
||||
engine = create_engine("oracle+oracledb://scott:tiger@hostname:port/sid")
|
||||
|
||||
Easy Connect syntax does not support SIDs. It uses service names, which are
|
||||
the preferred choice for connecting to Oracle Database.
|
||||
|
||||
Passing python-oracledb connect arguments
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Other python-oracledb driver `connection options
|
||||
<https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#oracledb.connect>`_
|
||||
can be passed in ``connect_args``. For example::
|
||||
|
||||
e = create_engine(
|
||||
"oracle+oracledb://@",
|
||||
connect_args={
|
||||
"user": "scott",
|
||||
"password": "tiger",
|
||||
"dsn": "hostname:port/myservice",
|
||||
"events": True,
|
||||
"mode": oracledb.AUTH_MODE_SYSDBA,
|
||||
},
|
||||
)
|
||||
|
||||
Connecting with tnsnames.ora TNS aliases
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If no port, database name, or service name is provided, the dialect will use an
|
||||
Oracle Database DSN "connection string". This takes the "hostname" portion of
|
||||
the URL as the data source name. For example, if the ``tnsnames.ora`` file
|
||||
contains a `TNS Alias
|
||||
<https://python-oracledb.readthedocs.io/en/latest/user_guide/connection_handling.html#tns-aliases-for-connection-strings>`_
|
||||
of ``myalias`` as below:
|
||||
|
||||
.. sourcecode:: text
|
||||
|
||||
myalias =
|
||||
(DESCRIPTION =
|
||||
(ADDRESS = (PROTOCOL = TCP)(HOST = mymachine.example.com)(PORT = 1521))
|
||||
(CONNECT_DATA =
|
||||
(SERVER = DEDICATED)
|
||||
(SERVICE_NAME = orclpdb1)
|
||||
)
|
||||
)
|
||||
|
||||
The python-oracledb dialect connects to this database service when ``myalias`` is the
|
||||
hostname portion of the URL, without specifying a port, database name or
|
||||
``service_name``::
|
||||
|
||||
engine = create_engine("oracle+oracledb://scott:tiger@myalias")
|
||||
|
||||
Connecting to Oracle Autonomous Database
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Users of Oracle Autonomous Database should either use the TNS Alias URL
|
||||
shown above, or pass the TNS Alias as the ``dsn`` key value in a
|
||||
:paramref:`_sa.create_engine.connect_args` dictionary.
|
||||
|
||||
If Oracle Autonomous Database is configured for mutual TLS ("mTLS")
|
||||
connections, then additional configuration is required as shown in `Connecting
|
||||
to Oracle Cloud Autonomous Databases
|
||||
<https://python-oracledb.readthedocs.io/en/latest/user_guide/connection_handling.html#connecting-to-oracle-cloud-autonomous-databases>`_. In
|
||||
summary, Thick mode users should configure file locations and set the wallet
|
||||
path in ``sqlnet.ora`` appropriately::
|
||||
|
||||
e = create_engine(
|
||||
"oracle+oracledb://@",
|
||||
thick_mode={
|
||||
# directory containing tnsnames.ora and cwallet.so
|
||||
"config_dir": "/opt/oracle/wallet_dir",
|
||||
},
|
||||
connect_args={
|
||||
"user": "scott",
|
||||
"password": "tiger",
|
||||
"dsn": "mydb_high",
|
||||
},
|
||||
)
|
||||
|
||||
Thin mode users of mTLS should pass the appropriate directories and PEM wallet
|
||||
password when creating the engine, similar to::
|
||||
|
||||
e = create_engine(
|
||||
"oracle+oracledb://@",
|
||||
connect_args={
|
||||
"user": "scott",
|
||||
"password": "tiger",
|
||||
"dsn": "mydb_high",
|
||||
"config_dir": "/opt/oracle/wallet_dir", # directory containing tnsnames.ora
|
||||
"wallet_location": "/opt/oracle/wallet_dir", # directory containing ewallet.pem
|
||||
"wallet_password": "top secret", # password for the PEM file
|
||||
},
|
||||
)
|
||||
|
||||
Typically ``config_dir`` and ``wallet_location`` are the same directory, which
|
||||
is where the Oracle Autonomous Database wallet zip file was extracted. Note
|
||||
this directory should be protected.
|
||||
|
||||
Connection Pooling
|
||||
------------------
|
||||
|
||||
Applications with multiple concurrent users should use connection pooling. A
|
||||
minimal sized connection pool is also beneficial for long-running, single-user
|
||||
applications that do not frequently use a connection.
|
||||
|
||||
The python-oracledb driver provides its own connection pool implementation that
|
||||
may be used in place of SQLAlchemy's pooling functionality. The driver pool
|
||||
gives support for high availability features such as dead connection detection,
|
||||
connection draining for planned database downtime, support for Oracle
|
||||
Application Continuity and Transparent Application Continuity, and gives
|
||||
support for `Database Resident Connection Pooling (DRCP)
|
||||
<https://python-oracledb.readthedocs.io/en/latest/user_guide/connection_handling.html#database-resident-connection-pooling-drcp>`_.
|
||||
|
||||
To take advantage of python-oracledb's pool, use the
|
||||
:paramref:`_sa.create_engine.creator` parameter to provide a function that
|
||||
returns a new connection, along with setting
|
||||
:paramref:`_sa.create_engine.pool_class` to ``NullPool`` to disable
|
||||
SQLAlchemy's pooling::
|
||||
|
||||
import oracledb
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.pool import NullPool
|
||||
|
||||
# Uncomment to use the optional python-oracledb Thick mode.
|
||||
# Review the python-oracledb doc for the appropriate parameters
|
||||
# oracledb.init_oracle_client(<your parameters>)
|
||||
|
||||
pool = oracledb.create_pool(
|
||||
user="scott",
|
||||
password="tiger",
|
||||
dsn="localhost:1521/freepdb1",
|
||||
min=1,
|
||||
max=4,
|
||||
increment=1,
|
||||
)
|
||||
engine = create_engine(
|
||||
"oracle+oracledb://", creator=pool.acquire, poolclass=NullPool
|
||||
)
|
||||
|
||||
The above engine may then be used normally. Internally, python-oracledb handles
|
||||
connection pooling::
|
||||
|
||||
with engine.connect() as conn:
|
||||
print(conn.scalar(text("select 1 from dual")))
|
||||
|
||||
Refer to the python-oracledb documentation for `oracledb.create_pool()
|
||||
<https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#oracledb.create_pool>`_
|
||||
for the arguments that can be used when creating a connection pool.
|
||||
|
||||
.. _drcp:
|
||||
|
||||
Using Oracle Database Resident Connection Pooling (DRCP)
|
||||
--------------------------------------------------------
|
||||
|
||||
When using Oracle Database's Database Resident Connection Pooling (DRCP), the
|
||||
best practice is to specify a connection class and "purity". Refer to the
|
||||
`python-oracledb documentation on DRCP
|
||||
<https://python-oracledb.readthedocs.io/en/latest/user_guide/connection_handling.html#database-resident-connection-pooling-drcp>`_.
|
||||
For example::
|
||||
|
||||
import oracledb
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.pool import NullPool
|
||||
|
||||
# Uncomment to use the optional python-oracledb Thick mode.
|
||||
# Review the python-oracledb doc for the appropriate parameters
|
||||
# oracledb.init_oracle_client(<your parameters>)
|
||||
|
||||
pool = oracledb.create_pool(
|
||||
user="scott",
|
||||
password="tiger",
|
||||
dsn="localhost:1521/freepdb1",
|
||||
min=1,
|
||||
max=4,
|
||||
increment=1,
|
||||
cclass="MYCLASS",
|
||||
purity=oracledb.PURITY_SELF,
|
||||
)
|
||||
engine = create_engine(
|
||||
"oracle+oracledb://", creator=pool.acquire, poolclass=NullPool
|
||||
)
|
||||
|
||||
The above engine may then be used normally where python-oracledb handles
|
||||
application connection pooling and Oracle Database additionally uses DRCP::
|
||||
|
||||
with engine.connect() as conn:
|
||||
print(conn.scalar(text("select 1 from dual")))
|
||||
|
||||
If you wish to use different connection classes or purities for different
|
||||
connections, then wrap ``pool.acquire()``::
|
||||
|
||||
import oracledb
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.pool import NullPool
|
||||
|
||||
# Uncomment to use python-oracledb Thick mode.
|
||||
# Review the python-oracledb doc for the appropriate parameters
|
||||
# oracledb.init_oracle_client(<your parameters>)
|
||||
|
||||
pool = oracledb.create_pool(
|
||||
user="scott",
|
||||
password="tiger",
|
||||
dsn="localhost:1521/freepdb1",
|
||||
min=1,
|
||||
max=4,
|
||||
increment=1,
|
||||
cclass="MYCLASS",
|
||||
purity=oracledb.PURITY_SELF,
|
||||
)
|
||||
|
||||
|
||||
def creator():
|
||||
return pool.acquire(cclass="MYOTHERCLASS", purity=oracledb.PURITY_NEW)
|
||||
|
||||
|
||||
engine = create_engine(
|
||||
"oracle+oracledb://", creator=creator, poolclass=NullPool
|
||||
)
|
||||
|
||||
Engine Options consumed by the SQLAlchemy oracledb dialect outside of the driver
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
There are also options that are consumed by the SQLAlchemy oracledb dialect
|
||||
itself. These options are always passed directly to :func:`_sa.create_engine`,
|
||||
such as::
|
||||
|
||||
e = create_engine("oracle+oracledb://user:pass@tnsalias", arraysize=500)
|
||||
|
||||
The parameters accepted by the oracledb dialect are as follows:
|
||||
|
||||
* ``arraysize`` - set the driver cursor.arraysize value. It defaults to
|
||||
``None``, indicating that the driver default value of 100 should be used.
|
||||
This setting controls how many rows are buffered when fetching rows, and can
|
||||
have a significant effect on performance if increased for queries that return
|
||||
large numbers of rows.
|
||||
|
||||
.. versionchanged:: 2.0.26 - changed the default value from 50 to None,
|
||||
to use the default value of the driver itself.
|
||||
|
||||
* ``auto_convert_lobs`` - defaults to True; See :ref:`oracledb_lob`.
|
||||
|
||||
* ``coerce_to_decimal`` - see :ref:`oracledb_numeric` for detail.
|
||||
|
||||
* ``encoding_errors`` - see :ref:`oracledb_unicode_encoding_errors` for detail.
|
||||
|
||||
.. _oracledb_unicode:
|
||||
|
||||
Unicode
|
||||
-------
|
||||
|
||||
As is the case for all DBAPIs under Python 3, all strings are inherently
|
||||
Unicode strings.
|
||||
|
||||
Ensuring the Correct Client Encoding
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
In python-oracledb, the encoding used for all character data is "UTF-8".
|
||||
|
||||
Unicode-specific Column datatypes
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The Core expression language handles unicode data by use of the
|
||||
:class:`.Unicode` and :class:`.UnicodeText` datatypes. These types correspond
|
||||
to the VARCHAR2 and CLOB Oracle Database datatypes by default. When using
|
||||
these datatypes with Unicode data, it is expected that the database is
|
||||
configured with a Unicode-aware character set so that the VARCHAR2 and CLOB
|
||||
datatypes can accommodate the data.
|
||||
|
||||
In the case that Oracle Database is not configured with a Unicode character
|
||||
set, the two options are to use the :class:`_types.NCHAR` and
|
||||
:class:`_oracle.NCLOB` datatypes explicitly, or to pass the flag
|
||||
``use_nchar_for_unicode=True`` to :func:`_sa.create_engine`, which will cause
|
||||
the SQLAlchemy dialect to use NCHAR/NCLOB for the :class:`.Unicode` /
|
||||
:class:`.UnicodeText` datatypes instead of VARCHAR/CLOB.
|
||||
|
||||
.. versionchanged:: 1.3 The :class:`.Unicode` and :class:`.UnicodeText`
|
||||
datatypes now correspond to the ``VARCHAR2`` and ``CLOB`` Oracle Database
|
||||
datatypes unless the ``use_nchar_for_unicode=True`` is passed to the dialect
|
||||
when :func:`_sa.create_engine` is called.
|
||||
|
||||
|
||||
.. _oracledb_unicode_encoding_errors:
|
||||
|
||||
Encoding Errors
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
For the unusual case that data in Oracle Database is present with a broken
|
||||
encoding, the dialect accepts a parameter ``encoding_errors`` which will be
|
||||
passed to Unicode decoding functions in order to affect how decoding errors are
|
||||
handled. The value is ultimately consumed by the Python `decode
|
||||
<https://docs.python.org/3/library/stdtypes.html#bytes.decode>`_ function, and
|
||||
is passed both via python-oracledb's ``encodingErrors`` parameter consumed by
|
||||
``Cursor.var()``, as well as SQLAlchemy's own decoding function, as the
|
||||
python-oracledb dialect makes use of both under different circumstances.
|
||||
|
||||
.. versionadded:: 1.3.11
|
||||
|
||||
|
||||
.. _oracledb_setinputsizes:
|
||||
|
||||
Fine grained control over python-oracledb data binding with setinputsizes
|
||||
-------------------------------------------------------------------------
|
||||
|
||||
The python-oracledb DBAPI has a deep and fundamental reliance upon the usage of
|
||||
the DBAPI ``setinputsizes()`` call. The purpose of this call is to establish
|
||||
the datatypes that are bound to a SQL statement for Python values being passed
|
||||
as parameters. While virtually no other DBAPI assigns any use to the
|
||||
``setinputsizes()`` call, the python-oracledb DBAPI relies upon it heavily in
|
||||
its interactions with the Oracle Database, and in some scenarios it is not
|
||||
possible for SQLAlchemy to know exactly how data should be bound, as some
|
||||
settings can cause profoundly different performance characteristics, while
|
||||
altering the type coercion behavior at the same time.
|
||||
|
||||
Users of the oracledb dialect are **strongly encouraged** to read through
|
||||
python-oracledb's list of built-in datatype symbols at `Database Types
|
||||
<https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#database-types>`_
|
||||
Note that in some cases, significant performance degradation can occur when
|
||||
using these types vs. not.
|
||||
|
||||
On the SQLAlchemy side, the :meth:`.DialectEvents.do_setinputsizes` event can
|
||||
be used both for runtime visibility (e.g. logging) of the setinputsizes step as
|
||||
well as to fully control how ``setinputsizes()`` is used on a per-statement
|
||||
basis.
|
||||
|
||||
.. versionadded:: 1.2.9 Added :meth:`.DialectEvents.do_setinputsizes`
|
||||
|
||||
|
||||
Example 1 - logging all setinputsizes calls
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The following example illustrates how to log the intermediary values from a
|
||||
SQLAlchemy perspective before they are converted to the raw ``setinputsizes()``
|
||||
parameter dictionary. The keys of the dictionary are :class:`.BindParameter`
|
||||
objects which have a ``.key`` and a ``.type`` attribute::
|
||||
|
||||
from sqlalchemy import create_engine, event
|
||||
|
||||
engine = create_engine(
|
||||
"oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1"
|
||||
)
|
||||
|
||||
|
||||
@event.listens_for(engine, "do_setinputsizes")
|
||||
def _log_setinputsizes(inputsizes, cursor, statement, parameters, context):
|
||||
for bindparam, dbapitype in inputsizes.items():
|
||||
log.info(
|
||||
"Bound parameter name: %s SQLAlchemy type: %r DBAPI object: %s",
|
||||
bindparam.key,
|
||||
bindparam.type,
|
||||
dbapitype,
|
||||
)
|
||||
|
||||
Example 2 - remove all bindings to CLOB
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
For performance, fetching LOB datatypes from Oracle Database is set by default
|
||||
for the ``Text`` type within SQLAlchemy. This setting can be modified as
|
||||
follows::
|
||||
|
||||
|
||||
from sqlalchemy import create_engine, event
|
||||
from oracledb import CLOB
|
||||
|
||||
engine = create_engine(
|
||||
"oracle+oracledb://scott:tiger@localhost:1521?service_name=freepdb1"
|
||||
)
|
||||
|
||||
|
||||
@event.listens_for(engine, "do_setinputsizes")
|
||||
def _remove_clob(inputsizes, cursor, statement, parameters, context):
|
||||
for bindparam, dbapitype in list(inputsizes.items()):
|
||||
if dbapitype is CLOB:
|
||||
del inputsizes[bindparam]
|
||||
|
||||
.. _oracledb_lob:
|
||||
|
||||
LOB Datatypes
|
||||
--------------
|
||||
|
||||
LOB datatypes refer to the "large object" datatypes such as CLOB, NCLOB and
|
||||
BLOB. Oracle Database can efficiently return these datatypes as a single
|
||||
buffer. SQLAlchemy makes use of type handlers to do this by default.
|
||||
|
||||
To disable the use of the type handlers and deliver LOB objects as classic
|
||||
buffered objects with a ``read()`` method, the parameter
|
||||
``auto_convert_lobs=False`` may be passed to :func:`_sa.create_engine`.
|
||||
|
||||
.. _oracledb_returning:
|
||||
|
||||
RETURNING Support
|
||||
-----------------
|
||||
|
||||
The oracledb dialect implements RETURNING using OUT parameters. The dialect
|
||||
supports RETURNING fully.
|
||||
|
||||
Two Phase Transaction Support
|
||||
-----------------------------
|
||||
|
||||
Two phase transactions are fully supported with python-oracledb. (Thin mode
|
||||
requires python-oracledb 2.3). APIs for two phase transactions are provided at
|
||||
the Core level via :meth:`_engine.Connection.begin_twophase` and
|
||||
:paramref:`_orm.Session.twophase` for transparent ORM use.
|
||||
|
||||
.. versionchanged:: 2.0.32 added support for two phase transactions
|
||||
|
||||
.. _oracledb_numeric:
|
||||
|
||||
Precision Numerics
|
||||
------------------
|
||||
|
||||
SQLAlchemy's numeric types can handle receiving and returning values as Python
|
||||
``Decimal`` objects or float objects. When a :class:`.Numeric` object, or a
|
||||
subclass such as :class:`.Float`, :class:`_oracle.DOUBLE_PRECISION` etc. is in
|
||||
use, the :paramref:`.Numeric.asdecimal` flag determines if values should be
|
||||
coerced to ``Decimal`` upon return, or returned as float objects. To make
|
||||
matters more complicated under Oracle Database, the ``NUMBER`` type can also
|
||||
represent integer values if the "scale" is zero, so the Oracle
|
||||
Database-specific :class:`_oracle.NUMBER` type takes this into account as well.
|
||||
|
||||
The oracledb dialect makes extensive use of connection- and cursor-level
|
||||
"outputtypehandler" callables in order to coerce numeric values as requested.
|
||||
These callables are specific to the specific flavor of :class:`.Numeric` in
|
||||
use, as well as if no SQLAlchemy typing objects are present. There are
|
||||
observed scenarios where Oracle Database may send incomplete or ambiguous
|
||||
information about the numeric types being returned, such as a query where the
|
||||
numeric types are buried under multiple levels of subquery. The type handlers
|
||||
do their best to make the right decision in all cases, deferring to the
|
||||
underlying python-oracledb DBAPI for all those cases where the driver can make
|
||||
the best decision.
|
||||
|
||||
When no typing objects are present, as when executing plain SQL strings, a
|
||||
default "outputtypehandler" is present which will generally return numeric
|
||||
values which specify precision and scale as Python ``Decimal`` objects. To
|
||||
disable this coercion to decimal for performance reasons, pass the flag
|
||||
``coerce_to_decimal=False`` to :func:`_sa.create_engine`::
|
||||
|
||||
engine = create_engine(
|
||||
"oracle+oracledb://scott:tiger@tnsalias", coerce_to_decimal=False
|
||||
)
|
||||
|
||||
The ``coerce_to_decimal`` flag only impacts the results of plain string
|
||||
SQL statements that are not otherwise associated with a :class:`.Numeric`
|
||||
SQLAlchemy type (or a subclass of such).
|
||||
|
||||
.. versionchanged:: 1.2 The numeric handling system for the oracle dialects has
|
||||
been reworked to take advantage of newer driver features as well as better
|
||||
integration of outputtypehandlers.
|
||||
|
||||
.. versionadded:: 2.0.0 added support for the python-oracledb driver.
|
||||
|
||||
""" # noqa
|
||||
from __future__ import annotations
|
||||
|
||||
import collections
|
||||
import re
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from . import cx_oracle as _cx_oracle
|
||||
from ... import exc
|
||||
from ... import pool
|
||||
from ...connectors.asyncio import AsyncAdapt_dbapi_connection
|
||||
from ...connectors.asyncio import AsyncAdapt_dbapi_cursor
|
||||
from ...connectors.asyncio import AsyncAdapt_dbapi_ss_cursor
|
||||
from ...connectors.asyncio import AsyncAdaptFallback_dbapi_connection
|
||||
from ...engine import default
|
||||
from ...util import asbool
|
||||
from ...util import await_fallback
|
||||
from ...util import await_only
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from oracledb import AsyncConnection
|
||||
from oracledb import AsyncCursor
|
||||
|
||||
|
||||
class OracleExecutionContext_oracledb(
    _cx_oracle.OracleExecutionContext_cx_oracle
):
    """Execution context for the sync oracledb driver.

    Inherits all behavior from the cx_Oracle execution context unchanged;
    oracledb is API-compatible with cx_Oracle for sync usage.
    """

    pass
|
||||
|
||||
|
||||
class OracleDialect_oracledb(_cx_oracle.OracleDialect_cx_oracle):
    """Synchronous SQLAlchemy dialect for the python-oracledb driver.

    Builds on the cx_Oracle dialect, adding oracledb-specific features:
    optional thick-mode initialization, oracledb version detection, and
    two-phase commit support via the driver's tpc_* methods.
    """

    supports_statement_cache = True
    execution_ctx_cls = OracleExecutionContext_oracledb

    driver = "oracledb"
    # minimum supported python-oracledb version for the sync dialect
    _min_version = (1,)

    def __init__(
        self,
        auto_convert_lobs=True,
        coerce_to_decimal=True,
        arraysize=None,
        encoding_errors=None,
        thick_mode=None,
        **kwargs,
    ):
        """Construct the dialect.

        :param thick_mode: when truthy, ``init_oracle_client()`` is called
         on the driver module; a dict value is passed through as keyword
         arguments to that call (e.g. ``lib_dir``).  The remaining
         parameters are forwarded to the cx_Oracle dialect.
        """
        super().__init__(
            auto_convert_lobs,
            coerce_to_decimal,
            arraysize,
            encoding_errors,
            **kwargs,
        )

        # an empty dict counts as "enable thick mode with defaults",
        # hence the explicit isinstance check alongside the truth test
        if self.dbapi is not None and (
            thick_mode or isinstance(thick_mode, dict)
        ):
            kw = thick_mode if isinstance(thick_mode, dict) else {}
            self.dbapi.init_oracle_client(**kw)

    @classmethod
    def import_dbapi(cls):
        """Return the raw ``oracledb`` module as the DBAPI."""
        import oracledb

        return oracledb

    @classmethod
    def is_thin_mode(cls, connection):
        # delegates to the driver connection's own ``thin`` attribute
        return connection.connection.dbapi_connection.thin

    @classmethod
    def get_async_dialect_cls(cls, url):
        """Return the asyncio variant used for async engine URLs."""
        return OracleDialectAsync_oracledb

    def _load_version(self, dbapi_module):
        """Parse the driver version string into ``self.oracledb_ver``.

        A version of ``(0, 0, 0)`` means "could not be determined" and
        bypasses the minimum-version check below.

        :raises exc.InvalidRequestError: if the detected version is below
         ``_min_version``.
        """
        version = (0, 0, 0)
        if dbapi_module is not None:
            # accepts "X.Y" or "X.Y.Z"; the patch component is optional
            m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", dbapi_module.version)
            if m:
                version = tuple(
                    int(x) for x in m.group(1, 2, 3) if x is not None
                )
        self.oracledb_ver = version
        if (
            self.oracledb_ver > (0, 0, 0)
            and self.oracledb_ver < self._min_version
        ):
            raise exc.InvalidRequestError(
                f"oracledb version {self._min_version} and above are supported"
            )

    def do_begin_twophase(self, connection, xid):
        # build the driver-level xid and remember it for later phases
        conn_xis = connection.connection.xid(*xid)
        connection.connection.tpc_begin(conn_xis)
        connection.connection.info["oracledb_xid"] = conn_xis

    def do_prepare_twophase(self, connection, xid):
        # tpc_prepare() reports whether there is anything to commit;
        # NOTE(review): stored on connection.info here, while the xid above
        # is stored on connection.connection.info — confirm intentional
        should_commit = connection.connection.tpc_prepare()
        connection.info["oracledb_should_commit"] = should_commit

    def do_rollback_twophase(
        self, connection, xid, is_prepared=True, recover=False
    ):
        # in recovery we must name the xid explicitly; otherwise the
        # driver rolls back the transaction begun on this connection
        if recover:
            conn_xid = connection.connection.xid(*xid)
        else:
            conn_xid = None
        connection.connection.tpc_rollback(conn_xid)

    def do_commit_twophase(
        self, connection, xid, is_prepared=True, recover=False
    ):
        conn_xid = None
        if not is_prepared:
            # one-phase path: prepare now and use its result
            should_commit = connection.connection.tpc_prepare()
        elif recover:
            # recovering a prepared transaction from another session
            conn_xid = connection.connection.xid(*xid)
            should_commit = True
        else:
            # normal path: reuse the answer recorded by do_prepare_twophase
            should_commit = connection.info["oracledb_should_commit"]
        if should_commit:
            connection.connection.tpc_commit(conn_xid)

    def do_recover_twophase(self, connection):
        """Return in-doubt xids as (format_id, global_txn_id, branch_qual)."""
        return [
            # oracledb seems to return bytes
            (
                fi,
                gti.decode() if isinstance(gti, bytes) else gti,
                bq.decode() if isinstance(bq, bytes) else bq,
            )
            for fi, gti, bq in connection.connection.tpc_recover()
        ]

    def _check_max_identifier_length(self, connection):
        # oracledb 2.5+ exposes the server-derived limit directly on the
        # connection; fall back to the cx_Oracle probe otherwise
        if self.oracledb_ver >= (2, 5):
            max_len = connection.connection.max_identifier_length
            if max_len is not None:
                return max_len
        return super()._check_max_identifier_length(connection)
|
||||
|
||||
|
||||
class AsyncAdapt_oracledb_cursor(AsyncAdapt_dbapi_cursor):
    """Adapts an oracledb ``AsyncCursor`` to the synchronous pep-249
    cursor interface expected by SQLAlchemy, buffering fetched rows
    locally for non-server-side use.
    """

    _cursor: AsyncCursor
    __slots__ = ()

    @property
    def outputtypehandler(self):
        # pass-through to the driver cursor's output type handler
        return self._cursor.outputtypehandler

    @outputtypehandler.setter
    def outputtypehandler(self, value):
        self._cursor.outputtypehandler = value

    def var(self, *args, **kwargs):
        # pass-through to oracledb's Cursor.var() bind-variable factory
        return self._cursor.var(*args, **kwargs)

    def close(self):
        # discard any buffered rows before releasing the driver cursor
        self._rows.clear()
        self._cursor.close()

    def setinputsizes(self, *args: Any, **kwargs: Any) -> Any:
        return self._cursor.setinputsizes(*args, **kwargs)

    def _aenter_cursor(self, cursor: AsyncCursor) -> AsyncCursor:
        try:
            return cursor.__enter__()
        except Exception as error:
            # presumably _handle_exception re-raises after translating the
            # error; otherwise this path returns None — TODO confirm
            self._adapt_connection._handle_exception(error)

    async def _execute_async(self, operation, parameters):
        # override to not use mutex, oracledb already has a mutex

        if parameters is None:
            result = await self._cursor.execute(operation)
        else:
            result = await self._cursor.execute(operation, parameters)

        # eagerly buffer the full result set so sync callers can fetch
        # without awaiting; server-side cursors stream instead
        if self._cursor.description and not self.server_side:
            self._rows = collections.deque(await self._cursor.fetchall())
        return result

    async def _executemany_async(
        self,
        operation,
        seq_of_parameters,
    ):
        # override to not use mutex, oracledb already has a mutex
        return await self._cursor.executemany(operation, seq_of_parameters)

    def __enter__(self):
        return self

    def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
        # context-manager exit simply closes the cursor
        self.close()
|
||||
|
||||
|
||||
class AsyncAdapt_oracledb_ss_cursor(
    AsyncAdapt_dbapi_ss_cursor, AsyncAdapt_oracledb_cursor
):
    """Server-side (streaming) variant of the oracledb async cursor."""

    __slots__ = ()

    def close(self) -> None:
        # guard makes close() idempotent; _cursor is nulled out so a
        # second call is a no-op rather than a double-close
        if self._cursor is not None:
            self._cursor.close()
            self._cursor = None  # type: ignore
|
||||
|
||||
|
||||
class AsyncAdapt_oracledb_connection(AsyncAdapt_dbapi_connection):
    """Adapts an oracledb ``AsyncConnection`` to the synchronous pep-249
    connection interface, proxying attributes to the underlying driver
    connection and bridging coroutines with ``self.await_()``.
    """

    _connection: AsyncConnection
    __slots__ = ()

    # async oracledb connections are thin-mode only; thick mode is not
    # supported by the driver's asyncio implementation
    thin = True

    _cursor_cls = AsyncAdapt_oracledb_cursor
    # NOTE(review): _ss_cursor_cls is None while ss_cursor() below
    # instantiates AsyncAdapt_oracledb_ss_cursor directly — confirm the
    # class attribute is unused on this subclass
    _ss_cursor_cls = None

    @property
    def autocommit(self):
        return self._connection.autocommit

    @autocommit.setter
    def autocommit(self, value):
        self._connection.autocommit = value

    @property
    def outputtypehandler(self):
        return self._connection.outputtypehandler

    @outputtypehandler.setter
    def outputtypehandler(self, value):
        self._connection.outputtypehandler = value

    @property
    def version(self):
        # server version string reported by the driver
        return self._connection.version

    @property
    def stmtcachesize(self):
        # size of the driver-level statement cache
        return self._connection.stmtcachesize

    @stmtcachesize.setter
    def stmtcachesize(self, value):
        self._connection.stmtcachesize = value

    @property
    def max_identifier_length(self):
        return self._connection.max_identifier_length

    def cursor(self):
        """Return a buffered (client-side) adapted cursor."""
        return AsyncAdapt_oracledb_cursor(self)

    def ss_cursor(self):
        """Return a streaming (server-side) adapted cursor."""
        return AsyncAdapt_oracledb_ss_cursor(self)

    def xid(self, *args: Any, **kwargs: Any) -> Any:
        # xid construction is synchronous in the driver; no await needed
        return self._connection.xid(*args, **kwargs)

    # two-phase-commit coroutines bridged to sync calls for the pep-249
    # facade used by the dialect's do_*_twophase methods

    def tpc_begin(self, *args: Any, **kwargs: Any) -> Any:
        return self.await_(self._connection.tpc_begin(*args, **kwargs))

    def tpc_commit(self, *args: Any, **kwargs: Any) -> Any:
        return self.await_(self._connection.tpc_commit(*args, **kwargs))

    def tpc_prepare(self, *args: Any, **kwargs: Any) -> Any:
        return self.await_(self._connection.tpc_prepare(*args, **kwargs))

    def tpc_recover(self, *args: Any, **kwargs: Any) -> Any:
        return self.await_(self._connection.tpc_recover(*args, **kwargs))

    def tpc_rollback(self, *args: Any, **kwargs: Any) -> Any:
        return self.await_(self._connection.tpc_rollback(*args, **kwargs))
|
||||
|
||||
|
||||
class AsyncAdaptFallback_oracledb_connection(
    AsyncAdaptFallback_dbapi_connection, AsyncAdapt_oracledb_connection
):
    """Fallback variant used when ``async_fallback`` is requested; the
    awaiting strategy comes from AsyncAdaptFallback_dbapi_connection."""

    __slots__ = ()
|
||||
|
||||
|
||||
class OracledbAdaptDBAPI:
    """Wrapper presenting the ``oracledb`` module as a sync DBAPI whose
    ``connect()`` returns async-adapted connections.

    Every module attribute except ``connect`` (exception classes,
    constants, etc.) is copied through verbatim.
    """

    def __init__(self, oracledb) -> None:
        self.oracledb = oracledb

        # mirror the driver module's namespace onto this instance,
        # shadowing only connect() with the adapter below
        for k, v in self.oracledb.__dict__.items():
            if k != "connect":
                self.__dict__[k] = v

    def connect(self, *arg, **kw):
        """Open an async driver connection and wrap it for sync use.

        Consumes the SQLAlchemy-specific ``async_fallback`` and
        ``async_creator_fn`` keyword arguments before delegating the rest
        to ``oracledb.connect_async`` (or the supplied creator).
        """
        async_fallback = kw.pop("async_fallback", False)
        creator_fn = kw.pop("async_creator_fn", self.oracledb.connect_async)

        if asbool(async_fallback):
            return AsyncAdaptFallback_oracledb_connection(
                self, await_fallback(creator_fn(*arg, **kw))
            )

        else:
            return AsyncAdapt_oracledb_connection(
                self, await_only(creator_fn(*arg, **kw))
            )
|
||||
|
||||
|
||||
class OracleExecutionContextAsync_oracledb(OracleExecutionContext_oracledb):
    """Execution context for the async dialect; routes cursor creation
    through the adapted connection's cursor factories."""

    # restore default create cursor
    create_cursor = default.DefaultExecutionContext.create_cursor

    def create_default_cursor(self):
        # copy of OracleExecutionContext_cx_oracle.create_cursor
        c = self._dbapi_connection.cursor()
        # propagate the dialect-level arraysize for fetch batching
        if self.dialect.arraysize:
            c.arraysize = self.dialect.arraysize

        return c

    def create_server_side_cursor(self):
        # same as above but using the adapter's server-side cursor factory
        c = self._dbapi_connection.ss_cursor()
        if self.dialect.arraysize:
            c.arraysize = self.dialect.arraysize

        return c
|
||||
|
||||
|
||||
class OracleDialectAsync_oracledb(OracleDialect_oracledb):
    """Asyncio variant of the oracledb dialect, using the adapted DBAPI
    wrapper and an async-aware connection pool."""

    is_async = True
    supports_server_side_cursors = True
    supports_statement_cache = True
    execution_ctx_cls = OracleExecutionContextAsync_oracledb

    # asyncio support requires a newer driver than the sync dialect
    _min_version = (2,)

    # thick_mode mode is not supported by asyncio, oracledb will raise
    @classmethod
    def import_dbapi(cls):
        """Return the oracledb module wrapped for async adaptation."""
        import oracledb

        return OracledbAdaptDBAPI(oracledb)

    @classmethod
    def get_pool_class(cls, url):
        # choose the pool based on the async_fallback URL query flag
        async_fallback = url.query.get("async_fallback", False)

        if asbool(async_fallback):
            return pool.FallbackAsyncAdaptedQueuePool
        else:
            return pool.AsyncAdaptedQueuePool

    def get_driver_connection(self, connection):
        # unwrap the adapter to expose the raw oracledb AsyncConnection
        return connection._connection
|
||||
|
||||
|
||||
# module-level entry points picked up by SQLAlchemy's dialect registry:
# sync and asyncio variants of the oracledb dialect
dialect = OracleDialect_oracledb
dialect_async = OracleDialectAsync_oracledb
|
@ -0,0 +1,220 @@
|
||||
# dialects/oracle/provision.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from ... import create_engine
|
||||
from ... import exc
|
||||
from ... import inspect
|
||||
from ...engine import url as sa_url
|
||||
from ...testing.provision import configure_follower
|
||||
from ...testing.provision import create_db
|
||||
from ...testing.provision import drop_all_schema_objects_post_tables
|
||||
from ...testing.provision import drop_all_schema_objects_pre_tables
|
||||
from ...testing.provision import drop_db
|
||||
from ...testing.provision import follower_url_from_main
|
||||
from ...testing.provision import log
|
||||
from ...testing.provision import post_configure_engine
|
||||
from ...testing.provision import run_reap_dbs
|
||||
from ...testing.provision import set_default_schema_on_connection
|
||||
from ...testing.provision import stop_test_class_outside_fixtures
|
||||
from ...testing.provision import temp_table_keyword_args
|
||||
from ...testing.provision import update_db_opts
|
||||
|
||||
|
||||
@create_db.for_db("oracle")
def _oracle_create_db(cfg, eng, ident):
    """Create the main test user plus its two schema users, with the
    grants the suite needs (dba, tablespace, create table)."""
    # NOTE: make sure you've run "ALTER DATABASE default tablespace users" or
    # similar, so that the default tablespace is not "system"; reflection will
    # fail otherwise
    usernames = (ident, "%s_ts1" % ident, "%s_ts2" % ident)
    with eng.begin() as conn:
        for username in usernames:
            conn.exec_driver_sql("create user %s identified by xe" % username)
        conn.exec_driver_sql("grant dba to %s" % (ident,))
        for username in usernames:
            conn.exec_driver_sql("grant unlimited tablespace to %s" % username)
        # these are needed to create materialized views
        for username in usernames:
            conn.exec_driver_sql("grant create table to %s" % username)
|
||||
|
||||
|
||||
@configure_follower.for_db("oracle")
def _oracle_configure_follower(config, ident):
    """Point the follower config's test schemas at the per-ident users."""
    for attr, suffix in (("test_schema", "ts1"), ("test_schema_2", "ts2")):
        setattr(config, attr, "%s_%s" % (ident, suffix))
|
||||
|
||||
|
||||
def _ora_drop_ignore(conn, dbname):
    """Best-effort ``DROP USER ... CASCADE``.

    Returns True when the user was dropped, False when the database
    reported an error (which is logged and otherwise ignored).
    """
    try:
        conn.exec_driver_sql("drop user %s cascade" % dbname)
    except exc.DatabaseError as err:
        log.warning("couldn't drop db: %s", err)
        return False
    else:
        log.info("Reaped db: %s", dbname)
        return True
|
||||
|
||||
|
||||
@drop_all_schema_objects_pre_tables.for_db("oracle")
def _ora_drop_all_schema_objects_pre_tables(cfg, eng):
    """Purge the recyclebin for the default and test schemas before the
    generic schema teardown runs."""
    for schema in (None, cfg.test_schema):
        _purge_recyclebin(eng, schema)
|
||||
|
||||
|
||||
@drop_all_schema_objects_post_tables.for_db("oracle")
def _ora_drop_all_schema_objects_post_tables(cfg, eng):
    """After tables are dropped, remove synonyms and temp tables that the
    generic teardown does not cover."""
    with eng.begin() as conn:
        # synonyms in the default schema
        for syn in conn.dialect._get_synonyms(conn, None, None, None):
            conn.exec_driver_sql(f"drop synonym {syn['synonym_name']}")

        # synonyms in the configured test schema, dropped with an
        # explicit schema qualifier
        for syn in conn.dialect._get_synonyms(
            conn, cfg.test_schema, None, None
        ):
            conn.exec_driver_sql(
                f"drop synonym {cfg.test_schema}.{syn['synonym_name']}"
            )

        # temp tables need an explicit drop as well
        for tmp_table in inspect(conn).get_temp_table_names():
            conn.exec_driver_sql(f"drop table {tmp_table}")
|
||||
|
||||
|
||||
@drop_db.for_db("oracle")
def _oracle_drop_db(cfg, eng, ident):
    """Drop the three per-test users created by _oracle_create_db."""
    with eng.begin() as conn:
        # cx_Oracle seems to occasionally leak open connections when a large
        # suite it run, even if we confirm we have zero references to
        # connection objects.
        # while there is a "kill session" command in Oracle Database,
        # it unfortunately does not release the connection sufficiently.
        for username in (ident, "%s_ts1" % ident, "%s_ts2" % ident):
            _ora_drop_ignore(conn, username)
|
||||
|
||||
|
||||
@stop_test_class_outside_fixtures.for_db("oracle")
def _ora_stop_test_class_outside_fixtures(config, db, cls):
    """Per-test-class cleanup: purge the recyclebin and reset the
    statement cache on every connection the suite handed out."""
    try:
        _purge_recyclebin(db)
    except exc.DatabaseError as err:
        # best-effort; a failed purge should not fail the test run
        log.warning("purge recyclebin command failed: %s", err)

    # clear statement cache on all connections that were used
    # https://github.com/oracle/python-cx_Oracle/issues/519

    for cx_oracle_conn in _all_conns:
        try:
            sc = cx_oracle_conn.stmtcachesize
        except db.dialect.dbapi.InterfaceError:
            # connection closed
            pass
        else:
            # drop the cache to zero then restore the original size,
            # which discards the cached statements
            cx_oracle_conn.stmtcachesize = 0
            cx_oracle_conn.stmtcachesize = sc
    _all_conns.clear()
|
||||
|
||||
|
||||
def _purge_recyclebin(eng, schema=None):
    """Purge dropped objects from the recyclebin.

    With ``schema=None`` the current user's recyclebin is purged; with a
    schema name, dropped TABLE entries owned by that (denormalized)
    schema are purged individually from DBA_RECYCLEBIN.
    """
    with eng.begin() as conn:
        if schema is None:
            # run magic command to get rid of identity sequences
            # https://floo.bar/2019/11/29/drop-the-underlying-sequence-of-an-identity-column/  # noqa: E501
            conn.exec_driver_sql("purge recyclebin")
        else:
            # per user: https://community.oracle.com/tech/developers/discussion/2255402/how-to-clear-dba-recyclebin-for-a-particular-user  # noqa: E501
            for owner, object_name, type_ in conn.exec_driver_sql(
                "select owner, object_name,type from "
                "dba_recyclebin where owner=:schema and type='TABLE'",
                {"schema": conn.dialect.denormalize_name(schema)},
            ).all():
                conn.exec_driver_sql(f'purge {type_} {owner}."{object_name}"')
|
||||
|
||||
|
||||
# raw DBAPI connections checked out by any engine under test; consumed by
# _ora_stop_test_class_outside_fixtures to reset driver statement caches
_all_conns = set()
|
||||
|
||||
|
||||
@post_configure_engine.for_db("oracle")
def _oracle_post_configure_engine(url, engine, follower_ident):
    """Attach pool event hooks that track checked-out connections and
    invalidate connections that were used for two-phase commit."""
    from sqlalchemy import event

    @event.listens_for(engine, "checkout")
    def checkout(dbapi_con, con_record, con_proxy):
        # remember every raw connection so statement caches can be
        # reset later (see _all_conns)
        _all_conns.add(dbapi_con)

    @event.listens_for(engine, "checkin")
    def checkin(dbapi_connection, connection_record):
        # work around cx_Oracle issue:
        # https://github.com/oracle/python-cx_Oracle/issues/530
        # invalidate oracle connections that had 2pc set up
        if "cx_oracle_xid" in connection_record.info:
            connection_record.invalidate()
|
||||
|
||||
|
||||
@run_reap_dbs.for_db("oracle")
def _reap_oracle_dbs(url, idents):
    """Drop stale TEST_* users left behind by previous runs.

    Only users listed in *idents* (plus their _ts1/_ts2 companions) that
    currently have no active session are dropped.
    """
    log.info("db reaper connecting to %r", url)
    eng = create_engine(url)
    with eng.begin() as conn:
        log.info("identifiers in file: %s", ", ".join(idents))

        # candidate users: TEST_-prefixed accounts with no live session
        to_reap = conn.exec_driver_sql(
            "select u.username from all_users u where username "
            "like 'TEST_%' and not exists (select username "
            "from v$session where username=u.username)"
        )
        all_names = {username.lower() for (username,) in to_reap}
        to_drop = set()
        for name in all_names:
            # _ts1/_ts2 users are pulled in via their base ident below
            if name.endswith("_ts1") or name.endswith("_ts2"):
                continue
            elif name in idents:
                to_drop.add(name)
                if "%s_ts1" % name in all_names:
                    to_drop.add("%s_ts1" % name)
                if "%s_ts2" % name in all_names:
                    to_drop.add("%s_ts2" % name)

        dropped = total = 0
        for total, username in enumerate(to_drop, 1):
            if _ora_drop_ignore(conn, username):
                dropped += 1
        log.info(
            "Dropped %d out of %d stale databases detected", dropped, total
        )
|
||||
|
||||
|
||||
@follower_url_from_main.for_db("oracle")
def _oracle_follower_url_from_main(url, ident):
    """Derive the follower's URL by swapping in its username/password."""
    return sa_url.make_url(url).set(username=ident, password="xe")
|
||||
|
||||
|
||||
@temp_table_keyword_args.for_db("oracle")
def _oracle_temp_table_keyword_args(cfg, eng):
    """Table() keyword arguments for Oracle global temporary tables."""
    return dict(
        prefixes=["GLOBAL TEMPORARY"],
        oracle_on_commit="PRESERVE ROWS",
    )
|
||||
|
||||
|
||||
@set_default_schema_on_connection.for_db("oracle")
def _oracle_set_default_schema_on_connection(
    cfg, dbapi_connection, schema_name
):
    """Switch the session's default schema to *schema_name*.

    Uses ``ALTER SESSION SET CURRENT_SCHEMA``; the schema name is
    interpolated directly since DDL cannot take bind parameters — the
    value comes from the test harness, not untrusted input.
    """
    cursor = dbapi_connection.cursor()
    try:
        cursor.execute("ALTER SESSION SET CURRENT_SCHEMA=%s" % schema_name)
    finally:
        # release the cursor even if the ALTER SESSION fails; previously
        # an exception here would leak the open cursor
        cursor.close()
|
||||
|
||||
|
||||
@update_db_opts.for_db("oracle")
def _update_db_opts(db_url, db_opts, options):
    """Set database options (db_opts) for a test database that we created."""
    # only parse the URL when thick mode was requested, preserving the
    # original short-circuit behavior
    if options.oracledb_thick_mode:
        if sa_url.make_url(db_url).get_driver_name() == "oracledb":
            db_opts["thick_mode"] = True
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user