Update 2025-04-13_16:25:39

This commit is contained in:
root
2025-04-13 16:25:41 +02:00
commit 4c711360d3
2979 changed files with 666585 additions and 0 deletions

View File

@@ -0,0 +1,104 @@
# dialects/mysql/__init__.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
from . import aiomysql # noqa
from . import asyncmy # noqa
from . import base # noqa
from . import cymysql # noqa
from . import mariadbconnector # noqa
from . import mysqlconnector # noqa
from . import mysqldb # noqa
from . import pymysql # noqa
from . import pyodbc # noqa
from .base import BIGINT
from .base import BINARY
from .base import BIT
from .base import BLOB
from .base import BOOLEAN
from .base import CHAR
from .base import DATE
from .base import DATETIME
from .base import DECIMAL
from .base import DOUBLE
from .base import ENUM
from .base import FLOAT
from .base import INTEGER
from .base import JSON
from .base import LONGBLOB
from .base import LONGTEXT
from .base import MEDIUMBLOB
from .base import MEDIUMINT
from .base import MEDIUMTEXT
from .base import NCHAR
from .base import NUMERIC
from .base import NVARCHAR
from .base import REAL
from .base import SET
from .base import SMALLINT
from .base import TEXT
from .base import TIME
from .base import TIMESTAMP
from .base import TINYBLOB
from .base import TINYINT
from .base import TINYTEXT
from .base import VARBINARY
from .base import VARCHAR
from .base import YEAR
from .dml import Insert
from .dml import insert
from .expression import match
from .mariadb import INET4
from .mariadb import INET6
# default dialect
base.dialect = dialect = mysqldb.dialect
__all__ = (
"BIGINT",
"BINARY",
"BIT",
"BLOB",
"BOOLEAN",
"CHAR",
"DATE",
"DATETIME",
"DECIMAL",
"DOUBLE",
"ENUM",
"FLOAT",
"INET4",
"INET6",
"INTEGER",
"INTEGER",
"JSON",
"LONGBLOB",
"LONGTEXT",
"MEDIUMBLOB",
"MEDIUMINT",
"MEDIUMTEXT",
"NCHAR",
"NVARCHAR",
"NUMERIC",
"SET",
"SMALLINT",
"REAL",
"TEXT",
"TIME",
"TIMESTAMP",
"TINYBLOB",
"TINYINT",
"TINYTEXT",
"VARBINARY",
"VARCHAR",
"YEAR",
"dialect",
"insert",
"Insert",
"match",
)
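
A brief usage sketch (not part of the module above; the table and column
names are hypothetical): the names re-exported here are typically used to
declare MySQL-specific column types.

from sqlalchemy import Column, MetaData, Table
from sqlalchemy.dialects.mysql import BIGINT, ENUM, JSON

metadata = MetaData()

# MySQL-specific types accept dialect options such as ``unsigned``
example = Table(
    "example",
    metadata,
    Column("id", BIGINT(unsigned=True), primary_key=True),
    Column("kind", ENUM("a", "b", "c")),
    Column("payload", JSON),
)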

View File

@@ -0,0 +1,335 @@
# dialects/mysql/aiomysql.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors <see AUTHORS
# file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
r"""
.. dialect:: mysql+aiomysql
:name: aiomysql
:dbapi: aiomysql
:connectstring: mysql+aiomysql://user:password@host:port/dbname[?key=value&key=value...]
:url: https://github.com/aio-libs/aiomysql
The aiomysql dialect is SQLAlchemy's second Python asyncio dialect.
Using a special asyncio mediation layer, the aiomysql dialect is usable
as the backend for the :ref:`SQLAlchemy asyncio <asyncio_toplevel>`
extension package.
This dialect should normally be used only with the
:func:`_asyncio.create_async_engine` engine creation function::
from sqlalchemy.ext.asyncio import create_async_engine
engine = create_async_engine(
"mysql+aiomysql://user:pass@hostname/dbname?charset=utf8mb4"
)
""" # noqa
from collections import deque
from .pymysql import MySQLDialect_pymysql
from ... import pool
from ... import util
from ...engine import AdaptedConnection
from ...util.concurrency import asyncio
from ...util.concurrency import await_fallback
from ...util.concurrency import await_only
class AsyncAdapt_aiomysql_cursor:
# TODO: base on connectors/asyncio.py
# see #10415
server_side = False
__slots__ = (
"_adapt_connection",
"_connection",
"await_",
"_cursor",
"_rows",
)
def __init__(self, adapt_connection):
self._adapt_connection = adapt_connection
self._connection = adapt_connection._connection
self.await_ = adapt_connection.await_
cursor = self._connection.cursor(adapt_connection.dbapi.Cursor)
# see https://github.com/aio-libs/aiomysql/issues/543
self._cursor = self.await_(cursor.__aenter__())
self._rows = deque()
@property
def description(self):
return self._cursor.description
@property
def rowcount(self):
return self._cursor.rowcount
@property
def arraysize(self):
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value):
self._cursor.arraysize = value
@property
def lastrowid(self):
return self._cursor.lastrowid
def close(self):
# note we aren't actually closing the cursor here,
# we are just letting GC do it. to allow this to be async
# we would need the Result to change how it does "Safe close cursor".
# MySQL "cursors" don't actually have state to be "closed" besides
# exhausting rows, which we already have done for sync cursor.
# another option would be to emulate aiosqlite dialect and assign
# cursor only if we are doing server side cursor operation.
self._rows.clear()
def execute(self, operation, parameters=None):
return self.await_(self._execute_async(operation, parameters))
def executemany(self, operation, seq_of_parameters):
return self.await_(
self._executemany_async(operation, seq_of_parameters)
)
async def _execute_async(self, operation, parameters):
async with self._adapt_connection._execute_mutex:
result = await self._cursor.execute(operation, parameters)
if not self.server_side:
# aiomysql has a "fake" async result, so we have to pull it out
# of that here since our default result is not async.
# we could just as easily grab "_rows" here and be done with it
# but this is safer.
self._rows = deque(await self._cursor.fetchall())
return result
async def _executemany_async(self, operation, seq_of_parameters):
async with self._adapt_connection._execute_mutex:
return await self._cursor.executemany(operation, seq_of_parameters)
def setinputsizes(self, *inputsizes):
pass
def __iter__(self):
while self._rows:
yield self._rows.popleft()
def fetchone(self):
if self._rows:
return self._rows.popleft()
else:
return None
def fetchmany(self, size=None):
if size is None:
size = self.arraysize
rr = self._rows
return [rr.popleft() for _ in range(min(size, len(rr)))]
def fetchall(self):
retval = list(self._rows)
self._rows.clear()
return retval
class AsyncAdapt_aiomysql_ss_cursor(AsyncAdapt_aiomysql_cursor):
# TODO: base on connectors/asyncio.py
# see #10415
__slots__ = ()
server_side = True
def __init__(self, adapt_connection):
self._adapt_connection = adapt_connection
self._connection = adapt_connection._connection
self.await_ = adapt_connection.await_
cursor = self._connection.cursor(adapt_connection.dbapi.SSCursor)
self._cursor = self.await_(cursor.__aenter__())
def close(self):
if self._cursor is not None:
self.await_(self._cursor.close())
self._cursor = None
def fetchone(self):
return self.await_(self._cursor.fetchone())
def fetchmany(self, size=None):
return self.await_(self._cursor.fetchmany(size=size))
def fetchall(self):
return self.await_(self._cursor.fetchall())
class AsyncAdapt_aiomysql_connection(AdaptedConnection):
# TODO: base on connectors/asyncio.py
# see #10415
await_ = staticmethod(await_only)
__slots__ = ("dbapi", "_execute_mutex")
def __init__(self, dbapi, connection):
self.dbapi = dbapi
self._connection = connection
self._execute_mutex = asyncio.Lock()
def ping(self, reconnect):
return self.await_(self._connection.ping(reconnect))
def character_set_name(self):
return self._connection.character_set_name()
def autocommit(self, value):
self.await_(self._connection.autocommit(value))
def cursor(self, server_side=False):
if server_side:
return AsyncAdapt_aiomysql_ss_cursor(self)
else:
return AsyncAdapt_aiomysql_cursor(self)
def rollback(self):
self.await_(self._connection.rollback())
def commit(self):
self.await_(self._connection.commit())
def terminate(self):
# it's not awaitable.
self._connection.close()
def close(self) -> None:
self.await_(self._connection.ensure_closed())
class AsyncAdaptFallback_aiomysql_connection(AsyncAdapt_aiomysql_connection):
# TODO: base on connectors/asyncio.py
# see #10415
__slots__ = ()
await_ = staticmethod(await_fallback)
class AsyncAdapt_aiomysql_dbapi:
def __init__(self, aiomysql, pymysql):
self.aiomysql = aiomysql
self.pymysql = pymysql
self.paramstyle = "format"
self._init_dbapi_attributes()
self.Cursor, self.SSCursor = self._init_cursors_subclasses()
def _init_dbapi_attributes(self):
for name in (
"Warning",
"Error",
"InterfaceError",
"DataError",
"DatabaseError",
"OperationalError",
"InterfaceError",
"IntegrityError",
"ProgrammingError",
"InternalError",
"NotSupportedError",
):
setattr(self, name, getattr(self.aiomysql, name))
for name in (
"NUMBER",
"STRING",
"DATETIME",
"BINARY",
"TIMESTAMP",
"Binary",
):
setattr(self, name, getattr(self.pymysql, name))
def connect(self, *arg, **kw):
async_fallback = kw.pop("async_fallback", False)
creator_fn = kw.pop("async_creator_fn", self.aiomysql.connect)
if util.asbool(async_fallback):
return AsyncAdaptFallback_aiomysql_connection(
self,
await_fallback(creator_fn(*arg, **kw)),
)
else:
return AsyncAdapt_aiomysql_connection(
self,
await_only(creator_fn(*arg, **kw)),
)
def _init_cursors_subclasses(self):
# suppress unconditional warning emitted by aiomysql
class Cursor(self.aiomysql.Cursor):
async def _show_warnings(self, conn):
pass
class SSCursor(self.aiomysql.SSCursor):
async def _show_warnings(self, conn):
pass
return Cursor, SSCursor
class MySQLDialect_aiomysql(MySQLDialect_pymysql):
driver = "aiomysql"
supports_statement_cache = True
supports_server_side_cursors = True
_sscursor = AsyncAdapt_aiomysql_ss_cursor
is_async = True
has_terminate = True
@classmethod
def import_dbapi(cls):
return AsyncAdapt_aiomysql_dbapi(
__import__("aiomysql"), __import__("pymysql")
)
@classmethod
def get_pool_class(cls, url):
async_fallback = url.query.get("async_fallback", False)
if util.asbool(async_fallback):
return pool.FallbackAsyncAdaptedQueuePool
else:
return pool.AsyncAdaptedQueuePool
def do_terminate(self, dbapi_connection) -> None:
dbapi_connection.terminate()
def create_connect_args(self, url):
return super().create_connect_args(
url, _translate_args=dict(username="user", database="db")
)
def is_disconnect(self, e, connection, cursor):
if super().is_disconnect(e, connection, cursor):
return True
else:
str_e = str(e).lower()
return "not connected" in str_e
def _found_rows_client_flag(self):
from pymysql.constants import CLIENT
return CLIENT.FOUND_ROWS
def get_driver_connection(self, connection):
return connection._connection
dialect = MySQLDialect_aiomysql
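
A minimal usage sketch for this dialect, assuming the aiomysql package is
installed; the DSN below is a placeholder.

import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine

async def main():
    # the "mysql+aiomysql://" scheme selects MySQLDialect_aiomysql
    engine = create_async_engine(
        "mysql+aiomysql://user:pass@localhost/test?charset=utf8mb4"
    )
    async with engine.connect() as conn:
        result = await conn.execute(text("SELECT 1"))
        print(result.scalar())
    await engine.dispose()

asyncio.run(main())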

View File

@@ -0,0 +1,339 @@
# dialects/mysql/asyncmy.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors <see AUTHORS
# file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
r"""
.. dialect:: mysql+asyncmy
:name: asyncmy
:dbapi: asyncmy
:connectstring: mysql+asyncmy://user:password@host:port/dbname[?key=value&key=value...]
:url: https://github.com/long2ice/asyncmy
Using a special asyncio mediation layer, the asyncmy dialect is usable
as the backend for the :ref:`SQLAlchemy asyncio <asyncio_toplevel>`
extension package.
This dialect should normally be used only with the
:func:`_asyncio.create_async_engine` engine creation function::
from sqlalchemy.ext.asyncio import create_async_engine
engine = create_async_engine(
"mysql+asyncmy://user:pass@hostname/dbname?charset=utf8mb4"
)
""" # noqa
from collections import deque
from contextlib import asynccontextmanager
from .pymysql import MySQLDialect_pymysql
from ... import pool
from ... import util
from ...engine import AdaptedConnection
from ...util.concurrency import asyncio
from ...util.concurrency import await_fallback
from ...util.concurrency import await_only
class AsyncAdapt_asyncmy_cursor:
# TODO: base on connectors/asyncio.py
# see #10415
server_side = False
__slots__ = (
"_adapt_connection",
"_connection",
"await_",
"_cursor",
"_rows",
)
def __init__(self, adapt_connection):
self._adapt_connection = adapt_connection
self._connection = adapt_connection._connection
self.await_ = adapt_connection.await_
cursor = self._connection.cursor()
self._cursor = self.await_(cursor.__aenter__())
self._rows = deque()
@property
def description(self):
return self._cursor.description
@property
def rowcount(self):
return self._cursor.rowcount
@property
def arraysize(self):
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value):
self._cursor.arraysize = value
@property
def lastrowid(self):
return self._cursor.lastrowid
def close(self):
# note we aren't actually closing the cursor here,
# we are just letting GC do it. to allow this to be async
# we would need the Result to change how it does "Safe close cursor".
# MySQL "cursors" don't actually have state to be "closed" besides
# exhausting rows, which we already have done for sync cursor.
# another option would be to emulate aiosqlite dialect and assign
# cursor only if we are doing server side cursor operation.
self._rows.clear()
def execute(self, operation, parameters=None):
return self.await_(self._execute_async(operation, parameters))
def executemany(self, operation, seq_of_parameters):
return self.await_(
self._executemany_async(operation, seq_of_parameters)
)
async def _execute_async(self, operation, parameters):
async with self._adapt_connection._mutex_and_adapt_errors():
if parameters is None:
result = await self._cursor.execute(operation)
else:
result = await self._cursor.execute(operation, parameters)
if not self.server_side:
# asyncmy has a "fake" async result, so we have to pull it out
# of that here since our default result is not async.
# we could just as easily grab "_rows" here and be done with it
# but this is safer.
self._rows = deque(await self._cursor.fetchall())
return result
async def _executemany_async(self, operation, seq_of_parameters):
async with self._adapt_connection._mutex_and_adapt_errors():
return await self._cursor.executemany(operation, seq_of_parameters)
def setinputsizes(self, *inputsizes):
pass
def __iter__(self):
while self._rows:
yield self._rows.popleft()
def fetchone(self):
if self._rows:
return self._rows.popleft()
else:
return None
def fetchmany(self, size=None):
if size is None:
size = self.arraysize
rr = self._rows
return [rr.popleft() for _ in range(min(size, len(rr)))]
def fetchall(self):
retval = list(self._rows)
self._rows.clear()
return retval
class AsyncAdapt_asyncmy_ss_cursor(AsyncAdapt_asyncmy_cursor):
# TODO: base on connectors/asyncio.py
# see #10415
__slots__ = ()
server_side = True
def __init__(self, adapt_connection):
self._adapt_connection = adapt_connection
self._connection = adapt_connection._connection
self.await_ = adapt_connection.await_
cursor = self._connection.cursor(
adapt_connection.dbapi.asyncmy.cursors.SSCursor
)
self._cursor = self.await_(cursor.__aenter__())
def close(self):
if self._cursor is not None:
self.await_(self._cursor.close())
self._cursor = None
def fetchone(self):
return self.await_(self._cursor.fetchone())
def fetchmany(self, size=None):
return self.await_(self._cursor.fetchmany(size=size))
def fetchall(self):
return self.await_(self._cursor.fetchall())
class AsyncAdapt_asyncmy_connection(AdaptedConnection):
# TODO: base on connectors/asyncio.py
# see #10415
await_ = staticmethod(await_only)
__slots__ = ("dbapi", "_execute_mutex")
def __init__(self, dbapi, connection):
self.dbapi = dbapi
self._connection = connection
self._execute_mutex = asyncio.Lock()
@asynccontextmanager
async def _mutex_and_adapt_errors(self):
async with self._execute_mutex:
try:
yield
except AttributeError:
raise self.dbapi.InternalError(
"network operation failed due to asyncmy attribute error"
)
def ping(self, reconnect):
assert not reconnect
return self.await_(self._do_ping())
async def _do_ping(self):
async with self._mutex_and_adapt_errors():
return await self._connection.ping(False)
def character_set_name(self):
return self._connection.character_set_name()
def autocommit(self, value):
self.await_(self._connection.autocommit(value))
def cursor(self, server_side=False):
if server_side:
return AsyncAdapt_asyncmy_ss_cursor(self)
else:
return AsyncAdapt_asyncmy_cursor(self)
def rollback(self):
self.await_(self._connection.rollback())
def commit(self):
self.await_(self._connection.commit())
def terminate(self):
# it's not awaitable.
self._connection.close()
def close(self) -> None:
self.await_(self._connection.ensure_closed())
class AsyncAdaptFallback_asyncmy_connection(AsyncAdapt_asyncmy_connection):
__slots__ = ()
await_ = staticmethod(await_fallback)
def _Binary(x):
"""Return x as a binary type."""
return bytes(x)
class AsyncAdapt_asyncmy_dbapi:
def __init__(self, asyncmy):
self.asyncmy = asyncmy
self.paramstyle = "format"
self._init_dbapi_attributes()
def _init_dbapi_attributes(self):
for name in (
"Warning",
"Error",
"InterfaceError",
"DataError",
"DatabaseError",
"OperationalError",
"InterfaceError",
"IntegrityError",
"ProgrammingError",
"InternalError",
"NotSupportedError",
):
setattr(self, name, getattr(self.asyncmy.errors, name))
STRING = util.symbol("STRING")
NUMBER = util.symbol("NUMBER")
BINARY = util.symbol("BINARY")
DATETIME = util.symbol("DATETIME")
TIMESTAMP = util.symbol("TIMESTAMP")
Binary = staticmethod(_Binary)
def connect(self, *arg, **kw):
async_fallback = kw.pop("async_fallback", False)
creator_fn = kw.pop("async_creator_fn", self.asyncmy.connect)
if util.asbool(async_fallback):
return AsyncAdaptFallback_asyncmy_connection(
self,
await_fallback(creator_fn(*arg, **kw)),
)
else:
return AsyncAdapt_asyncmy_connection(
self,
await_only(creator_fn(*arg, **kw)),
)
class MySQLDialect_asyncmy(MySQLDialect_pymysql):
driver = "asyncmy"
supports_statement_cache = True
supports_server_side_cursors = True
_sscursor = AsyncAdapt_asyncmy_ss_cursor
is_async = True
has_terminate = True
@classmethod
def import_dbapi(cls):
return AsyncAdapt_asyncmy_dbapi(__import__("asyncmy"))
@classmethod
def get_pool_class(cls, url):
async_fallback = url.query.get("async_fallback", False)
if util.asbool(async_fallback):
return pool.FallbackAsyncAdaptedQueuePool
else:
return pool.AsyncAdaptedQueuePool
def do_terminate(self, dbapi_connection) -> None:
dbapi_connection.terminate()
def create_connect_args(self, url):
return super().create_connect_args(
url, _translate_args=dict(username="user", database="db")
)
def is_disconnect(self, e, connection, cursor):
if super().is_disconnect(e, connection, cursor):
return True
else:
str_e = str(e).lower()
return (
"not connected" in str_e or "network operation failed" in str_e
)
def _found_rows_client_flag(self):
from asyncmy.constants import CLIENT
return CLIENT.FOUND_ROWS
def get_driver_connection(self, connection):
return connection._connection
dialect = MySQLDialect_asyncmy
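
A sketch of server-side cursor use with this dialect via streaming results,
assuming the asyncmy package is installed; the DSN and table name are
placeholders.

import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine

async def stream_rows():
    engine = create_async_engine("mysql+asyncmy://user:pass@localhost/test")
    async with engine.connect() as conn:
        # AsyncConnection.stream() requests a server-side cursor, which is
        # backed by AsyncAdapt_asyncmy_ss_cursor in the adapter above
        result = await conn.stream(text("SELECT id FROM some_table"))
        async for row in result:
            print(row.id)
    await engine.dispose()

asyncio.run(stream_rows())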

File diff suppressed because it is too large

View File

@@ -0,0 +1,84 @@
# dialects/mysql/cymysql.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
r"""
.. dialect:: mysql+cymysql
:name: CyMySQL
:dbapi: cymysql
:connectstring: mysql+cymysql://<username>:<password>@<host>/<dbname>[?<options>]
:url: https://github.com/nakagami/CyMySQL
.. note::
The CyMySQL dialect is **not tested as part of SQLAlchemy's continuous
integration** and may have unresolved issues. The recommended MySQL
dialects are mysqlclient and PyMySQL.
""" # noqa
from .base import BIT
from .base import MySQLDialect
from .mysqldb import MySQLDialect_mysqldb
from ... import util
class _cymysqlBIT(BIT):
def result_processor(self, dialect, coltype):
"""Convert MySQL's 64 bit, variable length binary string to a long."""
def process(value):
if value is not None:
v = 0
for i in iter(value):
v = v << 8 | i
return v
return value
return process
class MySQLDialect_cymysql(MySQLDialect_mysqldb):
driver = "cymysql"
supports_statement_cache = True
description_encoding = None
supports_sane_rowcount = True
supports_sane_multi_rowcount = False
supports_unicode_statements = True
colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _cymysqlBIT})
@classmethod
def import_dbapi(cls):
return __import__("cymysql")
def _detect_charset(self, connection):
return connection.connection.charset
def _extract_error_code(self, exception):
return exception.errno
def is_disconnect(self, e, connection, cursor):
if isinstance(e, self.dbapi.OperationalError):
return self._extract_error_code(e) in (
2006,
2013,
2014,
2045,
2055,
)
elif isinstance(e, self.dbapi.InterfaceError):
# if underlying connection is closed,
# this is the error you get
return True
else:
return False
dialect = MySQLDialect_cymysql
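
An illustration of the byte-folding performed by
``_cymysqlBIT.result_processor`` above; the sample value is arbitrary.

value = b"\x01\x02"  # raw BIT value as a byte string from the driver
v = 0
for i in iter(value):
    v = v << 8 | i
assert v == 0x0102  # 258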

View File

@@ -0,0 +1,225 @@
# dialects/mysql/dml.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from __future__ import annotations
from typing import Any
from typing import Dict
from typing import List
from typing import Mapping
from typing import Optional
from typing import Tuple
from typing import Union
from ... import exc
from ... import util
from ...sql._typing import _DMLTableArgument
from ...sql.base import _exclusive_against
from ...sql.base import _generative
from ...sql.base import ColumnCollection
from ...sql.base import ReadOnlyColumnCollection
from ...sql.dml import Insert as StandardInsert
from ...sql.elements import ClauseElement
from ...sql.elements import KeyedColumnElement
from ...sql.expression import alias
from ...sql.selectable import NamedFromClause
from ...util.typing import Self
__all__ = ("Insert", "insert")
def insert(table: _DMLTableArgument) -> Insert:
"""Construct a MySQL/MariaDB-specific variant :class:`_mysql.Insert`
construct.
.. container:: inherited_member
The :func:`sqlalchemy.dialects.mysql.insert` function creates
a :class:`sqlalchemy.dialects.mysql.Insert`. This class is based
on the dialect-agnostic :class:`_sql.Insert` construct which may
be constructed using the :func:`_sql.insert` function in
SQLAlchemy Core.
The :class:`_mysql.Insert` construct includes the additional method
:meth:`_mysql.Insert.on_duplicate_key_update`.
"""
return Insert(table)
class Insert(StandardInsert):
"""MySQL-specific implementation of INSERT.
Adds methods for MySQL-specific syntaxes such as ON DUPLICATE KEY UPDATE.
The :class:`~.mysql.Insert` object is created using the
:func:`sqlalchemy.dialects.mysql.insert` function.
.. versionadded:: 1.2
"""
stringify_dialect = "mysql"
inherit_cache = False
@property
def inserted(
self,
) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
"""Provide the "inserted" namespace for an ON DUPLICATE KEY UPDATE
statement
MySQL's ON DUPLICATE KEY UPDATE clause allows reference to the row
that would be inserted, via a special function called ``VALUES()``.
This attribute provides all columns in this row to be referenceable
such that they will render within a ``VALUES()`` function inside the
ON DUPLICATE KEY UPDATE clause. The attribute is named ``.inserted``
so as not to conflict with the existing
:meth:`_expression.Insert.values` method.
.. tip:: The :attr:`_mysql.Insert.inserted` attribute is an instance
of :class:`_expression.ColumnCollection`, which provides an
interface the same as that of the :attr:`_schema.Table.c`
collection described at :ref:`metadata_tables_and_columns`.
With this collection, ordinary names are accessible like attributes
(e.g. ``stmt.inserted.some_column``), but special names and
dictionary method names should be accessed using indexed access,
such as ``stmt.inserted["column name"]`` or
``stmt.inserted["values"]``. See the docstring for
:class:`_expression.ColumnCollection` for further examples.
.. seealso::
:ref:`mysql_insert_on_duplicate_key_update` - example of how
to use :attr:`_expression.Insert.inserted`
"""
return self.inserted_alias.columns
@util.memoized_property
def inserted_alias(self) -> NamedFromClause:
return alias(self.table, name="inserted")
@_generative
@_exclusive_against(
"_post_values_clause",
msgs={
"_post_values_clause": "This Insert construct already "
"has an ON DUPLICATE KEY clause present"
},
)
def on_duplicate_key_update(self, *args: _UpdateArg, **kw: Any) -> Self:
r"""
Specifies the ON DUPLICATE KEY UPDATE clause.
:param \**kw: Column keys linked to UPDATE values. The
values may be any SQL expression or supported literal Python
values.
.. warning:: This dictionary does **not** take into account
Python-specified default UPDATE values or generation functions,
e.g. those specified using :paramref:`_schema.Column.onupdate`.
These values will not be exercised for an ON DUPLICATE KEY UPDATE
style of UPDATE, unless values are manually specified here.
:param \*args: As an alternative to passing key/value parameters,
a dictionary or list of 2-tuples can be passed as a single positional
argument.
Passing a single dictionary is equivalent to the keyword argument
form::
insert().on_duplicate_key_update({"name": "some name"})
Passing a list of 2-tuples indicates that the parameter assignments
in the UPDATE clause should be ordered as sent, in a manner similar
to that described for the :class:`_expression.Update`
construct overall
in :ref:`tutorial_parameter_ordered_updates`::
insert().on_duplicate_key_update(
[
("name", "some name"),
("value", "some value"),
]
)
.. versionchanged:: 1.3 parameters can be specified as a dictionary
or list of 2-tuples; the latter form provides for parameter
ordering.
.. versionadded:: 1.2
.. seealso::
:ref:`mysql_insert_on_duplicate_key_update`
"""
if args and kw:
raise exc.ArgumentError(
"Can't pass kwargs and positional arguments simultaneously"
)
if args:
if len(args) > 1:
raise exc.ArgumentError(
"Only a single dictionary or list of tuples "
"is accepted positionally."
)
values = args[0]
else:
values = kw
self._post_values_clause = OnDuplicateClause(
self.inserted_alias, values
)
return self
class OnDuplicateClause(ClauseElement):
__visit_name__ = "on_duplicate_key_update"
_parameter_ordering: Optional[List[str]] = None
update: Dict[str, Any]
stringify_dialect = "mysql"
def __init__(
self, inserted_alias: NamedFromClause, update: _UpdateArg
) -> None:
self.inserted_alias = inserted_alias
# auto-detect that parameters should be ordered. This is copied from
# Update._process_colparams(); however, we don't look for a special flag
# in this case since we are not disambiguating from other use cases as
# we are in Update.values().
if isinstance(update, list) and (
update and isinstance(update[0], tuple)
):
self._parameter_ordering = [key for key, value in update]
update = dict(update)
if isinstance(update, dict):
if not update:
raise ValueError(
"update parameter dictionary must not be empty"
)
elif isinstance(update, ColumnCollection):
update = dict(update)
else:
raise ValueError(
"update parameter must be a non-empty dictionary "
"or a ColumnCollection such as the `.c.` collection "
"of a Table object"
)
self.update = update
_UpdateArg = Union[
Mapping[Any, Any], List[Tuple[str, Any]], ColumnCollection[Any, Any]
]
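
A usage sketch for the construct above; the table, columns, and values are
hypothetical, and the statement is only compiled against the MySQL dialect,
not executed.

from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects import mysql
from sqlalchemy.dialects.mysql import insert

metadata = MetaData()
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)

stmt = insert(users).values(id=1, name="spongebob")
# refer to the proposed row via the ``inserted`` namespace; it renders as
# VALUES(name) inside the ON DUPLICATE KEY UPDATE clause
stmt = stmt.on_duplicate_key_update(name=stmt.inserted.name)
print(stmt.compile(dialect=mysql.dialect()))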

View File

@@ -0,0 +1,243 @@
# dialects/mysql/enumerated.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
import re
from .types import _StringType
from ... import exc
from ... import sql
from ... import util
from ...sql import sqltypes
class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum, _StringType):
"""MySQL ENUM type."""
__visit_name__ = "ENUM"
native_enum = True
def __init__(self, *enums, **kw):
"""Construct an ENUM.
E.g.::
Column("myenum", ENUM("foo", "bar", "baz"))
:param enums: The range of valid values for this ENUM. Values in
enums are not quoted, they will be escaped and surrounded by single
quotes when generating the schema. This object may also be a
PEP-435-compliant enumerated type.
.. versionadded:: 1.1 added support for PEP-435-compliant enumerated
types.
:param strict: This flag has no effect.
.. versionchanged:: The MySQL ENUM type as well as the base Enum
type now validates all Python data values.
:param charset: Optional, a column-level character set for this string
value. Takes precedence to 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
value. Takes precedence to 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
kw.pop("strict", None)
self._enum_init(enums, kw)
_StringType.__init__(self, length=self.length, **kw)
@classmethod
def adapt_emulated_to_native(cls, impl, **kw):
"""Produce a MySQL native :class:`.mysql.ENUM` from plain
:class:`.Enum`.
"""
kw.setdefault("validate_strings", impl.validate_strings)
kw.setdefault("values_callable", impl.values_callable)
kw.setdefault("omit_aliases", impl._omit_aliases)
return cls(**kw)
def _object_value_for_elem(self, elem):
# mysql sends back a blank string for any value that
# was persisted that was not in the enums; that is, it does no
# validation on the incoming data, it "truncates" it to be
# the blank string. Return it straight.
if elem == "":
return elem
else:
return super()._object_value_for_elem(elem)
def __repr__(self):
return util.generic_repr(
self, to_inspect=[ENUM, _StringType, sqltypes.Enum]
)
class SET(_StringType):
"""MySQL SET type."""
__visit_name__ = "SET"
def __init__(self, *values, **kw):
"""Construct a SET.
E.g.::
Column("myset", SET("foo", "bar", "baz"))
The list of potential values is required in the case that this
set will be used to generate DDL for a table, or if the
:paramref:`.SET.retrieve_as_bitwise` flag is set to True.
:param values: The range of valid values for this SET. The values
are not quoted, they will be escaped and surrounded by single
quotes when generating the schema.
:param convert_unicode: Same flag as that of
:paramref:`.String.convert_unicode`.
:param collation: same as that of :paramref:`.String.collation`
:param charset: same as that of :paramref:`.VARCHAR.charset`.
:param ascii: same as that of :paramref:`.VARCHAR.ascii`.
:param unicode: same as that of :paramref:`.VARCHAR.unicode`.
:param binary: same as that of :paramref:`.VARCHAR.binary`.
:param retrieve_as_bitwise: if True, the data for the set type will be
persisted and selected using an integer value, where a set is coerced
into a bitwise mask for persistence. MySQL allows this mode which
has the advantage of being able to store values unambiguously,
such as the blank string ``''``. The datatype will appear
as the expression ``col + 0`` in a SELECT statement, so that the
value is coerced into an integer value in result sets.
This flag is required if one wishes
to persist a set that can store the blank string ``''`` as a value.
.. warning::
When using :paramref:`.mysql.SET.retrieve_as_bitwise`, it is
essential that the list of set values is expressed in the
**exact same order** as exists on the MySQL database.
"""
self.retrieve_as_bitwise = kw.pop("retrieve_as_bitwise", False)
self.values = tuple(values)
if not self.retrieve_as_bitwise and "" in values:
raise exc.ArgumentError(
"Can't use the blank value '' in a SET without "
"setting retrieve_as_bitwise=True"
)
if self.retrieve_as_bitwise:
self._bitmap = {
value: 2**idx for idx, value in enumerate(self.values)
}
self._bitmap.update(
(2**idx, value) for idx, value in enumerate(self.values)
)
length = max([len(v) for v in values] + [0])
kw.setdefault("length", length)
super().__init__(**kw)
def column_expression(self, colexpr):
if self.retrieve_as_bitwise:
return sql.type_coerce(
sql.type_coerce(colexpr, sqltypes.Integer) + 0, self
)
else:
return colexpr
def result_processor(self, dialect, coltype):
if self.retrieve_as_bitwise:
def process(value):
if value is not None:
value = int(value)
return set(util.map_bits(self._bitmap.__getitem__, value))
else:
return None
else:
super_convert = super().result_processor(dialect, coltype)
def process(value):
if isinstance(value, str):
# MySQLdb returns a string, let's parse
if super_convert:
value = super_convert(value)
return set(re.findall(r"[^,]+", value))
else:
# mysql-connector-python does a naive
# split(",") which throws in an empty string
if value is not None:
value.discard("")
return value
return process
def bind_processor(self, dialect):
super_convert = super().bind_processor(dialect)
if self.retrieve_as_bitwise:
def process(value):
if value is None:
return None
elif isinstance(value, (int, str)):
if super_convert:
return super_convert(value)
else:
return value
else:
int_value = 0
for v in value:
int_value |= self._bitmap[v]
return int_value
else:
def process(value):
# accept strings and int (actually bitflag) values directly
if value is not None and not isinstance(value, (int, str)):
value = ",".join(value)
if super_convert:
return super_convert(value)
else:
return value
return process
def adapt(self, impltype, **kw):
kw["retrieve_as_bitwise"] = self.retrieve_as_bitwise
return util.constructor_copy(self, impltype, *self.values, **kw)
def __repr__(self):
return util.generic_repr(
self,
to_inspect=[SET, _StringType],
additional_kw=[
("retrieve_as_bitwise", False),
],
)
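
An illustration of the bitmap built when ``SET(..., retrieve_as_bitwise=True)``
is constructed above; the value names are arbitrary.

values = ("read", "write", "admin")
bitmap = {value: 2**idx for idx, value in enumerate(values)}
bitmap.update((2**idx, value) for idx, value in enumerate(values))

# persisting {"read", "admin"} stores the integer 1 | 4 == 5
stored = bitmap["read"] | bitmap["admin"]
assert stored == 5

# reading back, each set bit maps to its original value
restored = {bitmap[bit] for bit in (1, 4)}
assert restored == {"read", "admin"}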

View File

@@ -0,0 +1,143 @@
# dialects/mysql/expression.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
from ... import exc
from ... import util
from ...sql import coercions
from ...sql import elements
from ...sql import operators
from ...sql import roles
from ...sql.base import _generative
from ...sql.base import Generative
from ...util.typing import Self
class match(Generative, elements.BinaryExpression):
"""Produce a ``MATCH (X, Y) AGAINST ('TEXT')`` clause.
E.g.::
from sqlalchemy import desc
from sqlalchemy.dialects.mysql import match
match_expr = match(
users_table.c.firstname,
users_table.c.lastname,
against="Firstname Lastname",
)
stmt = (
select(users_table)
.where(match_expr.in_boolean_mode())
.order_by(desc(match_expr))
)
Would produce SQL resembling:
.. sourcecode:: sql
SELECT id, firstname, lastname
FROM user
WHERE MATCH(firstname, lastname) AGAINST (:param_1 IN BOOLEAN MODE)
ORDER BY MATCH(firstname, lastname) AGAINST (:param_2) DESC
The :func:`_mysql.match` function is a standalone version of the
:meth:`_sql.ColumnElement.match` method available on all
SQL expressions, as when :meth:`_expression.ColumnElement.match` is
used, but allows passing multiple columns.
:param cols: column expressions to match against
:param against: the expression to be compared against
:param in_boolean_mode: boolean, set "boolean mode" to true
:param in_natural_language_mode: boolean, set "natural language" to true
:param with_query_expansion: boolean, set "query expansion" to true
.. versionadded:: 1.4.19
.. seealso::
:meth:`_expression.ColumnElement.match`
"""
__visit_name__ = "mysql_match"
inherit_cache = True
def __init__(self, *cols, **kw):
if not cols:
raise exc.ArgumentError("columns are required")
against = kw.pop("against", None)
if against is None:
raise exc.ArgumentError("against is required")
against = coercions.expect(
roles.ExpressionElementRole,
against,
)
left = elements.BooleanClauseList._construct_raw(
operators.comma_op,
clauses=cols,
)
left.group = False
flags = util.immutabledict(
{
"mysql_boolean_mode": kw.pop("in_boolean_mode", False),
"mysql_natural_language": kw.pop(
"in_natural_language_mode", False
),
"mysql_query_expansion": kw.pop("with_query_expansion", False),
}
)
if kw:
raise exc.ArgumentError("unknown arguments: %s" % (", ".join(kw)))
super().__init__(left, against, operators.match_op, modifiers=flags)
@_generative
def in_boolean_mode(self) -> Self:
"""Apply the "IN BOOLEAN MODE" modifier to the MATCH expression.
:return: a new :class:`_mysql.match` instance with modifications
applied.
"""
self.modifiers = self.modifiers.union({"mysql_boolean_mode": True})
return self
@_generative
def in_natural_language_mode(self) -> Self:
"""Apply the "IN NATURAL LANGUAGE MODE" modifier to the MATCH
expression.
:return: a new :class:`_mysql.match` instance with modifications
applied.
"""
self.modifiers = self.modifiers.union({"mysql_natural_language": True})
return self
@_generative
def with_query_expansion(self) -> Self:
"""Apply the "WITH QUERY EXPANSION" modifier to the MATCH expression.
:return: a new :class:`_mysql.match` instance with modifications
applied.
"""
self.modifiers = self.modifiers.union({"mysql_query_expansion": True})
return self
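
A compile-only sketch of the match() construct documented above; the table
and column names are hypothetical, and the statement is rendered with the
MySQL dialect rather than executed.

from sqlalchemy import Column, MetaData, String, Table, select
from sqlalchemy.dialects import mysql
from sqlalchemy.dialects.mysql import match

metadata = MetaData()
users = Table(
    "user",
    metadata,
    Column("firstname", String(50)),
    Column("lastname", String(50)),
)

expr = match(users.c.firstname, users.c.lastname, against="text to search")
stmt = select(users).where(expr.in_boolean_mode())
# renders MATCH (user.firstname, user.lastname) AGAINST (%s IN BOOLEAN MODE)
print(stmt.compile(dialect=mysql.dialect()))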

View File

@@ -0,0 +1,81 @@
# dialects/mysql/json.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
from ... import types as sqltypes
class JSON(sqltypes.JSON):
"""MySQL JSON type.
MySQL supports JSON as of version 5.7.
MariaDB supports JSON (as an alias for LONGTEXT) as of version 10.2.
:class:`_mysql.JSON` is used automatically whenever the base
:class:`_types.JSON` datatype is used against a MySQL or MariaDB backend.
.. seealso::
:class:`_types.JSON` - main documentation for the generic
cross-platform JSON datatype.
The :class:`.mysql.JSON` type supports persistence of JSON values
as well as the core index operations provided by :class:`_types.JSON`
datatype, by adapting the operations to render the ``JSON_EXTRACT``
function at the database level.
"""
pass
class _FormatTypeMixin:
def _format_value(self, value):
raise NotImplementedError()
def bind_processor(self, dialect):
super_proc = self.string_bind_processor(dialect)
def process(value):
value = self._format_value(value)
if super_proc:
value = super_proc(value)
return value
return process
def literal_processor(self, dialect):
super_proc = self.string_literal_processor(dialect)
def process(value):
value = self._format_value(value)
if super_proc:
value = super_proc(value)
return value
return process
class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType):
def _format_value(self, value):
if isinstance(value, int):
value = "$[%s]" % value
else:
value = '$."%s"' % value
return value
class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType):
def _format_value(self, value):
return "$%s" % (
"".join(
[
"[%s]" % elem if isinstance(elem, int) else '."%s"' % elem
for elem in value
]
)
)
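
An illustration of the path string produced by ``JSONPathType._format_value``
above; the index path is arbitrary.

path = ["data", 0, "name"]  # e.g. col["data"][0]["name"]
formatted = "$%s" % (
    "".join(
        "[%s]" % elem if isinstance(elem, int) else '."%s"' % elem
        for elem in path
    )
)
assert formatted == '$."data"[0]."name"'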

View File

@@ -0,0 +1,67 @@
# dialects/mysql/mariadb.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
from .base import MariaDBIdentifierPreparer
from .base import MySQLDialect
from .base import MySQLTypeCompiler
from ...sql import sqltypes
class INET4(sqltypes.TypeEngine[str]):
"""INET4 column type for MariaDB
.. versionadded:: 2.0.37
"""
__visit_name__ = "INET4"
class INET6(sqltypes.TypeEngine[str]):
"""INET6 column type for MariaDB
.. versionadded:: 2.0.37
"""
__visit_name__ = "INET6"
class MariaDBTypeCompiler(MySQLTypeCompiler):
def visit_INET4(self, type_, **kwargs) -> str:
return "INET4"
def visit_INET6(self, type_, **kwargs) -> str:
return "INET6"
class MariaDBDialect(MySQLDialect):
is_mariadb = True
supports_statement_cache = True
name = "mariadb"
preparer = MariaDBIdentifierPreparer
type_compiler_cls = MariaDBTypeCompiler
def loader(driver):
dialect_mod = __import__(
"sqlalchemy.dialects.mysql.%s" % driver
).dialects.mysql
driver_mod = getattr(dialect_mod, driver)
if hasattr(driver_mod, "mariadb_dialect"):
driver_cls = driver_mod.mariadb_dialect
return driver_cls
else:
driver_cls = driver_mod.dialect
return type(
"MariaDBDialect_%s" % driver,
(
MariaDBDialect,
driver_cls,
),
{"supports_statement_cache": True},
)
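
A column-definition sketch for the MariaDB-only INET types above; the table
name is hypothetical. On a mariadb dialect these render as INET4 and INET6
in DDL.

from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.dialects.mysql import INET4, INET6

metadata = MetaData()
hosts = Table(
    "hosts",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("addr_v4", INET4()),
    Column("addr_v6", INET6()),
)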

View File

@@ -0,0 +1,277 @@
# dialects/mysql/mariadbconnector.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
"""
.. dialect:: mysql+mariadbconnector
:name: MariaDB Connector/Python
:dbapi: mariadb
:connectstring: mariadb+mariadbconnector://<user>:<password>@<host>[:<port>]/<dbname>
:url: https://pypi.org/project/mariadb/
Driver Status
-------------
MariaDB Connector/Python enables Python programs to access MariaDB and MySQL
databases using an API which is compliant with the Python DB API 2.0 (PEP-249).
It is written in C and uses the MariaDB Connector/C client library for
client-server communication.
Note that the default driver for a ``mariadb://`` connection URI continues to
be ``mysqldb``. ``mariadb+mariadbconnector://`` is required to use this driver.
.. _mariadb: https://github.com/mariadb-corporation/mariadb-connector-python
""" # noqa
import re
from uuid import UUID as _python_UUID
from .base import MySQLCompiler
from .base import MySQLDialect
from .base import MySQLExecutionContext
from ... import sql
from ... import util
from ...sql import sqltypes
mariadb_cpy_minimum_version = (1, 0, 1)
class _MariaDBUUID(sqltypes.UUID[sqltypes._UUID_RETURN]):
# work around JIRA issue
# https://jira.mariadb.org/browse/CONPY-270. When that issue is fixed,
# this type can be removed.
def result_processor(self, dialect, coltype):
if self.as_uuid:
def process(value):
if value is not None:
if hasattr(value, "decode"):
value = value.decode("ascii")
value = _python_UUID(value)
return value
return process
else:
def process(value):
if value is not None:
if hasattr(value, "decode"):
value = value.decode("ascii")
value = str(_python_UUID(value))
return value
return process
class MySQLExecutionContext_mariadbconnector(MySQLExecutionContext):
_lastrowid = None
def create_server_side_cursor(self):
return self._dbapi_connection.cursor(buffered=False)
def create_default_cursor(self):
return self._dbapi_connection.cursor(buffered=True)
def post_exec(self):
super().post_exec()
self._rowcount = self.cursor.rowcount
if self.isinsert and self.compiled.postfetch_lastrowid:
self._lastrowid = self.cursor.lastrowid
def get_lastrowid(self):
return self._lastrowid
class MySQLCompiler_mariadbconnector(MySQLCompiler):
pass
class MySQLDialect_mariadbconnector(MySQLDialect):
driver = "mariadbconnector"
supports_statement_cache = True
# set this to True at the module level to prevent the driver from running
# against a backend that the server detects as MySQL. Currently this appears to
# be unnecessary as MariaDB client libraries have always worked against
# MySQL databases. However, if this changes at some point, this can be
# adjusted, but PLEASE ADD A TEST in test/dialect/mysql/test_dialect.py if
# this change is made at some point to ensure the correct exception
# is raised at the correct point when running the driver against
# a MySQL backend.
# is_mariadb = True
supports_unicode_statements = True
encoding = "utf8mb4"
convert_unicode = True
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
supports_native_decimal = True
default_paramstyle = "qmark"
execution_ctx_cls = MySQLExecutionContext_mariadbconnector
statement_compiler = MySQLCompiler_mariadbconnector
supports_server_side_cursors = True
colspecs = util.update_copy(
MySQLDialect.colspecs, {sqltypes.Uuid: _MariaDBUUID}
)
@util.memoized_property
def _dbapi_version(self):
if self.dbapi and hasattr(self.dbapi, "__version__"):
return tuple(
[
int(x)
for x in re.findall(
r"(\d+)(?:[-\.]?|$)", self.dbapi.__version__
)
]
)
else:
return (99, 99, 99)
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.paramstyle = "qmark"
if self.dbapi is not None:
if self._dbapi_version < mariadb_cpy_minimum_version:
raise NotImplementedError(
"The minimum required version for MariaDB "
"Connector/Python is %s"
% ".".join(str(x) for x in mariadb_cpy_minimum_version)
)
@classmethod
def import_dbapi(cls):
return __import__("mariadb")
def is_disconnect(self, e, connection, cursor):
if super().is_disconnect(e, connection, cursor):
return True
elif isinstance(e, self.dbapi.Error):
str_e = str(e).lower()
return "not connected" in str_e or "isn't valid" in str_e
else:
return False
def create_connect_args(self, url):
opts = url.translate_connect_args()
opts.update(url.query)
int_params = [
"connect_timeout",
"read_timeout",
"write_timeout",
"client_flag",
"port",
"pool_size",
]
bool_params = [
"local_infile",
"ssl_verify_cert",
"ssl",
"pool_reset_connection",
"compress",
]
for key in int_params:
util.coerce_kw_type(opts, key, int)
for key in bool_params:
util.coerce_kw_type(opts, key, bool)
# FOUND_ROWS must be set in CLIENT_FLAGS to enable
# supports_sane_rowcount.
client_flag = opts.get("client_flag", 0)
if self.dbapi is not None:
try:
CLIENT_FLAGS = __import__(
self.dbapi.__name__ + ".constants.CLIENT"
).constants.CLIENT
client_flag |= CLIENT_FLAGS.FOUND_ROWS
except (AttributeError, ImportError):
self.supports_sane_rowcount = False
opts["client_flag"] = client_flag
return [[], opts]
def _extract_error_code(self, exception):
try:
rc = exception.errno
except Exception:
rc = -1
return rc
def _detect_charset(self, connection):
return "utf8mb4"
def get_isolation_level_values(self, dbapi_connection):
return (
"SERIALIZABLE",
"READ UNCOMMITTED",
"READ COMMITTED",
"REPEATABLE READ",
"AUTOCOMMIT",
)
def set_isolation_level(self, connection, level):
if level == "AUTOCOMMIT":
connection.autocommit = True
else:
connection.autocommit = False
super().set_isolation_level(connection, level)
def do_begin_twophase(self, connection, xid):
connection.execute(
sql.text("XA BEGIN :xid").bindparams(
sql.bindparam("xid", xid, literal_execute=True)
)
)
def do_prepare_twophase(self, connection, xid):
connection.execute(
sql.text("XA END :xid").bindparams(
sql.bindparam("xid", xid, literal_execute=True)
)
)
connection.execute(
sql.text("XA PREPARE :xid").bindparams(
sql.bindparam("xid", xid, literal_execute=True)
)
)
def do_rollback_twophase(
self, connection, xid, is_prepared=True, recover=False
):
if not is_prepared:
connection.execute(
sql.text("XA END :xid").bindparams(
sql.bindparam("xid", xid, literal_execute=True)
)
)
connection.execute(
sql.text("XA ROLLBACK :xid").bindparams(
sql.bindparam("xid", xid, literal_execute=True)
)
)
def do_commit_twophase(
self, connection, xid, is_prepared=True, recover=False
):
if not is_prepared:
self.do_prepare_twophase(connection, xid)
connection.execute(
sql.text("XA COMMIT :xid").bindparams(
sql.bindparam("xid", xid, literal_execute=True)
)
)
dialect = MySQLDialect_mariadbconnector
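
A minimal connection sketch for this dialect, assuming the ``mariadb``
package is installed; host, credentials, and database are placeholders.

from sqlalchemy import create_engine, text

engine = create_engine(
    "mariadb+mariadbconnector://user:pass@127.0.0.1:3306/test",
    isolation_level="READ COMMITTED",
)
with engine.connect() as conn:
    print(conn.execute(text("SELECT 1")).scalar())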

View File

@@ -0,0 +1,245 @@
# dialects/mysql/mysqlconnector.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
r"""
.. dialect:: mysql+mysqlconnector
:name: MySQL Connector/Python
:dbapi: myconnpy
:connectstring: mysql+mysqlconnector://<user>:<password>@<host>[:<port>]/<dbname>
:url: https://pypi.org/project/mysql-connector-python/
Driver Status
-------------
MySQL Connector/Python is supported as of SQLAlchemy 2.0.39 to the
degree that the driver is functional. There are still ongoing issues
with features such as server side cursors which remain disabled until
upstream issues are repaired.
.. versionchanged:: 2.0.39
The MySQL Connector/Python dialect has been updated to support the
latest version of this DBAPI. Previously, MySQL Connector/Python
was not fully supported.
Connecting to MariaDB with MySQL Connector/Python
--------------------------------------------------
MySQL Connector/Python may attempt to pass an incompatible collation to the
database when connecting to MariaDB. Experimentation has shown that using
``?charset=utf8mb4&collation=utf8mb4_general_ci`` or similar MariaDB-compatible
charset/collation will allow connectivity.
""" # noqa
import re
from .base import BIT
from .base import MariaDBIdentifierPreparer
from .base import MySQLCompiler
from .base import MySQLDialect
from .base import MySQLExecutionContext
from .base import MySQLIdentifierPreparer
from .mariadb import MariaDBDialect
from ... import util
class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
def create_server_side_cursor(self):
return self._dbapi_connection.cursor(buffered=False)
def create_default_cursor(self):
return self._dbapi_connection.cursor(buffered=True)
class MySQLCompiler_mysqlconnector(MySQLCompiler):
def visit_mod_binary(self, binary, operator, **kw):
return (
self.process(binary.left, **kw)
+ " % "
+ self.process(binary.right, **kw)
)
class IdentifierPreparerCommon_mysqlconnector:
@property
def _double_percents(self):
return False
@_double_percents.setter
def _double_percents(self, value):
pass
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
return value
class MySQLIdentifierPreparer_mysqlconnector(
IdentifierPreparerCommon_mysqlconnector, MySQLIdentifierPreparer
):
pass
class MariaDBIdentifierPreparer_mysqlconnector(
IdentifierPreparerCommon_mysqlconnector, MariaDBIdentifierPreparer
):
pass
class _myconnpyBIT(BIT):
def result_processor(self, dialect, coltype):
"""MySQL-connector already converts mysql bits, so."""
return None
class MySQLDialect_mysqlconnector(MySQLDialect):
driver = "mysqlconnector"
supports_statement_cache = True
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
supports_native_decimal = True
supports_native_bit = True
# not until https://bugs.mysql.com/bug.php?id=117548
supports_server_side_cursors = False
default_paramstyle = "format"
statement_compiler = MySQLCompiler_mysqlconnector
execution_ctx_cls = MySQLExecutionContext_mysqlconnector
preparer = MySQLIdentifierPreparer_mysqlconnector
colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _myconnpyBIT})
@classmethod
def import_dbapi(cls):
from mysql import connector
return connector
def do_ping(self, dbapi_connection):
dbapi_connection.ping(False)
return True
def create_connect_args(self, url):
opts = url.translate_connect_args(username="user")
opts.update(url.query)
util.coerce_kw_type(opts, "allow_local_infile", bool)
util.coerce_kw_type(opts, "autocommit", bool)
util.coerce_kw_type(opts, "buffered", bool)
util.coerce_kw_type(opts, "client_flag", int)
util.coerce_kw_type(opts, "compress", bool)
util.coerce_kw_type(opts, "connection_timeout", int)
util.coerce_kw_type(opts, "connect_timeout", int)
util.coerce_kw_type(opts, "consume_results", bool)
util.coerce_kw_type(opts, "force_ipv6", bool)
util.coerce_kw_type(opts, "get_warnings", bool)
util.coerce_kw_type(opts, "pool_reset_session", bool)
util.coerce_kw_type(opts, "pool_size", int)
util.coerce_kw_type(opts, "raise_on_warnings", bool)
util.coerce_kw_type(opts, "raw", bool)
util.coerce_kw_type(opts, "ssl_verify_cert", bool)
util.coerce_kw_type(opts, "use_pure", bool)
util.coerce_kw_type(opts, "use_unicode", bool)
# note that "buffered" is set to False by default in MySQL/connector
# python. If you set it to True, then there is no way to get a server
# side cursor because the logic is written to disallow that.
# leaving this at True until
# https://bugs.mysql.com/bug.php?id=117548 can be fixed
opts["buffered"] = True
# FOUND_ROWS must be set in ClientFlag to enable
# supports_sane_rowcount.
if self.dbapi is not None:
try:
from mysql.connector.constants import ClientFlag
client_flags = opts.get(
"client_flags", ClientFlag.get_default()
)
client_flags |= ClientFlag.FOUND_ROWS
opts["client_flags"] = client_flags
except Exception:
pass
return [[], opts]
@util.memoized_property
def _mysqlconnector_version_info(self):
if self.dbapi and hasattr(self.dbapi, "__version__"):
m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", self.dbapi.__version__)
if m:
return tuple(int(x) for x in m.group(1, 2, 3) if x is not None)
def _detect_charset(self, connection):
return connection.connection.charset
def _extract_error_code(self, exception):
return exception.errno
def is_disconnect(self, e, connection, cursor):
errnos = (2006, 2013, 2014, 2045, 2055, 2048)
exceptions = (
self.dbapi.OperationalError,
self.dbapi.InterfaceError,
self.dbapi.ProgrammingError,
)
if isinstance(e, exceptions):
return (
e.errno in errnos
or "MySQL Connection not available." in str(e)
or "Connection to MySQL is not available" in str(e)
)
else:
return False
def _compat_fetchall(self, rp, charset=None):
return rp.fetchall()
def _compat_fetchone(self, rp, charset=None):
return rp.fetchone()
def get_isolation_level_values(self, dbapi_connection):
return (
"SERIALIZABLE",
"READ UNCOMMITTED",
"READ COMMITTED",
"REPEATABLE READ",
"AUTOCOMMIT",
)
def set_isolation_level(self, connection, level):
if level == "AUTOCOMMIT":
connection.autocommit = True
else:
connection.autocommit = False
super().set_isolation_level(connection, level)
class MariaDBDialect_mysqlconnector(
MariaDBDialect, MySQLDialect_mysqlconnector
):
supports_statement_cache = True
_allows_uuid_binds = False
preparer = MariaDBIdentifierPreparer_mysqlconnector
dialect = MySQLDialect_mysqlconnector
mariadb_dialect = MariaDBDialect_mysqlconnector
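
A sketch of the MariaDB charset/collation workaround described in the
docstring above, assuming mysql-connector-python is installed; the host,
credentials, and database are placeholders.

from sqlalchemy import create_engine

engine = create_engine(
    "mysql+mysqlconnector://user:pass@mariadb-host/test"
    "?charset=utf8mb4&collation=utf8mb4_general_ci"
)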

View File

@@ -0,0 +1,305 @@
# dialects/mysql/mysqldb.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
"""
.. dialect:: mysql+mysqldb
:name: mysqlclient (maintained fork of MySQL-Python)
:dbapi: mysqldb
:connectstring: mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
:url: https://pypi.org/project/mysqlclient/
Driver Status
-------------
The mysqlclient DBAPI is a maintained fork of the
`MySQL-Python <https://sourceforge.net/projects/mysql-python>`_ DBAPI,
which is no longer maintained. `mysqlclient`_ supports Python 2 and Python 3
and is very stable.
.. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python
.. _mysqldb_unicode:
Unicode
-------
Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.
.. _mysqldb_ssl:
SSL Connections
----------------
The mysqlclient and PyMySQL DBAPIs accept an additional dictionary under the
key "ssl", which may be specified using the
:paramref:`_sa.create_engine.connect_args` dictionary::
engine = create_engine(
"mysql+mysqldb://scott:tiger@192.168.0.134/test",
connect_args={
"ssl": {
"ca": "/home/gord/client-ssl/ca.pem",
"cert": "/home/gord/client-ssl/client-cert.pem",
"key": "/home/gord/client-ssl/client-key.pem",
}
},
)
For convenience, the following keys may also be specified inline within the URL
where they will be interpreted into the "ssl" dictionary automatically:
"ssl_ca", "ssl_cert", "ssl_key", "ssl_capath", "ssl_cipher",
"ssl_check_hostname". An example is as follows::
connection_uri = (
"mysql+mysqldb://scott:tiger@192.168.0.134/test"
"?ssl_ca=/home/gord/client-ssl/ca.pem"
"&ssl_cert=/home/gord/client-ssl/client-cert.pem"
"&ssl_key=/home/gord/client-ssl/client-key.pem"
)
.. seealso::
:ref:`pymysql_ssl` in the PyMySQL dialect
Using MySQLdb with Google Cloud SQL
-----------------------------------
Google Cloud SQL now recommends use of the MySQLdb dialect. Connect
using a URL like the following:
.. sourcecode:: text
mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>
Server Side Cursors
-------------------
The mysqldb dialect supports server-side cursors. See :ref:`mysql_ss_cursors`.
"""
import re
from .base import MySQLCompiler
from .base import MySQLDialect
from .base import MySQLExecutionContext
from .base import MySQLIdentifierPreparer
from .base import TEXT
from ... import sql
from ... import util
class MySQLExecutionContext_mysqldb(MySQLExecutionContext):
pass
class MySQLCompiler_mysqldb(MySQLCompiler):
pass
class MySQLDialect_mysqldb(MySQLDialect):
driver = "mysqldb"
supports_statement_cache = True
supports_unicode_statements = True
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
supports_native_decimal = True
default_paramstyle = "format"
execution_ctx_cls = MySQLExecutionContext_mysqldb
statement_compiler = MySQLCompiler_mysqldb
preparer = MySQLIdentifierPreparer
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._mysql_dbapi_version = (
self._parse_dbapi_version(self.dbapi.__version__)
if self.dbapi is not None and hasattr(self.dbapi, "__version__")
else (0, 0, 0)
)
def _parse_dbapi_version(self, version):
m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", version)
if m:
return tuple(int(x) for x in m.group(1, 2, 3) if x is not None)
else:
return (0, 0, 0)
@util.langhelpers.memoized_property
def supports_server_side_cursors(self):
try:
cursors = __import__("MySQLdb.cursors").cursors
self._sscursor = cursors.SSCursor
return True
except (ImportError, AttributeError):
return False
@classmethod
def import_dbapi(cls):
return __import__("MySQLdb")
def on_connect(self):
super_ = super().on_connect()
def on_connect(conn):
if super_ is not None:
super_(conn)
charset_name = conn.character_set_name()
if charset_name is not None:
cursor = conn.cursor()
cursor.execute("SET NAMES %s" % charset_name)
cursor.close()
return on_connect
def do_ping(self, dbapi_connection):
dbapi_connection.ping()
return True
def do_executemany(self, cursor, statement, parameters, context=None):
rowcount = cursor.executemany(statement, parameters)
if context is not None:
context._rowcount = rowcount
def _check_unicode_returns(self, connection):
# work around issue fixed in
# https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
# specific issue w/ the utf8mb4_bin collation and unicode returns
collation = connection.exec_driver_sql(
"show collation where %s = 'utf8mb4' and %s = 'utf8mb4_bin'"
% (
self.identifier_preparer.quote("Charset"),
self.identifier_preparer.quote("Collation"),
)
).scalar()
has_utf8mb4_bin = self.server_version_info > (5,) and collation
if has_utf8mb4_bin:
additional_tests = [
sql.collate(
sql.cast(
sql.literal_column("'test collated returns'"),
TEXT(charset="utf8mb4"),
),
"utf8mb4_bin",
)
]
else:
additional_tests = []
return super()._check_unicode_returns(connection, additional_tests)
def create_connect_args(self, url, _translate_args=None):
if _translate_args is None:
_translate_args = dict(
database="db", username="user", password="passwd"
)
opts = url.translate_connect_args(**_translate_args)
opts.update(url.query)
util.coerce_kw_type(opts, "compress", bool)
util.coerce_kw_type(opts, "connect_timeout", int)
util.coerce_kw_type(opts, "read_timeout", int)
util.coerce_kw_type(opts, "write_timeout", int)
util.coerce_kw_type(opts, "client_flag", int)
util.coerce_kw_type(opts, "local_infile", bool)
# Note: using either of the below will cause all strings to be
# returned as Unicode, both in raw SQL operations and with column
# types like String and MSString.
util.coerce_kw_type(opts, "use_unicode", bool)
util.coerce_kw_type(opts, "charset", str)
# Rich values 'cursorclass' and 'conv' are not supported via
# query string.
ssl = {}
keys = [
("ssl_ca", str),
("ssl_key", str),
("ssl_cert", str),
("ssl_capath", str),
("ssl_cipher", str),
("ssl_check_hostname", bool),
]
for key, kw_type in keys:
if key in opts:
ssl[key[4:]] = opts[key]
util.coerce_kw_type(ssl, key[4:], kw_type)
del opts[key]
if ssl:
opts["ssl"] = ssl
# FOUND_ROWS must be set in CLIENT_FLAGS to enable
# supports_sane_rowcount.
client_flag = opts.get("client_flag", 0)
client_flag_found_rows = self._found_rows_client_flag()
if client_flag_found_rows is not None:
client_flag |= client_flag_found_rows
opts["client_flag"] = client_flag
return [[], opts]
def _found_rows_client_flag(self):
if self.dbapi is not None:
try:
CLIENT_FLAGS = __import__(
self.dbapi.__name__ + ".constants.CLIENT"
).constants.CLIENT
except (AttributeError, ImportError):
return None
else:
return CLIENT_FLAGS.FOUND_ROWS
else:
return None
def _extract_error_code(self, exception):
return exception.args[0]
def _detect_charset(self, connection):
"""Sniff out the character set in use for connection results."""
try:
# note: the SQL here would be
# "SHOW VARIABLES LIKE 'character_set%%'"
cset_name = connection.connection.character_set_name
except AttributeError:
util.warn(
"No 'character_set_name' can be detected with "
"this MySQL-Python version; "
"please upgrade to a recent version of MySQL-Python. "
"Assuming latin1."
)
return "latin1"
else:
return cset_name()
def get_isolation_level_values(self, dbapi_connection):
return (
"SERIALIZABLE",
"READ UNCOMMITTED",
"READ COMMITTED",
"REPEATABLE READ",
"AUTOCOMMIT",
)
def set_isolation_level(self, dbapi_connection, level):
if level == "AUTOCOMMIT":
dbapi_connection.autocommit(True)
else:
dbapi_connection.autocommit(False)
super().set_isolation_level(dbapi_connection, level)
dialect = MySQLDialect_mysqldb

View File

@ -0,0 +1,114 @@
# dialects/mysql/provision.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
from ... import exc
from ...testing.provision import configure_follower
from ...testing.provision import create_db
from ...testing.provision import drop_db
from ...testing.provision import generate_driver_url
from ...testing.provision import temp_table_keyword_args
from ...testing.provision import upsert
@generate_driver_url.for_db("mysql", "mariadb")
def generate_driver_url(url, driver, query_str):
backend = url.get_backend_name()
# NOTE: at the moment, tests are running mariadbconnector
# against both mariadb and mysql backends. if we want this to be
# limited, do the decision making here to reject a "mysql+mariadbconnector"
# URL. Optionally also re-enable the module level
# MySQLDialect_mariadbconnector.is_mysql flag as well, which must include
# a unit and/or functional test.
# all the Jenkins tests have been running mysqlclient Python library
# built against mariadb client drivers for years against all MySQL /
# MariaDB versions going back to MySQL 5.6, currently they can talk
# to MySQL databases without problems.
if backend == "mysql":
dialect_cls = url.get_dialect()
if dialect_cls._is_mariadb_from_url(url):
backend = "mariadb"
new_url = url.set(
drivername="%s+%s" % (backend, driver)
).update_query_string(query_str)
if driver == "mariadbconnector":
new_url = new_url.difference_update_query(["charset"])
elif driver == "mysqlconnector":
new_url = new_url.update_query_pairs(
[("collation", "utf8mb4_general_ci")]
)
try:
new_url.get_dialect()
except exc.NoSuchModuleError:
return None
else:
return new_url
@create_db.for_db("mysql", "mariadb")
def _mysql_create_db(cfg, eng, ident):
with eng.begin() as conn:
try:
_mysql_drop_db(cfg, conn, ident)
except Exception:
pass
with eng.begin() as conn:
conn.exec_driver_sql(
"CREATE DATABASE %s CHARACTER SET utf8mb4" % ident
)
conn.exec_driver_sql(
"CREATE DATABASE %s_test_schema CHARACTER SET utf8mb4" % ident
)
conn.exec_driver_sql(
"CREATE DATABASE %s_test_schema_2 CHARACTER SET utf8mb4" % ident
)
@configure_follower.for_db("mysql", "mariadb")
def _mysql_configure_follower(config, ident):
config.test_schema = "%s_test_schema" % ident
config.test_schema_2 = "%s_test_schema_2" % ident
@drop_db.for_db("mysql", "mariadb")
def _mysql_drop_db(cfg, eng, ident):
with eng.begin() as conn:
conn.exec_driver_sql("DROP DATABASE %s_test_schema" % ident)
conn.exec_driver_sql("DROP DATABASE %s_test_schema_2" % ident)
conn.exec_driver_sql("DROP DATABASE %s" % ident)
@temp_table_keyword_args.for_db("mysql", "mariadb")
def _mysql_temp_table_keyword_args(cfg, eng):
return {"prefixes": ["TEMPORARY"]}
@upsert.for_db("mariadb")
def _upsert(
cfg, table, returning, *, set_lambda=None, sort_by_parameter_order=False
):
from sqlalchemy.dialects.mysql import insert
stmt = insert(table)
if set_lambda:
stmt = stmt.on_duplicate_key_update(**set_lambda(stmt.inserted))
else:
pk1 = table.primary_key.c[0]
stmt = stmt.on_duplicate_key_update({pk1.key: pk1})
stmt = stmt.returning(
*returning, sort_by_parameter_order=sort_by_parameter_order
)
return stmt

View File

@ -0,0 +1,136 @@
# dialects/mysql/pymysql.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
r"""
.. dialect:: mysql+pymysql
:name: PyMySQL
:dbapi: pymysql
:connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>[?<options>]
:url: https://pymysql.readthedocs.io/
Unicode
-------
Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.
.. _pymysql_ssl:
SSL Connections
------------------
The PyMySQL DBAPI accepts the same SSL arguments as that of MySQLdb,
described at :ref:`mysqldb_ssl`. See that section for additional examples.
If the server uses an automatically-generated certificate that is self-signed
or does not match the host name (as seen from the client), it may also be
necessary to indicate ``ssl_check_hostname=false`` in PyMySQL::
connection_uri = (
"mysql+pymysql://scott:tiger@192.168.0.134/test"
"?ssl_ca=/home/gord/client-ssl/ca.pem"
"&ssl_cert=/home/gord/client-ssl/client-cert.pem"
"&ssl_key=/home/gord/client-ssl/client-key.pem"
"&ssl_check_hostname=false"
)
MySQL-Python Compatibility
--------------------------
The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver,
and targets 100% compatibility. Most behavioral notes for MySQL-python apply
to the pymysql driver as well.
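In practice this means an existing mysqlclient URL can usually be pointed at
PyMySQL by changing only the driver token; a minimal sketch (credentials and
database name are illustrative)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4"
    )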
""" # noqa
from .mysqldb import MySQLDialect_mysqldb
from ...util import langhelpers
class MySQLDialect_pymysql(MySQLDialect_mysqldb):
driver = "pymysql"
supports_statement_cache = True
description_encoding = None
@langhelpers.memoized_property
def supports_server_side_cursors(self):
try:
cursors = __import__("pymysql.cursors").cursors
self._sscursor = cursors.SSCursor
return True
except (ImportError, AttributeError):
return False
@classmethod
def import_dbapi(cls):
return __import__("pymysql")
@langhelpers.memoized_property
def _send_false_to_ping(self):
"""determine if pymysql has deprecated, changed the default of,
or removed the 'reconnect' argument of connection.ping().
See #10492 and
https://github.com/PyMySQL/mysqlclient/discussions/651#discussioncomment-7308971
for background.
""" # noqa: E501
try:
Connection = __import__(
"pymysql.connections"
).connections.Connection
except (ImportError, AttributeError):
return True
else:
insp = langhelpers.get_callable_argspec(Connection.ping)
try:
reconnect_arg = insp.args[1]
except IndexError:
return False
else:
return reconnect_arg == "reconnect" and (
not insp.defaults or insp.defaults[0] is not False
)
def do_ping(self, dbapi_connection):
if self._send_false_to_ping:
dbapi_connection.ping(False)
else:
dbapi_connection.ping()
return True
def create_connect_args(self, url, _translate_args=None):
if _translate_args is None:
_translate_args = dict(username="user")
return super().create_connect_args(
url, _translate_args=_translate_args
)
def is_disconnect(self, e, connection, cursor):
if super().is_disconnect(e, connection, cursor):
return True
elif isinstance(e, self.dbapi.Error):
str_e = str(e).lower()
return (
"already closed" in str_e or "connection was killed" in str_e
)
else:
return False
def _extract_error_code(self, exception):
if isinstance(exception.args[0], Exception):
exception = exception.args[0]
return exception.args[0]
dialect = MySQLDialect_pymysql

View File

@ -0,0 +1,139 @@
# dialects/mysql/pyodbc.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
r"""
.. dialect:: mysql+pyodbc
:name: PyODBC
:dbapi: pyodbc
:connectstring: mysql+pyodbc://<username>:<password>@<dsnname>
:url: https://pypi.org/project/pyodbc/
.. note::
The PyODBC for MySQL dialect is **not tested as part of
SQLAlchemy's continuous integration**.
The recommended MySQL dialects are mysqlclient and PyMySQL.
However, if you want to use the mysql+pyodbc dialect and require
full support for ``utf8mb4`` characters (including supplementary
characters like emoji) be sure to use a current release of
MySQL Connector/ODBC and specify the "ANSI" (**not** "Unicode")
version of the driver in your DSN or connection string.
Pass through exact pyodbc connection string::
import urllib
connection_string = (
"DRIVER=MySQL ODBC 8.0 ANSI Driver;"
"SERVER=localhost;"
"PORT=3307;"
"DATABASE=mydb;"
"UID=root;"
"PWD=(whatever);"
"charset=utf8mb4;"
)
params = urllib.parse.quote_plus(connection_string)
connection_uri = "mysql+pyodbc:///?odbc_connect=%s" % params
""" # noqa
import re
from .base import MySQLDialect
from .base import MySQLExecutionContext
from .types import TIME
from ... import exc
from ... import util
from ...connectors.pyodbc import PyODBCConnector
from ...sql.sqltypes import Time
class _pyodbcTIME(TIME):
def result_processor(self, dialect, coltype):
def process(value):
# pyodbc returns a datetime.time object; no need to convert
return value
return process
class MySQLExecutionContext_pyodbc(MySQLExecutionContext):
def get_lastrowid(self):
cursor = self.create_cursor()
cursor.execute("SELECT LAST_INSERT_ID()")
lastrowid = cursor.fetchone()[0]
cursor.close()
return lastrowid
class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
supports_statement_cache = True
colspecs = util.update_copy(MySQLDialect.colspecs, {Time: _pyodbcTIME})
supports_unicode_statements = True
execution_ctx_cls = MySQLExecutionContext_pyodbc
pyodbc_driver_name = "MySQL"
def _detect_charset(self, connection):
"""Sniff out the character set in use for connection results."""
# Prefer 'character_set_results' for the current connection over the
# value in the driver. SET NAMES or individual variable SETs will
# change the charset without updating the driver's view of the world.
#
# If it's decided that issuing that sort of SQL leaves you SOL, then
# this can prefer the driver value.
# set this to None as _fetch_setting attempts to use it (None is OK)
self._connection_charset = None
try:
value = self._fetch_setting(connection, "character_set_client")
if value:
return value
except exc.DBAPIError:
pass
util.warn(
"Could not detect the connection character set. "
"Assuming latin1."
)
return "latin1"
def _get_server_version_info(self, connection):
return MySQLDialect._get_server_version_info(self, connection)
def _extract_error_code(self, exception):
m = re.compile(r"\((\d+)\)").search(str(exception.args))
c = m.group(1)
if c:
return int(c)
else:
return None
def on_connect(self):
super_ = super().on_connect()
def on_connect(conn):
if super_ is not None:
super_(conn)
# declare Unicode encoding for pyodbc as per
# https://github.com/mkleehammer/pyodbc/wiki/Unicode
pyodbc_SQL_CHAR = 1 # pyodbc.SQL_CHAR
pyodbc_SQL_WCHAR = -8 # pyodbc.SQL_WCHAR
conn.setdecoding(pyodbc_SQL_CHAR, encoding="utf-8")
conn.setdecoding(pyodbc_SQL_WCHAR, encoding="utf-8")
conn.setencoding(encoding="utf-8")
return on_connect
dialect = MySQLDialect_pyodbc

View File

@ -0,0 +1,677 @@
# dialects/mysql/reflection.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
import re
from .enumerated import ENUM
from .enumerated import SET
from .types import DATETIME
from .types import TIME
from .types import TIMESTAMP
from ... import log
from ... import types as sqltypes
from ... import util
class ReflectedState:
"""Stores raw information about a SHOW CREATE TABLE statement."""
def __init__(self):
self.columns = []
self.table_options = {}
self.table_name = None
self.keys = []
self.fk_constraints = []
self.ck_constraints = []
@log.class_logger
class MySQLTableDefinitionParser:
"""Parses the results of a SHOW CREATE TABLE statement."""
def __init__(self, dialect, preparer):
self.dialect = dialect
self.preparer = preparer
self._prep_regexes()
def parse(self, show_create, charset):
state = ReflectedState()
state.charset = charset
for line in re.split(r"\r?\n", show_create):
if line.startswith(" " + self.preparer.initial_quote):
self._parse_column(line, state)
# a regular table options line
elif line.startswith(") "):
self._parse_table_options(line, state)
# an ANSI-mode table options line
elif line == ")":
pass
elif line.startswith("CREATE "):
self._parse_table_name(line, state)
elif "PARTITION" in line:
self._parse_partition_options(line, state)
# Not present in real reflection, but may be if
# loading from a file.
elif not line:
pass
else:
type_, spec = self._parse_constraints(line)
if type_ is None:
util.warn("Unknown schema content: %r" % line)
elif type_ == "key":
state.keys.append(spec)
elif type_ == "fk_constraint":
state.fk_constraints.append(spec)
elif type_ == "ck_constraint":
state.ck_constraints.append(spec)
else:
pass
return state
def _check_view(self, sql: str) -> bool:
return bool(self._re_is_view.match(sql))
def _parse_constraints(self, line):
"""Parse a KEY or CONSTRAINT line.
:param line: A line of SHOW CREATE TABLE output
"""
# KEY
m = self._re_key.match(line)
if m:
spec = m.groupdict()
# convert columns into name, length pairs
# NOTE: we may want to consider SHOW INDEX as the
# format of indexes in MySQL becomes more complex
spec["columns"] = self._parse_keyexprs(spec["columns"])
if spec["version_sql"]:
m2 = self._re_key_version_sql.match(spec["version_sql"])
if m2 and m2.groupdict()["parser"]:
spec["parser"] = m2.groupdict()["parser"]
if spec["parser"]:
spec["parser"] = self.preparer.unformat_identifiers(
spec["parser"]
)[0]
return "key", spec
# FOREIGN KEY CONSTRAINT
m = self._re_fk_constraint.match(line)
if m:
spec = m.groupdict()
spec["table"] = self.preparer.unformat_identifiers(spec["table"])
spec["local"] = [c[0] for c in self._parse_keyexprs(spec["local"])]
spec["foreign"] = [
c[0] for c in self._parse_keyexprs(spec["foreign"])
]
return "fk_constraint", spec
# CHECK constraint
m = self._re_ck_constraint.match(line)
if m:
spec = m.groupdict()
return "ck_constraint", spec
# PARTITION and SUBPARTITION
m = self._re_partition.match(line)
if m:
# Punt!
return "partition", line
# No match.
return (None, line)
def _parse_table_name(self, line, state):
"""Extract the table name.
:param line: The first line of SHOW CREATE TABLE
"""
regex, cleanup = self._pr_name
m = regex.match(line)
if m:
state.table_name = cleanup(m.group("name"))
def _parse_table_options(self, line, state):
"""Build a dictionary of all reflected table-level options.
:param line: The final line of SHOW CREATE TABLE output.
"""
options = {}
if line and line != ")":
rest_of_line = line
for regex, cleanup in self._pr_options:
m = regex.search(rest_of_line)
if not m:
continue
directive, value = m.group("directive"), m.group("val")
if cleanup:
value = cleanup(value)
options[directive.lower()] = value
rest_of_line = regex.sub("", rest_of_line)
for nope in ("auto_increment", "data directory", "index directory"):
options.pop(nope, None)
for opt, val in options.items():
state.table_options["%s_%s" % (self.dialect.name, opt)] = val
def _parse_partition_options(self, line, state):
options = {}
new_line = line[:]
while new_line.startswith("(") or new_line.startswith(" "):
new_line = new_line[1:]
for regex, cleanup in self._pr_options:
m = regex.search(new_line)
if not m or "PARTITION" not in regex.pattern:
continue
directive = m.group("directive")
directive = directive.lower()
is_subpartition = directive == "subpartition"
if directive == "partition" or is_subpartition:
new_line = new_line.replace(") */", "")
new_line = new_line.replace(",", "")
if is_subpartition and new_line.endswith(")"):
new_line = new_line[:-1]
if self.dialect.name == "mariadb" and new_line.endswith(")"):
if (
"MAXVALUE" in new_line
or "MINVALUE" in new_line
or "ENGINE" in new_line
):
# final line of MariaDB partition endswith ")"
new_line = new_line[:-1]
defs = "%s_%s_definitions" % (self.dialect.name, directive)
options[defs] = new_line
else:
directive = directive.replace(" ", "_")
value = m.group("val")
if cleanup:
value = cleanup(value)
options[directive] = value
break
for opt, val in options.items():
part_def = "%s_partition_definitions" % (self.dialect.name)
subpart_def = "%s_subpartition_definitions" % (self.dialect.name)
if opt == part_def or opt == subpart_def:
# builds a string of definitions
if opt not in state.table_options:
state.table_options[opt] = val
else:
state.table_options[opt] = "%s, %s" % (
state.table_options[opt],
val,
)
else:
state.table_options["%s_%s" % (self.dialect.name, opt)] = val
def _parse_column(self, line, state):
"""Extract column details.
Falls back to a 'minimal support' variant if full parse fails.
:param line: Any column-bearing line from SHOW CREATE TABLE
"""
spec = None
m = self._re_column.match(line)
if m:
spec = m.groupdict()
spec["full"] = True
else:
m = self._re_column_loose.match(line)
if m:
spec = m.groupdict()
spec["full"] = False
if not spec:
util.warn("Unknown column definition %r" % line)
return
if not spec["full"]:
util.warn("Incomplete reflection of column definition %r" % line)
name, type_, args = spec["name"], spec["coltype"], spec["arg"]
try:
col_type = self.dialect.ischema_names[type_]
except KeyError:
util.warn(
"Did not recognize type '%s' of column '%s'" % (type_, name)
)
col_type = sqltypes.NullType
# Column type positional arguments eg. varchar(32)
if args is None or args == "":
type_args = []
elif args[0] == "'" and args[-1] == "'":
type_args = self._re_csv_str.findall(args)
else:
type_args = [int(v) for v in self._re_csv_int.findall(args)]
# Column type keyword options
type_kw = {}
if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)):
if type_args:
type_kw["fsp"] = type_args.pop(0)
for kw in ("unsigned", "zerofill"):
if spec.get(kw, False):
type_kw[kw] = True
for kw in ("charset", "collate"):
if spec.get(kw, False):
type_kw[kw] = spec[kw]
if issubclass(col_type, (ENUM, SET)):
type_args = _strip_values(type_args)
if issubclass(col_type, SET) and "" in type_args:
type_kw["retrieve_as_bitwise"] = True
type_instance = col_type(*type_args, **type_kw)
col_kw = {}
# NOT NULL
col_kw["nullable"] = True
# this can be "NULL" in the case of TIMESTAMP
if spec.get("notnull", False) == "NOT NULL":
col_kw["nullable"] = False
# For generated columns, the nullability is marked in a different place
if spec.get("notnull_generated", False) == "NOT NULL":
col_kw["nullable"] = False
# AUTO_INCREMENT
if spec.get("autoincr", False):
col_kw["autoincrement"] = True
elif issubclass(col_type, sqltypes.Integer):
col_kw["autoincrement"] = False
# DEFAULT
default = spec.get("default", None)
if default == "NULL":
# eliminates the need to deal with this later.
default = None
comment = spec.get("comment", None)
if comment is not None:
comment = cleanup_text(comment)
sqltext = spec.get("generated")
if sqltext is not None:
computed = dict(sqltext=sqltext)
persisted = spec.get("persistence")
if persisted is not None:
computed["persisted"] = persisted == "STORED"
col_kw["computed"] = computed
col_d = dict(
name=name, type=type_instance, default=default, comment=comment
)
col_d.update(col_kw)
state.columns.append(col_d)
def _describe_to_create(self, table_name, columns):
"""Re-format DESCRIBE output as a SHOW CREATE TABLE string.
DESCRIBE is a much simpler reflection and is sufficient for
reflecting views for runtime use. This method formats DDL
        for columns only; keys are omitted.
:param columns: A sequence of DESCRIBE or SHOW COLUMNS 6-tuples.
SHOW FULL COLUMNS FROM rows must be rearranged for use with
this function.
"""
buffer = []
for row in columns:
(name, col_type, nullable, default, extra) = (
row[i] for i in (0, 1, 2, 4, 5)
)
line = [" "]
line.append(self.preparer.quote_identifier(name))
line.append(col_type)
if not nullable:
line.append("NOT NULL")
if default:
if "auto_increment" in default:
pass
elif col_type.startswith("timestamp") and default.startswith(
"C"
):
line.append("DEFAULT")
line.append(default)
elif default == "NULL":
line.append("DEFAULT")
line.append(default)
else:
line.append("DEFAULT")
line.append("'%s'" % default.replace("'", "''"))
if extra:
line.append(extra)
buffer.append(" ".join(line))
return "".join(
[
(
"CREATE TABLE %s (\n"
% self.preparer.quote_identifier(table_name)
),
",\n".join(buffer),
"\n) ",
]
)
def _parse_keyexprs(self, identifiers):
"""Unpack '"col"(2),"col" ASC'-ish strings into components."""
return [
(colname, int(length) if length else None, modifiers)
for colname, length, modifiers in self._re_keyexprs.findall(
identifiers
)
]
def _prep_regexes(self):
"""Pre-compile regular expressions."""
self._re_columns = []
self._pr_options = []
_final = self.preparer.final_quote
quotes = dict(
zip(
("iq", "fq", "esc_fq"),
[
re.escape(s)
for s in (
self.preparer.initial_quote,
_final,
self.preparer._escape_identifier(_final),
)
],
)
)
self._pr_name = _pr_compile(
r"^CREATE (?:\w+ +)?TABLE +"
r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +\($" % quotes,
self.preparer._unescape_identifier,
)
self._re_is_view = _re_compile(r"^CREATE(?! TABLE)(\s.*)?\sVIEW")
# `col`,`col2`(32),`col3`(15) DESC
#
self._re_keyexprs = _re_compile(
r"(?:"
r"(?:%(iq)s((?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)"
r"(?:\((\d+)\))?(?: +(ASC|DESC))?(?=\,|$))+" % quotes
)
# 'foo' or 'foo','bar' or 'fo,o','ba''a''r'
self._re_csv_str = _re_compile(r"\x27(?:\x27\x27|[^\x27])*\x27")
# 123 or 123,456
self._re_csv_int = _re_compile(r"\d+")
# `colname` <type> [type opts]
# (NOT NULL | NULL)
# DEFAULT ('value' | CURRENT_TIMESTAMP...)
# COMMENT 'comment'
# COLUMN_FORMAT (FIXED|DYNAMIC|DEFAULT)
# STORAGE (DISK|MEMORY)
self._re_column = _re_compile(
r" "
r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
r"(?P<coltype>\w+)"
r"(?:\((?P<arg>(?:\d+|\d+,\d+|"
r"(?:'(?:''|[^'])*',?)+))\))?"
r"(?: +(?P<unsigned>UNSIGNED))?"
r"(?: +(?P<zerofill>ZEROFILL))?"
r"(?: +CHARACTER SET +(?P<charset>[\w_]+))?"
r"(?: +COLLATE +(?P<collate>[\w_]+))?"
r"(?: +(?P<notnull>(?:NOT )?NULL))?"
r"(?: +DEFAULT +(?P<default>"
r"(?:NULL|'(?:''|[^'])*'|\(.+?\)|[\-\w\.\(\)]+"
r"(?: +ON UPDATE [\-\w\.\(\)]+)?)"
r"))?"
r"(?: +(?:GENERATED ALWAYS)? ?AS +(?P<generated>\("
r".*\))? ?(?P<persistence>VIRTUAL|STORED)?"
r"(?: +(?P<notnull_generated>(?:NOT )?NULL))?"
r")?"
r"(?: +(?P<autoincr>AUTO_INCREMENT))?"
r"(?: +COMMENT +'(?P<comment>(?:''|[^'])*)')?"
r"(?: +COLUMN_FORMAT +(?P<colfmt>\w+))?"
r"(?: +STORAGE +(?P<storage>\w+))?"
r"(?: +(?P<extra>.*))?"
r",?$" % quotes
)
# Fallback, try to parse as little as possible
self._re_column_loose = _re_compile(
r" "
r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
r"(?P<coltype>\w+)"
r"(?:\((?P<arg>(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?"
r".*?(?P<notnull>(?:NOT )NULL)?" % quotes
)
# (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))?
# (`col` (ASC|DESC)?, `col` (ASC|DESC)?)
# KEY_BLOCK_SIZE size | WITH PARSER name /*!50100 WITH PARSER name */
self._re_key = _re_compile(
r" "
r"(?:(?P<type>\S+) )?KEY"
r"(?: +%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)?"
r"(?: +USING +(?P<using_pre>\S+))?"
r" +\((?P<columns>.+?)\)"
r"(?: +USING +(?P<using_post>\S+))?"
r"(?: +KEY_BLOCK_SIZE *[ =]? *(?P<keyblock>\S+))?"
r"(?: +WITH PARSER +(?P<parser>\S+))?"
r"(?: +COMMENT +(?P<comment>(\x27\x27|\x27([^\x27])*?\x27)+))?"
r"(?: +/\*(?P<version_sql>.+)\*/ *)?"
r",?$" % quotes
)
# https://forums.mysql.com/read.php?20,567102,567111#msg-567111
# It means if the MySQL version >= \d+, execute what's in the comment
self._re_key_version_sql = _re_compile(
r"\!\d+ " r"(?: *WITH PARSER +(?P<parser>\S+) *)?"
)
# CONSTRAINT `name` FOREIGN KEY (`local_col`)
# REFERENCES `remote` (`remote_col`)
# MATCH FULL | MATCH PARTIAL | MATCH SIMPLE
# ON DELETE CASCADE ON UPDATE RESTRICT
#
# unique constraints come back as KEYs
kw = quotes.copy()
kw["on"] = "RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT"
self._re_fk_constraint = _re_compile(
r" "
r"CONSTRAINT +"
r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
r"FOREIGN KEY +"
r"\((?P<local>[^\)]+?)\) REFERENCES +"
r"(?P<table>%(iq)s[^%(fq)s]+%(fq)s"
r"(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +"
r"\((?P<foreign>(?:%(iq)s[^%(fq)s]+%(fq)s(?: *, *)?)+)\)"
r"(?: +(?P<match>MATCH \w+))?"
r"(?: +ON DELETE (?P<ondelete>%(on)s))?"
r"(?: +ON UPDATE (?P<onupdate>%(on)s))?" % kw
)
# CONSTRAINT `CONSTRAINT_1` CHECK (`x` > 5)'
# testing on MariaDB 10.2 shows that the CHECK constraint
# is returned on a line by itself, so to match without worrying
# about parenthesis in the expression we go to the end of the line
self._re_ck_constraint = _re_compile(
r" "
r"CONSTRAINT +"
r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
r"CHECK +"
r"\((?P<sqltext>.+)\),?" % kw
)
# PARTITION
#
# punt!
self._re_partition = _re_compile(r"(?:.*)(?:SUB)?PARTITION(?:.*)")
# Table-level options (COLLATE, ENGINE, etc.)
# Do the string options first, since they have quoted
# strings we need to get rid of.
for option in _options_of_type_string:
self._add_option_string(option)
for option in (
"ENGINE",
"TYPE",
"AUTO_INCREMENT",
"AVG_ROW_LENGTH",
"CHARACTER SET",
"DEFAULT CHARSET",
"CHECKSUM",
"COLLATE",
"DELAY_KEY_WRITE",
"INSERT_METHOD",
"MAX_ROWS",
"MIN_ROWS",
"PACK_KEYS",
"ROW_FORMAT",
"KEY_BLOCK_SIZE",
"STATS_SAMPLE_PAGES",
):
self._add_option_word(option)
for option in (
"PARTITION BY",
"SUBPARTITION BY",
"PARTITIONS",
"SUBPARTITIONS",
"PARTITION",
"SUBPARTITION",
):
self._add_partition_option_word(option)
self._add_option_regex("UNION", r"\([^\)]+\)")
self._add_option_regex("TABLESPACE", r".*? STORAGE DISK")
self._add_option_regex(
"RAID_TYPE",
r"\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+",
)
_optional_equals = r"(?:\s*(?:=\s*)|\s+)"
def _add_option_string(self, directive):
regex = r"(?P<directive>%s)%s" r"'(?P<val>(?:[^']|'')*?)'(?!')" % (
re.escape(directive),
self._optional_equals,
)
self._pr_options.append(_pr_compile(regex, cleanup_text))
def _add_option_word(self, directive):
regex = r"(?P<directive>%s)%s" r"(?P<val>\w+)" % (
re.escape(directive),
self._optional_equals,
)
self._pr_options.append(_pr_compile(regex))
def _add_partition_option_word(self, directive):
if directive == "PARTITION BY" or directive == "SUBPARTITION BY":
regex = r"(?<!\S)(?P<directive>%s)%s" r"(?P<val>\w+.*)" % (
re.escape(directive),
self._optional_equals,
)
elif directive == "SUBPARTITIONS" or directive == "PARTITIONS":
regex = r"(?<!\S)(?P<directive>%s)%s" r"(?P<val>\d+)" % (
re.escape(directive),
self._optional_equals,
)
else:
regex = r"(?<!\S)(?P<directive>%s)(?!\S)" % (re.escape(directive),)
self._pr_options.append(_pr_compile(regex))
def _add_option_regex(self, directive, regex):
regex = r"(?P<directive>%s)%s" r"(?P<val>%s)" % (
re.escape(directive),
self._optional_equals,
regex,
)
self._pr_options.append(_pr_compile(regex))
_options_of_type_string = (
"COMMENT",
"DATA DIRECTORY",
"INDEX DIRECTORY",
"PASSWORD",
"CONNECTION",
)
def _pr_compile(regex, cleanup=None):
"""Prepare a 2-tuple of compiled regex and callable."""
return (_re_compile(regex), cleanup)
def _re_compile(regex):
"""Compile a string to regex, I and UNICODE."""
return re.compile(regex, re.I | re.UNICODE)
def _strip_values(values):
"Strip reflected values quotes"
strip_values = []
for a in values:
if a[0:1] == '"' or a[0:1] == "'":
# strip enclosing quotes and unquote interior
a = a[1:-1].replace(a[0] * 2, a[0])
strip_values.append(a)
return strip_values
def cleanup_text(raw_text: str) -> str:
if "\\" in raw_text:
raw_text = re.sub(
_control_char_regexp, lambda s: _control_char_map[s[0]], raw_text
)
return raw_text.replace("''", "'")
_control_char_map = {
"\\\\": "\\",
"\\0": "\0",
"\\a": "\a",
"\\b": "\b",
"\\t": "\t",
"\\n": "\n",
"\\v": "\v",
"\\f": "\f",
"\\r": "\r",
# '\\e':'\e',
}
_control_char_regexp = re.compile(
"|".join(re.escape(k) for k in _control_char_map)
)

View File

@ -0,0 +1,571 @@
# dialects/mysql/reserved_words.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# generated using:
# https://gist.github.com/kkirsche/4f31f2153ed7a3248be1ec44ca6ddbc9
#
# https://mariadb.com/kb/en/reserved-words/
# includes: Reserved Words, Oracle Mode (separate set unioned)
# excludes: Exceptions, Function Names
# mypy: ignore-errors
RESERVED_WORDS_MARIADB = {
"accessible",
"add",
"all",
"alter",
"analyze",
"and",
"as",
"asc",
"asensitive",
"before",
"between",
"bigint",
"binary",
"blob",
"both",
"by",
"call",
"cascade",
"case",
"change",
"char",
"character",
"check",
"collate",
"column",
"condition",
"constraint",
"continue",
"convert",
"create",
"cross",
"current_date",
"current_role",
"current_time",
"current_timestamp",
"current_user",
"cursor",
"database",
"databases",
"day_hour",
"day_microsecond",
"day_minute",
"day_second",
"dec",
"decimal",
"declare",
"default",
"delayed",
"delete",
"desc",
"describe",
"deterministic",
"distinct",
"distinctrow",
"div",
"do_domain_ids",
"double",
"drop",
"dual",
"each",
"else",
"elseif",
"enclosed",
"escaped",
"except",
"exists",
"exit",
"explain",
"false",
"fetch",
"float",
"float4",
"float8",
"for",
"force",
"foreign",
"from",
"fulltext",
"general",
"grant",
"group",
"having",
"high_priority",
"hour_microsecond",
"hour_minute",
"hour_second",
"if",
"ignore",
"ignore_domain_ids",
"ignore_server_ids",
"in",
"index",
"infile",
"inner",
"inout",
"insensitive",
"insert",
"int",
"int1",
"int2",
"int3",
"int4",
"int8",
"integer",
"intersect",
"interval",
"into",
"is",
"iterate",
"join",
"key",
"keys",
"kill",
"leading",
"leave",
"left",
"like",
"limit",
"linear",
"lines",
"load",
"localtime",
"localtimestamp",
"lock",
"long",
"longblob",
"longtext",
"loop",
"low_priority",
"master_heartbeat_period",
"master_ssl_verify_server_cert",
"match",
"maxvalue",
"mediumblob",
"mediumint",
"mediumtext",
"middleint",
"minute_microsecond",
"minute_second",
"mod",
"modifies",
"natural",
"no_write_to_binlog",
"not",
"null",
"numeric",
"offset",
"on",
"optimize",
"option",
"optionally",
"or",
"order",
"out",
"outer",
"outfile",
"over",
"page_checksum",
"parse_vcol_expr",
"partition",
"position",
"precision",
"primary",
"procedure",
"purge",
"range",
"read",
"read_write",
"reads",
"real",
"recursive",
"ref_system_id",
"references",
"regexp",
"release",
"rename",
"repeat",
"replace",
"require",
"resignal",
"restrict",
"return",
"returning",
"revoke",
"right",
"rlike",
"rows",
"row_number",
"schema",
"schemas",
"second_microsecond",
"select",
"sensitive",
"separator",
"set",
"show",
"signal",
"slow",
"smallint",
"spatial",
"specific",
"sql",
"sql_big_result",
"sql_calc_found_rows",
"sql_small_result",
"sqlexception",
"sqlstate",
"sqlwarning",
"ssl",
"starting",
"stats_auto_recalc",
"stats_persistent",
"stats_sample_pages",
"straight_join",
"table",
"terminated",
"then",
"tinyblob",
"tinyint",
"tinytext",
"to",
"trailing",
"trigger",
"true",
"undo",
"union",
"unique",
"unlock",
"unsigned",
"update",
"usage",
"use",
"using",
"utc_date",
"utc_time",
"utc_timestamp",
"values",
"varbinary",
"varchar",
"varcharacter",
"varying",
"when",
"where",
"while",
"window",
"with",
"write",
"xor",
"year_month",
"zerofill",
}.union(
{
"body",
"elsif",
"goto",
"history",
"others",
"package",
"period",
"raise",
"rowtype",
"system",
"system_time",
"versioning",
"without",
}
)
# https://dev.mysql.com/doc/refman/8.3/en/keywords.html
# https://dev.mysql.com/doc/refman/8.0/en/keywords.html
# https://dev.mysql.com/doc/refman/5.7/en/keywords.html
# https://dev.mysql.com/doc/refman/5.6/en/keywords.html
# includes: MySQL x.0 Keywords and Reserved Words
# excludes: MySQL x.0 New Keywords and Reserved Words,
# MySQL x.0 Removed Keywords and Reserved Words
RESERVED_WORDS_MYSQL = {
"accessible",
"add",
"admin",
"all",
"alter",
"analyze",
"and",
"array",
"as",
"asc",
"asensitive",
"before",
"between",
"bigint",
"binary",
"blob",
"both",
"by",
"call",
"cascade",
"case",
"change",
"char",
"character",
"check",
"collate",
"column",
"condition",
"constraint",
"continue",
"convert",
"create",
"cross",
"cube",
"cume_dist",
"current_date",
"current_time",
"current_timestamp",
"current_user",
"cursor",
"database",
"databases",
"day_hour",
"day_microsecond",
"day_minute",
"day_second",
"dec",
"decimal",
"declare",
"default",
"delayed",
"delete",
"dense_rank",
"desc",
"describe",
"deterministic",
"distinct",
"distinctrow",
"div",
"double",
"drop",
"dual",
"each",
"else",
"elseif",
"empty",
"enclosed",
"escaped",
"except",
"exists",
"exit",
"explain",
"false",
"fetch",
"first_value",
"float",
"float4",
"float8",
"for",
"force",
"foreign",
"from",
"fulltext",
"function",
"general",
"generated",
"get",
"get_master_public_key",
"grant",
"group",
"grouping",
"groups",
"having",
"high_priority",
"hour_microsecond",
"hour_minute",
"hour_second",
"if",
"ignore",
"ignore_server_ids",
"in",
"index",
"infile",
"inner",
"inout",
"insensitive",
"insert",
"int",
"int1",
"int2",
"int3",
"int4",
"int8",
"integer",
"intersect",
"interval",
"into",
"io_after_gtids",
"io_before_gtids",
"is",
"iterate",
"join",
"json_table",
"key",
"keys",
"kill",
"lag",
"last_value",
"lateral",
"lead",
"leading",
"leave",
"left",
"like",
"limit",
"linear",
"lines",
"load",
"localtime",
"localtimestamp",
"lock",
"long",
"longblob",
"longtext",
"loop",
"low_priority",
"master_bind",
"master_heartbeat_period",
"master_ssl_verify_server_cert",
"match",
"maxvalue",
"mediumblob",
"mediumint",
"mediumtext",
"member",
"middleint",
"minute_microsecond",
"minute_second",
"mod",
"modifies",
"natural",
"no_write_to_binlog",
"not",
"nth_value",
"ntile",
"null",
"numeric",
"of",
"on",
"optimize",
"optimizer_costs",
"option",
"optionally",
"or",
"order",
"out",
"outer",
"outfile",
"over",
"parse_gcol_expr",
"parallel",
"partition",
"percent_rank",
"persist",
"persist_only",
"precision",
"primary",
"procedure",
"purge",
"qualify",
"range",
"rank",
"read",
"read_write",
"reads",
"real",
"recursive",
"references",
"regexp",
"release",
"rename",
"repeat",
"replace",
"require",
"resignal",
"restrict",
"return",
"revoke",
"right",
"rlike",
"role",
"row",
"row_number",
"rows",
"schema",
"schemas",
"second_microsecond",
"select",
"sensitive",
"separator",
"set",
"show",
"signal",
"slow",
"smallint",
"spatial",
"specific",
"sql",
"sql_after_gtids",
"sql_before_gtids",
"sql_big_result",
"sql_calc_found_rows",
"sql_small_result",
"sqlexception",
"sqlstate",
"sqlwarning",
"ssl",
"starting",
"stored",
"straight_join",
"system",
"table",
"terminated",
"then",
"tinyblob",
"tinyint",
"tinytext",
"to",
"trailing",
"trigger",
"true",
"undo",
"union",
"unique",
"unlock",
"unsigned",
"update",
"usage",
"use",
"using",
"utc_date",
"utc_time",
"utc_timestamp",
"values",
"varbinary",
"varchar",
"varcharacter",
"varying",
"virtual",
"when",
"where",
"while",
"window",
"with",
"write",
"xor",
"year_month",
"zerofill",
}

View File

@ -0,0 +1,773 @@
# dialects/mysql/types.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
import datetime
from ... import exc
from ... import util
from ...sql import sqltypes
class _NumericType:
"""Base for MySQL numeric types.
    This is the base for both NUMERIC and INTEGER, hence
    it's a mixin.
"""
def __init__(self, unsigned=False, zerofill=False, **kw):
self.unsigned = unsigned
self.zerofill = zerofill
super().__init__(**kw)
def __repr__(self):
return util.generic_repr(
self, to_inspect=[_NumericType, sqltypes.Numeric]
)
class _FloatType(_NumericType, sqltypes.Float):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
if isinstance(self, (REAL, DOUBLE)) and (
(precision is None and scale is not None)
or (precision is not None and scale is None)
):
raise exc.ArgumentError(
"You must specify both precision and scale or omit "
"both altogether."
)
super().__init__(precision=precision, asdecimal=asdecimal, **kw)
self.scale = scale
def __repr__(self):
return util.generic_repr(
self, to_inspect=[_FloatType, _NumericType, sqltypes.Float]
)
class _IntegerType(_NumericType, sqltypes.Integer):
def __init__(self, display_width=None, **kw):
self.display_width = display_width
super().__init__(**kw)
def __repr__(self):
return util.generic_repr(
self, to_inspect=[_IntegerType, _NumericType, sqltypes.Integer]
)
class _StringType(sqltypes.String):
"""Base for MySQL string types."""
def __init__(
self,
charset=None,
collation=None,
ascii=False, # noqa
binary=False,
unicode=False,
national=False,
**kw,
):
self.charset = charset
# allow collate= or collation=
kw.setdefault("collation", kw.pop("collate", collation))
self.ascii = ascii
self.unicode = unicode
self.binary = binary
self.national = national
super().__init__(**kw)
def __repr__(self):
return util.generic_repr(
self, to_inspect=[_StringType, sqltypes.String]
)
class _MatchType(sqltypes.Float, sqltypes.MatchType):
def __init__(self, **kw):
# TODO: float arguments?
sqltypes.Float.__init__(self)
sqltypes.MatchType.__init__(self)
class NUMERIC(_NumericType, sqltypes.NUMERIC):
"""MySQL NUMERIC type."""
__visit_name__ = "NUMERIC"
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a NUMERIC.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super().__init__(
precision=precision, scale=scale, asdecimal=asdecimal, **kw
)
class DECIMAL(_NumericType, sqltypes.DECIMAL):
"""MySQL DECIMAL type."""
__visit_name__ = "DECIMAL"
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a DECIMAL.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super().__init__(
precision=precision, scale=scale, asdecimal=asdecimal, **kw
)
class DOUBLE(_FloatType, sqltypes.DOUBLE):
"""MySQL DOUBLE type."""
__visit_name__ = "DOUBLE"
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a DOUBLE.
.. note::
The :class:`.DOUBLE` type by default converts from float
to Decimal, using a truncation that defaults to 10 digits.
Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
to change this scale, or ``asdecimal=False`` to return values
directly as Python floating points.
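        For example, a minimal sketch of a column that returns plain Python
        floats rather than ``Decimal`` objects (the column name is
        illustrative)::

            from sqlalchemy import Column
            from sqlalchemy.dialects.mysql import DOUBLE

            Column("amount", DOUBLE(asdecimal=False))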
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super().__init__(
precision=precision, scale=scale, asdecimal=asdecimal, **kw
)
class REAL(_FloatType, sqltypes.REAL):
"""MySQL REAL type."""
__visit_name__ = "REAL"
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a REAL.
.. note::
The :class:`.REAL` type by default converts from float
to Decimal, using a truncation that defaults to 10 digits.
Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
to change this scale, or ``asdecimal=False`` to return values
directly as Python floating points.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super().__init__(
precision=precision, scale=scale, asdecimal=asdecimal, **kw
)
class FLOAT(_FloatType, sqltypes.FLOAT):
"""MySQL FLOAT type."""
__visit_name__ = "FLOAT"
def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
"""Construct a FLOAT.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super().__init__(
precision=precision, scale=scale, asdecimal=asdecimal, **kw
)
def bind_processor(self, dialect):
return None
class INTEGER(_IntegerType, sqltypes.INTEGER):
"""MySQL INTEGER type."""
__visit_name__ = "INTEGER"
def __init__(self, display_width=None, **kw):
"""Construct an INTEGER.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super().__init__(display_width=display_width, **kw)
class BIGINT(_IntegerType, sqltypes.BIGINT):
"""MySQL BIGINTEGER type."""
__visit_name__ = "BIGINT"
def __init__(self, display_width=None, **kw):
"""Construct a BIGINTEGER.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super().__init__(display_width=display_width, **kw)
class MEDIUMINT(_IntegerType):
"""MySQL MEDIUMINTEGER type."""
__visit_name__ = "MEDIUMINT"
def __init__(self, display_width=None, **kw):
"""Construct a MEDIUMINTEGER
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super().__init__(display_width=display_width, **kw)
class TINYINT(_IntegerType):
"""MySQL TINYINT type."""
__visit_name__ = "TINYINT"
def __init__(self, display_width=None, **kw):
"""Construct a TINYINT.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super().__init__(display_width=display_width, **kw)
class SMALLINT(_IntegerType, sqltypes.SMALLINT):
"""MySQL SMALLINTEGER type."""
__visit_name__ = "SMALLINT"
def __init__(self, display_width=None, **kw):
"""Construct a SMALLINTEGER.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super().__init__(display_width=display_width, **kw)
class BIT(sqltypes.TypeEngine):
"""MySQL BIT type.
This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater
for MyISAM, MEMORY, InnoDB and BDB. For older versions, use a
MSTinyInteger() type.
"""
__visit_name__ = "BIT"
def __init__(self, length=None):
"""Construct a BIT.
:param length: Optional, number of bits.
"""
self.length = length
def result_processor(self, dialect, coltype):
"""Convert a MySQL's 64 bit, variable length binary string to a
long."""
if dialect.supports_native_bit:
return None
def process(value):
if value is not None:
v = 0
for i in value:
if not isinstance(i, int):
i = ord(i) # convert byte to int on Python 2
v = v << 8 | i
return v
return value
return process
class TIME(sqltypes.TIME):
"""MySQL TIME type."""
__visit_name__ = "TIME"
def __init__(self, timezone=False, fsp=None):
"""Construct a MySQL TIME type.
:param timezone: not used by the MySQL dialect.
:param fsp: fractional seconds precision value.
MySQL 5.6 supports storage of fractional seconds;
this parameter will be used when emitting DDL
for the TIME type.
.. note::
DBAPI driver support for fractional seconds may
be limited; current support includes
MySQL Connector/Python.
"""
super().__init__(timezone=timezone)
self.fsp = fsp
def result_processor(self, dialect, coltype):
time = datetime.time
def process(value):
# convert from a timedelta value
if value is not None:
microseconds = value.microseconds
seconds = value.seconds
minutes = seconds // 60
return time(
minutes // 60,
minutes % 60,
seconds - minutes * 60,
microsecond=microseconds,
)
else:
return None
return process
class TIMESTAMP(sqltypes.TIMESTAMP):
"""MySQL TIMESTAMP type."""
__visit_name__ = "TIMESTAMP"
def __init__(self, timezone=False, fsp=None):
"""Construct a MySQL TIMESTAMP type.
:param timezone: not used by the MySQL dialect.
:param fsp: fractional seconds precision value.
MySQL 5.6.4 supports storage of fractional seconds;
this parameter will be used when emitting DDL
for the TIMESTAMP type.
.. note::
DBAPI driver support for fractional seconds may
be limited; current support includes
MySQL Connector/Python.
"""
super().__init__(timezone=timezone)
self.fsp = fsp
class DATETIME(sqltypes.DATETIME):
"""MySQL DATETIME type."""
__visit_name__ = "DATETIME"
def __init__(self, timezone=False, fsp=None):
"""Construct a MySQL DATETIME type.
:param timezone: not used by the MySQL dialect.
:param fsp: fractional seconds precision value.
MySQL 5.6.4 supports storage of fractional seconds;
this parameter will be used when emitting DDL
for the DATETIME type.
.. note::
DBAPI driver support for fractional seconds may
be limited; current support includes
MySQL Connector/Python.
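        For example, a minimal sketch of a DATETIME column storing microsecond
        precision (the column name is illustrative)::

            from sqlalchemy import Column
            from sqlalchemy.dialects.mysql import DATETIME

            Column("created_at", DATETIME(fsp=6))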
"""
super().__init__(timezone=timezone)
self.fsp = fsp
class YEAR(sqltypes.TypeEngine):
"""MySQL YEAR type, for single byte storage of years 1901-2155."""
__visit_name__ = "YEAR"
def __init__(self, display_width=None):
self.display_width = display_width
class TEXT(_StringType, sqltypes.TEXT):
"""MySQL TEXT type, for character storage encoded up to 2^16 bytes."""
__visit_name__ = "TEXT"
def __init__(self, length=None, **kw):
"""Construct a TEXT.
:param length: Optional, if provided the server may optimize storage
by substituting the smallest TEXT type sufficient to store
``length`` bytes of characters.
:param charset: Optional, a column-level character set for this string
          value. Takes precedence over 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
          value. Takes precedence over 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super().__init__(length=length, **kw)
class TINYTEXT(_StringType):
"""MySQL TINYTEXT type, for character storage encoded up to 2^8 bytes."""
__visit_name__ = "TINYTEXT"
def __init__(self, **kwargs):
"""Construct a TINYTEXT.
:param charset: Optional, a column-level character set for this string
          value. Takes precedence over 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
          value. Takes precedence over 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super().__init__(**kwargs)
class MEDIUMTEXT(_StringType):
"""MySQL MEDIUMTEXT type, for character storage encoded up
to 2^24 bytes."""
__visit_name__ = "MEDIUMTEXT"
def __init__(self, **kwargs):
"""Construct a MEDIUMTEXT.
:param charset: Optional, a column-level character set for this string
          value. Takes precedence over 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
          value. Takes precedence over 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super().__init__(**kwargs)
class LONGTEXT(_StringType):
"""MySQL LONGTEXT type, for character storage encoded up to 2^32 bytes."""
__visit_name__ = "LONGTEXT"
def __init__(self, **kwargs):
"""Construct a LONGTEXT.
:param charset: Optional, a column-level character set for this string
          value. Takes precedence over 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
          value. Takes precedence over 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super().__init__(**kwargs)
class VARCHAR(_StringType, sqltypes.VARCHAR):
"""MySQL VARCHAR type, for variable-length character data."""
__visit_name__ = "VARCHAR"
def __init__(self, length=None, **kwargs):
"""Construct a VARCHAR.
:param charset: Optional, a column-level character set for this string
          value. Takes precedence over 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
          value. Takes precedence over 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super().__init__(length=length, **kwargs)
class CHAR(_StringType, sqltypes.CHAR):
"""MySQL CHAR type, for fixed-length character data."""
__visit_name__ = "CHAR"
def __init__(self, length=None, **kwargs):
"""Construct a CHAR.
:param length: Maximum data length, in characters.
:param binary: Optional, use the default binary collation for the
national character set. This does not affect the type of data
stored, use a BINARY type for binary data.
:param collation: Optional, request a particular collation. Must be
compatible with the national character set.
"""
super().__init__(length=length, **kwargs)
@classmethod
def _adapt_string_for_cast(cls, type_):
# copy the given string type into a CHAR
# for the purposes of rendering a CAST expression
type_ = sqltypes.to_instance(type_)
if isinstance(type_, sqltypes.CHAR):
return type_
elif isinstance(type_, _StringType):
return CHAR(
length=type_.length,
charset=type_.charset,
collation=type_.collation,
ascii=type_.ascii,
binary=type_.binary,
unicode=type_.unicode,
national=False, # not supported in CAST
)
else:
return CHAR(length=type_.length)
class NVARCHAR(_StringType, sqltypes.NVARCHAR):
"""MySQL NVARCHAR type.
For variable-length character data in the server's configured national
character set.
"""
__visit_name__ = "NVARCHAR"
def __init__(self, length=None, **kwargs):
"""Construct an NVARCHAR.
:param length: Maximum data length, in characters.
:param binary: Optional, use the default binary collation for the
national character set. This does not affect the type of data
stored, use a BINARY type for binary data.
:param collation: Optional, request a particular collation. Must be
compatible with the national character set.
"""
kwargs["national"] = True
super().__init__(length=length, **kwargs)
class NCHAR(_StringType, sqltypes.NCHAR):
"""MySQL NCHAR type.
For fixed-length character data in the server's configured national
character set.
"""
__visit_name__ = "NCHAR"
def __init__(self, length=None, **kwargs):
"""Construct an NCHAR.
:param length: Maximum data length, in characters.
:param binary: Optional, use the default binary collation for the
national character set. This does not affect the type of data
stored, use a BINARY type for binary data.
:param collation: Optional, request a particular collation. Must be
compatible with the national character set.
"""
kwargs["national"] = True
super().__init__(length=length, **kwargs)
class TINYBLOB(sqltypes._Binary):
"""MySQL TINYBLOB type, for binary data up to 2^8 bytes."""
__visit_name__ = "TINYBLOB"
class MEDIUMBLOB(sqltypes._Binary):
"""MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes."""
__visit_name__ = "MEDIUMBLOB"
class LONGBLOB(sqltypes._Binary):
"""MySQL LONGBLOB type, for binary data up to 2^32 bytes."""
__visit_name__ = "LONGBLOB"