author     cyfraeviolae <cyfraeviolae>    2024-04-03 03:10:44 -0400
committer  cyfraeviolae <cyfraeviolae>    2024-04-03 03:10:44 -0400
commit     6d7ba58f880be618ade07f8ea080fe8c4bf8a896 (patch)
tree       b1c931051ffcebd2bd9d61d98d6233ffa289bbce /venv/lib/python3.11/site-packages/sqlalchemy/connectors
parent     4f884c9abc32990b4061a1bb6997b4b37e58ea0b (diff)

venv
Diffstat (limited to 'venv/lib/python3.11/site-packages/sqlalchemy/connectors')
-rw-r--r--  venv/lib/python3.11/site-packages/sqlalchemy/connectors/__init__.py                          |  18
-rw-r--r--  venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/__init__.cpython-311.pyc | bin 0 -> 682 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/aioodbc.cpython-311.pyc  | bin 0 -> 8027 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/asyncio.cpython-311.pyc  | bin 0 -> 12757 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/pyodbc.cpython-311.pyc   | bin 0 -> 11185 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/sqlalchemy/connectors/aioodbc.py                           | 174
-rw-r--r--  venv/lib/python3.11/site-packages/sqlalchemy/connectors/asyncio.py                           | 208
-rw-r--r--  venv/lib/python3.11/site-packages/sqlalchemy/connectors/pyodbc.py                            | 249
8 files changed, 649 insertions, 0 deletions
diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__init__.py b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__init__.py
new file mode 100644
index 0000000..f1cae0b
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__init__.py
@@ -0,0 +1,18 @@
+# connectors/__init__.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+
+from ..engine.interfaces import Dialect
+
+
+class Connector(Dialect):
+ """Base class for dialect mixins, for DBAPIs that work
+ across entirely different database backends.
+
+ Currently the only such mixin is pyodbc.
+
+ """
diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..172b726
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/__init__.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/aioodbc.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/aioodbc.cpython-311.pyc
new file mode 100644
index 0000000..86bb366
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/aioodbc.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/asyncio.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/asyncio.cpython-311.pyc
new file mode 100644
index 0000000..c9b451d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/asyncio.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/pyodbc.cpython-311.pyc b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/pyodbc.cpython-311.pyc
new file mode 100644
index 0000000..5805308
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/__pycache__/pyodbc.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/connectors/aioodbc.py b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/aioodbc.py
new file mode 100644
index 0000000..3b5c3b4
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/aioodbc.py
@@ -0,0 +1,174 @@
+# connectors/aioodbc.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from .asyncio import AsyncAdapt_dbapi_connection
+from .asyncio import AsyncAdapt_dbapi_cursor
+from .asyncio import AsyncAdapt_dbapi_ss_cursor
+from .asyncio import AsyncAdaptFallback_dbapi_connection
+from .pyodbc import PyODBCConnector
+from .. import pool
+from .. import util
+from ..util.concurrency import await_fallback
+from ..util.concurrency import await_only
+
+if TYPE_CHECKING:
+ from ..engine.interfaces import ConnectArgsType
+ from ..engine.url import URL
+
+
+class AsyncAdapt_aioodbc_cursor(AsyncAdapt_dbapi_cursor):
+ __slots__ = ()
+
+ def setinputsizes(self, *inputsizes):
+ # see https://github.com/aio-libs/aioodbc/issues/451
+ return self._cursor._impl.setinputsizes(*inputsizes)
+
+ # how it's supposed to work
+ # return self.await_(self._cursor.setinputsizes(*inputsizes))
+
+
+class AsyncAdapt_aioodbc_ss_cursor(
+ AsyncAdapt_aioodbc_cursor, AsyncAdapt_dbapi_ss_cursor
+):
+ __slots__ = ()
+
+
+class AsyncAdapt_aioodbc_connection(AsyncAdapt_dbapi_connection):
+ _cursor_cls = AsyncAdapt_aioodbc_cursor
+ _ss_cursor_cls = AsyncAdapt_aioodbc_ss_cursor
+ __slots__ = ()
+
+ @property
+ def autocommit(self):
+ return self._connection.autocommit
+
+ @autocommit.setter
+ def autocommit(self, value):
+ # https://github.com/aio-libs/aioodbc/issues/448
+ # self._connection.autocommit = value
+
+ self._connection._conn.autocommit = value
+
+ def cursor(self, server_side=False):
+ # aioodbc sets connection=None when closed and just fails with
+ # AttributeError here. Here we use the same ProgrammingError +
+ # message that pyodbc uses, so it triggers is_disconnect() as well.
+ if self._connection.closed:
+ raise self.dbapi.ProgrammingError(
+ "Attempt to use a closed connection."
+ )
+ return super().cursor(server_side=server_side)

+
+ def rollback(self):
+ # aioodbc sets connection=None when closed and just fails with
+ # AttributeError here; rollback should be a no-op in that case
+ if not self._connection.closed:
+ super().rollback()
+
+ def commit(self):
+ # aioodbc sets connection=None when closed and just fails with
+ # AttributeError here; commit should be a no-op in that case
+ if not self._connection.closed:
+ super().commit()
+
+ def close(self):
+ # aioodbc sets connection=None when closed and just fails with
+ # AttributeError here; close should be a no-op in that case
+ if not self._connection.closed:
+ super().close()
+
+
+class AsyncAdaptFallback_aioodbc_connection(
+ AsyncAdaptFallback_dbapi_connection, AsyncAdapt_aioodbc_connection
+):
+ __slots__ = ()
+
+
+class AsyncAdapt_aioodbc_dbapi:
+ def __init__(self, aioodbc, pyodbc):
+ self.aioodbc = aioodbc
+ self.pyodbc = pyodbc
+ self.paramstyle = pyodbc.paramstyle
+ self._init_dbapi_attributes()
+ self.Cursor = AsyncAdapt_dbapi_cursor
+ self.version = pyodbc.version
+
+ def _init_dbapi_attributes(self):
+ for name in (
+ "Warning",
+ "Error",
+ "InterfaceError",
+ "DataError",
+ "DatabaseError",
+ "OperationalError",
+ "InterfaceError",
+ "IntegrityError",
+ "ProgrammingError",
+ "InternalError",
+ "NotSupportedError",
+ "NUMBER",
+ "STRING",
+ "DATETIME",
+ "BINARY",
+ "Binary",
+ "BinaryNull",
+ "SQL_VARCHAR",
+ "SQL_WVARCHAR",
+ ):
+ setattr(self, name, getattr(self.pyodbc, name))
+
+ def connect(self, *arg, **kw):
+ async_fallback = kw.pop("async_fallback", False)
+ creator_fn = kw.pop("async_creator_fn", self.aioodbc.connect)
+
+ if util.asbool(async_fallback):
+ return AsyncAdaptFallback_aioodbc_connection(
+ self,
+ await_fallback(creator_fn(*arg, **kw)),
+ )
+ else:
+ return AsyncAdapt_aioodbc_connection(
+ self,
+ await_only(creator_fn(*arg, **kw)),
+ )
+
+
+class aiodbcConnector(PyODBCConnector):
+ is_async = True
+ supports_statement_cache = True
+
+ supports_server_side_cursors = True
+
+ @classmethod
+ def import_dbapi(cls):
+ return AsyncAdapt_aioodbc_dbapi(
+ __import__("aioodbc"), __import__("pyodbc")
+ )
+
+ def create_connect_args(self, url: URL) -> ConnectArgsType:
+ arg, kw = super().create_connect_args(url)
+ if arg and arg[0]:
+ kw["dsn"] = arg[0]
+
+ return (), kw
+
+ @classmethod
+ def get_pool_class(cls, url):
+ async_fallback = url.query.get("async_fallback", False)
+
+ if util.asbool(async_fallback):
+ return pool.FallbackAsyncAdaptedQueuePool
+ else:
+ return pool.AsyncAdaptedQueuePool
+
+ def get_driver_connection(self, connection):
+ return connection._connection
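
(Editor's note, not part of the diff.) A hedged usage sketch for the adapter above: in SQLAlchemy 2.0 this connector is reached through an async ODBC dialect (e.g. mssql+aioodbc). Running it requires the aioodbc and pyodbc packages plus a reachable database; the host, credentials, and ODBC driver name below are placeholders.

import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine


async def main():
    # placeholder URL; any aioodbc-backed dialect is used the same way
    engine = create_async_engine(
        "mssql+aioodbc://scott:tiger@myhost/mydb"
        "?driver=ODBC+Driver+18+for+SQL+Server"
    )
    async with engine.connect() as conn:
        result = await conn.execute(text("SELECT 1"))
        print(result.scalar())
    await engine.dispose()


asyncio.run(main())
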
diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/connectors/asyncio.py b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/asyncio.py
new file mode 100644
index 0000000..0b44f23
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/asyncio.py
@@ -0,0 +1,208 @@
+# connectors/asyncio.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+"""generic asyncio-adapted versions of DBAPI connection and cursor"""
+
+from __future__ import annotations
+
+import collections
+import itertools
+
+from ..engine import AdaptedConnection
+from ..util.concurrency import asyncio
+from ..util.concurrency import await_fallback
+from ..util.concurrency import await_only
+
+
+class AsyncAdapt_dbapi_cursor:
+ server_side = False
+ __slots__ = (
+ "_adapt_connection",
+ "_connection",
+ "await_",
+ "_cursor",
+ "_rows",
+ )
+
+ def __init__(self, adapt_connection):
+ self._adapt_connection = adapt_connection
+ self._connection = adapt_connection._connection
+ self.await_ = adapt_connection.await_
+
+ cursor = self._connection.cursor()
+ self._cursor = self._aenter_cursor(cursor)
+
+ self._rows = collections.deque()
+
+ def _aenter_cursor(self, cursor):
+ return self.await_(cursor.__aenter__())
+
+ @property
+ def description(self):
+ return self._cursor.description
+
+ @property
+ def rowcount(self):
+ return self._cursor.rowcount
+
+ @property
+ def arraysize(self):
+ return self._cursor.arraysize
+
+ @arraysize.setter
+ def arraysize(self, value):
+ self._cursor.arraysize = value
+
+ @property
+ def lastrowid(self):
+ return self._cursor.lastrowid
+
+ def close(self):
+ # note we aren't actually closing the cursor here;
+ # we are just letting GC do it. See notes in the aiomysql dialect.
+ self._rows.clear()
+
+ def execute(self, operation, parameters=None):
+ return self.await_(self._execute_async(operation, parameters))
+
+ def executemany(self, operation, seq_of_parameters):
+ return self.await_(
+ self._executemany_async(operation, seq_of_parameters)
+ )
+
+ async def _execute_async(self, operation, parameters):
+ async with self._adapt_connection._execute_mutex:
+ result = await self._cursor.execute(operation, parameters or ())
+
+ if self._cursor.description and not self.server_side:
+ self._rows = collections.deque(await self._cursor.fetchall())
+ return result
+
+ async def _executemany_async(self, operation, seq_of_parameters):
+ async with self._adapt_connection._execute_mutex:
+ return await self._cursor.executemany(operation, seq_of_parameters)
+
+ def nextset(self):
+ self.await_(self._cursor.nextset())
+ if self._cursor.description and not self.server_side:
+ self._rows = collections.deque(
+ self.await_(self._cursor.fetchall())
+ )
+
+ def setinputsizes(self, *inputsizes):
+ # NOTE: this is currently overridden in aioodbc due to
+ # https://github.com/aio-libs/aioodbc/issues/451;
+ # see that issue for details
+
+ return self.await_(self._cursor.setinputsizes(*inputsizes))
+
+ def __iter__(self):
+ while self._rows:
+ yield self._rows.popleft()
+
+ def fetchone(self):
+ if self._rows:
+ return self._rows.popleft()
+ else:
+ return None
+
+ def fetchmany(self, size=None):
+ if size is None:
+ size = self.arraysize
+
+ rr = iter(self._rows)
+ retval = list(itertools.islice(rr, 0, size))
+ self._rows = collections.deque(rr)
+ return retval
+
+ def fetchall(self):
+ retval = list(self._rows)
+ self._rows.clear()
+ return retval
+
+
+class AsyncAdapt_dbapi_ss_cursor(AsyncAdapt_dbapi_cursor):
+ __slots__ = ()
+ server_side = True
+
+ def __init__(self, adapt_connection):
+ self._adapt_connection = adapt_connection
+ self._connection = adapt_connection._connection
+ self.await_ = adapt_connection.await_
+
+ cursor = self._connection.cursor()
+
+ self._cursor = self.await_(cursor.__aenter__())
+
+ def close(self):
+ if self._cursor is not None:
+ self.await_(self._cursor.close())
+ self._cursor = None
+
+ def fetchone(self):
+ return self.await_(self._cursor.fetchone())
+
+ def fetchmany(self, size=None):
+ return self.await_(self._cursor.fetchmany(size=size))
+
+ def fetchall(self):
+ return self.await_(self._cursor.fetchall())
+
+
+class AsyncAdapt_dbapi_connection(AdaptedConnection):
+ _cursor_cls = AsyncAdapt_dbapi_cursor
+ _ss_cursor_cls = AsyncAdapt_dbapi_ss_cursor
+
+ await_ = staticmethod(await_only)
+ __slots__ = ("dbapi", "_execute_mutex")
+
+ def __init__(self, dbapi, connection):
+ self.dbapi = dbapi
+ self._connection = connection
+ self._execute_mutex = asyncio.Lock()
+
+ def ping(self, reconnect):
+ return self.await_(self._connection.ping(reconnect))
+
+ def add_output_converter(self, *arg, **kw):
+ self._connection.add_output_converter(*arg, **kw)
+
+ def character_set_name(self):
+ return self._connection.character_set_name()
+
+ @property
+ def autocommit(self):
+ return self._connection.autocommit
+
+ @autocommit.setter
+ def autocommit(self, value):
+ # https://github.com/aio-libs/aioodbc/issues/448
+ # self._connection.autocommit = value
+
+ self._connection._conn.autocommit = value
+
+ def cursor(self, server_side=False):
+ if server_side:
+ return self._ss_cursor_cls(self)
+ else:
+ return self._cursor_cls(self)
+
+ def rollback(self):
+ self.await_(self._connection.rollback())
+
+ def commit(self):
+ self.await_(self._connection.commit())
+
+ def close(self):
+ self.await_(self._connection.close())
+
+
+class AsyncAdaptFallback_dbapi_connection(AsyncAdapt_dbapi_connection):
+ __slots__ = ()
+
+ await_ = staticmethod(await_fallback)
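
(Editor's note, not part of the diff.) For readers unfamiliar with the adaptation pattern in asyncio.py above, here is a self-contained toy sketch of the same idea: coroutines of an async cursor are driven to completion from synchronous callers, and rows are pre-fetched into a deque so the sync fetch methods never need to await. The real adapter uses SQLAlchemy's greenlet-based await_only/await_fallback rather than run_until_complete; FakeAsyncCursor and SyncAdaptedCursor are invented purely for illustration.

import asyncio
import collections


class FakeAsyncCursor:
    """Stand-in for an async DBAPI cursor; invented for this sketch."""

    async def execute(self, stmt, params=()):
        await asyncio.sleep(0)            # pretend to talk to the database
        self._result = [(1,), (2,), (3,)]

    async def fetchall(self):
        return self._result


class SyncAdaptedCursor:
    """Drives the async cursor from sync code, like AsyncAdapt_dbapi_cursor."""

    def __init__(self, loop):
        self._loop = loop
        self._cursor = FakeAsyncCursor()
        self._rows = collections.deque()

    def execute(self, stmt, params=()):
        # analogous to await_(): block until the coroutine finishes, then buffer
        # all rows so fetchone()/fetchmany() remain purely synchronous
        self._loop.run_until_complete(self._cursor.execute(stmt, params))
        self._rows = collections.deque(
            self._loop.run_until_complete(self._cursor.fetchall())
        )

    def fetchone(self):
        return self._rows.popleft() if self._rows else None


loop = asyncio.new_event_loop()
cursor = SyncAdaptedCursor(loop)
cursor.execute("SELECT 1")
print(cursor.fetchone())   # (1,)
loop.close()
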
diff --git a/venv/lib/python3.11/site-packages/sqlalchemy/connectors/pyodbc.py b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/pyodbc.py
new file mode 100644
index 0000000..f204d80
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/sqlalchemy/connectors/pyodbc.py
@@ -0,0 +1,249 @@
+# connectors/pyodbc.py
+# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import re
+from types import ModuleType
+import typing
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import Union
+from urllib.parse import unquote_plus
+
+from . import Connector
+from .. import ExecutionContext
+from .. import pool
+from .. import util
+from ..engine import ConnectArgsType
+from ..engine import Connection
+from ..engine import interfaces
+from ..engine import URL
+from ..sql.type_api import TypeEngine
+
+if typing.TYPE_CHECKING:
+ from ..engine.interfaces import IsolationLevel
+
+
+class PyODBCConnector(Connector):
+ driver = "pyodbc"
+
+ # this is no longer False for pyodbc in general
+ supports_sane_rowcount_returning = True
+ supports_sane_multi_rowcount = False
+
+ supports_native_decimal = True
+ default_paramstyle = "named"
+
+ fast_executemany = False
+
+ # for non-DSN connections, this *may* be used to
+ # hold the desired driver name
+ pyodbc_driver_name: Optional[str] = None
+
+ dbapi: ModuleType
+
+ def __init__(self, use_setinputsizes: bool = False, **kw: Any):
+ super().__init__(**kw)
+ if use_setinputsizes:
+ self.bind_typing = interfaces.BindTyping.SETINPUTSIZES
+
+ @classmethod
+ def import_dbapi(cls) -> ModuleType:
+ return __import__("pyodbc")
+
+ def create_connect_args(self, url: URL) -> ConnectArgsType:
+ opts = url.translate_connect_args(username="user")
+ opts.update(url.query)
+
+ keys = opts
+
+ query = url.query
+
+ connect_args: Dict[str, Any] = {}
+ connectors: List[str]
+
+ for param in ("ansi", "unicode_results", "autocommit"):
+ if param in keys:
+ connect_args[param] = util.asbool(keys.pop(param))
+
+ if "odbc_connect" in keys:
+ connectors = [unquote_plus(keys.pop("odbc_connect"))]
+ else:
+
+ def check_quote(token: str) -> str:
+ if ";" in str(token) or str(token).startswith("{"):
+ token = "{%s}" % token.replace("}", "}}")
+ return token
+
+ keys = {k: check_quote(v) for k, v in keys.items()}
+
+ dsn_connection = "dsn" in keys or (
+ "host" in keys and "database" not in keys
+ )
+ if dsn_connection:
+ connectors = [
+ "dsn=%s" % (keys.pop("host", "") or keys.pop("dsn", ""))
+ ]
+ else:
+ port = ""
+ if "port" in keys and "port" not in query:
+ port = ",%d" % int(keys.pop("port"))
+
+ connectors = []
+ driver = keys.pop("driver", self.pyodbc_driver_name)
+ if driver is None and keys:
+ # note if keys is empty, this is a totally blank URL
+ util.warn(
+ "No driver name specified; "
+ "this is expected by PyODBC when using "
+ "DSN-less connections"
+ )
+ else:
+ connectors.append("DRIVER={%s}" % driver)
+
+ connectors.extend(
+ [
+ "Server=%s%s" % (keys.pop("host", ""), port),
+ "Database=%s" % keys.pop("database", ""),
+ ]
+ )
+
+ user = keys.pop("user", None)
+ if user:
+ connectors.append("UID=%s" % user)
+ pwd = keys.pop("password", "")
+ if pwd:
+ connectors.append("PWD=%s" % pwd)
+ else:
+ authentication = keys.pop("authentication", None)
+ if authentication:
+ connectors.append("Authentication=%s" % authentication)
+ else:
+ connectors.append("Trusted_Connection=Yes")
+
+ # if set to 'Yes', the ODBC layer will try to automagically
+ # convert textual data from your database encoding to your
+ # client encoding. This should obviously be set to 'No' if
+ # you query a cp1253 encoded database from a latin1 client...
+ if "odbc_autotranslate" in keys:
+ connectors.append(
+ "AutoTranslate=%s" % keys.pop("odbc_autotranslate")
+ )
+
+ connectors.extend(["%s=%s" % (k, v) for k, v in keys.items()])
+
+ return ((";".join(connectors),), connect_args)
+
+ def is_disconnect(
+ self,
+ e: Exception,
+ connection: Optional[
+ Union[pool.PoolProxiedConnection, interfaces.DBAPIConnection]
+ ],
+ cursor: Optional[interfaces.DBAPICursor],
+ ) -> bool:
+ if isinstance(e, self.dbapi.ProgrammingError):
+ return "The cursor's connection has been closed." in str(
+ e
+ ) or "Attempt to use a closed connection." in str(e)
+ else:
+ return False
+
+ def _dbapi_version(self) -> interfaces.VersionInfoType:
+ if not self.dbapi:
+ return ()
+ return self._parse_dbapi_version(self.dbapi.version)
+
+ def _parse_dbapi_version(self, vers: str) -> interfaces.VersionInfoType:
+ m = re.match(r"(?:py.*-)?([\d\.]+)(?:-(\w+))?", vers)
+ if not m:
+ return ()
+ vers_tuple: interfaces.VersionInfoType = tuple(
+ [int(x) for x in m.group(1).split(".")]
+ )
+ if m.group(2):
+ vers_tuple += (m.group(2),)
+ return vers_tuple
+
+ def _get_server_version_info(
+ self, connection: Connection
+ ) -> interfaces.VersionInfoType:
+ # NOTE: this function is not reliable, particularly when
+ # freetds is in use. Implement database-specific server version
+ # queries.
+ dbapi_con = connection.connection.dbapi_connection
+ version: Tuple[Union[int, str], ...] = ()
+ r = re.compile(r"[.\-]")
+ for n in r.split(dbapi_con.getinfo(self.dbapi.SQL_DBMS_VER)): # type: ignore[union-attr] # noqa: E501
+ try:
+ version += (int(n),)
+ except ValueError:
+ pass
+ return tuple(version)
+
+ def do_set_input_sizes(
+ self,
+ cursor: interfaces.DBAPICursor,
+ list_of_tuples: List[Tuple[str, Any, TypeEngine[Any]]],
+ context: ExecutionContext,
+ ) -> None:
+ # the rules for these types seem a little strange, as you can pass
+ # non-tuples as well as tuples, however it seems to assume "0"
+ # for the subsequent values if you don't pass a tuple which fails
+ # for types such as pyodbc.SQL_WLONGVARCHAR, which is the datatype
+ # that ticket #5649 is targeting.
+
+ # NOTE: as of #6058, this won't be called if the use_setinputsizes
+ # parameter were not passed to the dialect, or if no types were
+ # specified in list_of_tuples
+
+ # as of #8177 for 2.0 we assume use_setinputsizes=True and only
+ # omit the setinputsizes calls for .executemany() with
+ # fast_executemany=True
+
+ if (
+ context.execute_style is interfaces.ExecuteStyle.EXECUTEMANY
+ and self.fast_executemany
+ ):
+ return
+
+ cursor.setinputsizes(
+ [
+ (
+ (dbtype, None, None)
+ if not isinstance(dbtype, tuple)
+ else dbtype
+ )
+ for key, dbtype, sqltype in list_of_tuples
+ ]
+ )
+
+ def get_isolation_level_values(
+ self, dbapi_connection: interfaces.DBAPIConnection
+ ) -> List[IsolationLevel]:
+ return super().get_isolation_level_values(dbapi_connection) + [
+ "AUTOCOMMIT"
+ ]
+
+ def set_isolation_level(
+ self,
+ dbapi_connection: interfaces.DBAPIConnection,
+ level: IsolationLevel,
+ ) -> None:
+ # adjust for ConnectionFairy being present
+ # allows attribute set e.g. "connection.autocommit = True"
+ # to work properly
+
+ if level == "AUTOCOMMIT":
+ dbapi_connection.autocommit = True
+ else:
+ dbapi_connection.autocommit = False
+ super().set_isolation_level(dbapi_connection, level)
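
(Editor's note, not part of the diff.) A hedged sketch of what create_connect_args above produces for a hosted (DSN-less) URL, using the shipped mssql+pyodbc dialect as a concrete PyODBCConnector subclass. The host, credentials, and driver name are placeholders, and the exact output string may vary between SQLAlchemy versions.

from sqlalchemy.dialects.mssql.pyodbc import MSDialect_pyodbc
from sqlalchemy.engine import make_url

dialect = MSDialect_pyodbc()
url = make_url(
    "mssql+pyodbc://scott:tiger@myhost:1433/mydb"
    "?driver=ODBC+Driver+18+for+SQL+Server"
)
args, kwargs = dialect.create_connect_args(url)
print(args[0])
# expected (approximately):
# DRIVER={ODBC Driver 18 for SQL Server};Server=myhost,1433;Database=mydb;UID=scott;PWD=tiger

As the odbc_connect branch above shows, a fully formed ODBC connection string can instead be passed verbatim via the odbc_connect query parameter (URL-encoded, e.g. with urllib.parse.quote_plus), in which case the keyword-by-keyword assembly and brace quoting in check_quote() are bypassed.
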